text
stringlengths 27
775k
|
|---|
"""
    StdDiagnostics

This module defines many standard diagnostic variables and groups that may
be used directly by experiments.
"""
module StdDiagnostics

# External package dependencies.
using CLIMAParameters
using CLIMAParameters.Planet
using CLIMAParameters.Atmos
using CLIMAParameters.SubgridScale
using KernelAbstractions
using MPI
using OrderedCollections
using Printf
using StaticArrays

# Parent-package dependencies.
using ..Diagnostics # until old diagnostics groups are removed
using ..Atmos
using ..BalanceLaws
using ..ConfigTypes
using ..DGMethods
using ..DiagnosticsMachine
# Interface functions implemented by the diagnostic-variable definitions
# included below.
import ..DiagnosticsMachine:
    Settings,
    dv_name,
    dv_attrib,
    dv_args,
    dv_project,
    dv_scale,
    dv_PointwiseDiagnostic,
    dv_HorizontalAverage
using ..Mesh.Grids
using ..Mesh.Interpolation
using ..Mesh.Topologies
using ..MPIStateArrays
using Thermodynamics
using ..TurbulenceClosures
using ..VariableTemplates
using ..Writers

# Pre-defined diagnostic variables
# Atmos
include("atmos_les_diagnostic_vars.jl")
include("atmos_gcm_diagnostic_vars.jl")

# Pre-defined diagnostics groups
# Atmos
include("atmos_les_default.jl")
include("atmos_gcm_default.jl")

end # module StdDiagnostics
|
// Copyright(c) 2021 Hansen Audio.

/// Number of audio channels in a frame.
pub const NUM_CHANNELS: usize = 4;

/// One multi-channel audio sample: a fixed-size array of `f32`, one value
/// per channel.
pub type AudioFrame = [f32; NUM_CHANNELS];

/// C-compatible bindings for the public API.
pub mod cbindings;
/// Internal implementation details (not exported).
mod detail;
/// The trance-gate audio effect.
pub mod trance_gate;
|
import 'package:redux/redux.dart';
import 'package:storyboard/redux/models/note_repo.dart';
import '../actions/actions.dart';
import '../models/note.dart';
/// Root reducer for the note repository: routes each note action type to
/// its dedicated handler defined below.
final noteReducer = combineReducers<NoteRepo>([
  TypedReducer<NoteRepo, FetchNotesAction>(_fetchNotes),
  TypedReducer<NoteRepo, CreateNoteAction>(_createNote),
  TypedReducer<NoteRepo, UpdateNoteAction>(_updateNote),
  TypedReducer<NoteRepo, DeleteNoteAction>(_deleteNote),
]);
/// Merges a fetched batch of notes (`action.noteMap`) into [noteRepo].
///
/// - A note not yet in the repo and not marked deleted is added.
/// - A note already in the repo and not marked deleted replaces the local
///   copy.
/// - A note marked deleted (`deleted != 0`) is removed.
/// - `lastTS` advances to the newest timestamp seen in the batch.
NoteRepo _fetchNotes(
  NoteRepo noteRepo,
  FetchNotesAction action,
) {
  final Map<String, Note> newNotes = {};
  final Map<String, Note> existedNotes = {};
  final Set<String> removeUuids = {};
  int lastTS = noteRepo.lastTS;

  action.noteMap.forEach((uuid, element) {
    if (noteRepo.notes[uuid] == null) {
      if (element.deleted == 0) {
        newNotes[uuid] = element;
      }
    } else if (element.deleted == 0) {
      existedNotes[uuid] = element;
    } else {
      removeUuids.add(element.uuid);
    }
    // Track the newest server timestamp so subsequent fetches can be
    // incremental.
    if (element.ts > lastTS) {
      lastTS = element.ts;
    }
  });

  // Merge: refresh existing entries, append new ones, drop deleted ones.
  return noteRepo.copyWith(
    notes: Map.from(noteRepo.notes)
        .map((uuid, note) => MapEntry(uuid, existedNotes[uuid] ?? note))
      ..addAll(newNotes)
      ..removeWhere((uuid, note) => removeUuids.contains(uuid)),
    lastTS: lastTS,
  );
}
/// Returns a copy of [noteRepo] with the newly created note inserted
/// (keyed by its uuid). The original repo is not mutated.
NoteRepo _createNote(
  NoteRepo noteRepo,
  CreateNoteAction action,
) {
  final created = action.note;
  return noteRepo.copyWith(
    notes: Map.from(noteRepo.notes)..[created.uuid] = created,
  );
}
/// Returns a copy of [noteRepo] where the note whose uuid matches
/// `action.note.uuid` is replaced by the updated note; all other entries
/// are left as-is. A uuid that is not already present is not added.
NoteRepo _updateNote(
  NoteRepo noteRepo,
  UpdateNoteAction action,
) {
  final updated = action.note;
  return noteRepo.copyWith(
    notes: Map.from(noteRepo.notes)
      ..updateAll((uuid, note) => uuid == updated.uuid ? updated : note),
  );
}
/// Returns a copy of [noteRepo] with the note identified by `action.uuid`
/// removed (a no-op if the uuid is absent).
NoteRepo _deleteNote(
  NoteRepo noteRepo,
  DeleteNoteAction action,
) {
  return noteRepo.copyWith(
    notes: Map.from(noteRepo.notes)
      ..removeWhere((uuid, _) => uuid == action.uuid),
  );
}
|
# aws_glue_security_configuration
[back](../aws.md)
### Index
- [Example Usage](#example-usage)
- [Variables](#variables)
- [Resource](#resource)
- [Outputs](#outputs)
### Terraform
```terraform
terraform {
required_providers {
aws = ">= 3.35.0"
}
}
```
[top](#index)
### Example Usage
```terraform
module "aws_glue_security_configuration" {
source = "./modules/aws/r/aws_glue_security_configuration"
# name - (required) is a type of string
name = null
encryption_configuration = [{
cloudwatch_encryption = [{
cloudwatch_encryption_mode = null
kms_key_arn = null
}]
job_bookmarks_encryption = [{
job_bookmarks_encryption_mode = null
kms_key_arn = null
}]
s3_encryption = [{
kms_key_arn = null
s3_encryption_mode = null
}]
}]
}
```
[top](#index)
### Variables
```terraform
variable "name" {
description = "(required)"
type = string
}
variable "encryption_configuration" {
description = "nested block: NestingList, min items: 1, max items: 1"
type = set(object(
{
cloudwatch_encryption = list(object(
{
cloudwatch_encryption_mode = string
kms_key_arn = string
}
))
job_bookmarks_encryption = list(object(
{
job_bookmarks_encryption_mode = string
kms_key_arn = string
}
))
s3_encryption = list(object(
{
kms_key_arn = string
s3_encryption_mode = string
}
))
}
))
}
```
[top](#index)
### Resource
```terraform
resource "aws_glue_security_configuration" "this" {
# name - (required) is a type of string
name = var.name
dynamic "encryption_configuration" {
for_each = var.encryption_configuration
content {
dynamic "cloudwatch_encryption" {
for_each = encryption_configuration.value.cloudwatch_encryption
content {
# cloudwatch_encryption_mode - (optional) is a type of string
cloudwatch_encryption_mode = cloudwatch_encryption.value["cloudwatch_encryption_mode"]
# kms_key_arn - (optional) is a type of string
kms_key_arn = cloudwatch_encryption.value["kms_key_arn"]
}
}
dynamic "job_bookmarks_encryption" {
for_each = encryption_configuration.value.job_bookmarks_encryption
content {
# job_bookmarks_encryption_mode - (optional) is a type of string
job_bookmarks_encryption_mode = job_bookmarks_encryption.value["job_bookmarks_encryption_mode"]
# kms_key_arn - (optional) is a type of string
kms_key_arn = job_bookmarks_encryption.value["kms_key_arn"]
}
}
dynamic "s3_encryption" {
for_each = encryption_configuration.value.s3_encryption
content {
# kms_key_arn - (optional) is a type of string
kms_key_arn = s3_encryption.value["kms_key_arn"]
# s3_encryption_mode - (optional) is a type of string
s3_encryption_mode = s3_encryption.value["s3_encryption_mode"]
}
}
}
}
}
```
[top](#index)
### Outputs
```terraform
output "id" {
description = "returns a string"
value = aws_glue_security_configuration.this.id
}
output "this" {
value = aws_glue_security_configuration.this
}
```
[top](#index)
|
// Copyright 2010-2011, Google Inc.
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// Tests for session converter.
//
// Note that we have a lot of tests which assume that the converter fills
// T13Ns. If you want to add test case related to T13Ns, please make sure
// you set T13Ns to the result for a mock converter.
#include <set>
#include <string>
#include <vector>
#include "base/base.h"
#include "base/util.h"
#include "config/config.pb.h"
#include "config/config_handler.h"
#include "converter/converter_interface.h"
#include "converter/converter_mock.h"
#include "converter/segments.h"
#include "composer/composer.h"
#include "composer/table.h"
#include "session/internal/keymap.h"
#include "session/session_converter.h"
#include "session/internal/candidate_list.h"
#include "testing/base/public/gunit.h"
#include "testing/base/public/googletest.h"
#include "transliteration/transliteration.h"
DECLARE_string(test_tmpdir);
namespace mozc {
namespace session {
// Hiragana readings used as fixed test inputs, spelled out as UTF-8 byte
// literals.
// "も"
static const char kChars_Mo[] = "\xE3\x82\x82";
// "もずく"
static const char kChars_Mozuku[] = "\xE3\x82\x82\xE3\x81\x9A\xE3\x81\x8F";
// "もずくす"
static const char kChars_Mozukusu[] =
    "\xE3\x82\x82\xE3\x81\x9A\xE3\x81\x8F\xE3\x81\x99";
// "ももんが"
static const char kChars_Momonga[] =
    "\xE3\x82\x82\xE3\x82\x82\xE3\x82\x93\xE3\x81\x8C";
// Fixture for SessionConverter tests.  Installs a ConverterMock as the
// global converter and resets the config to defaults for every test.
// Helper methods build canned conversion results (Segments) for a few
// fixed readings so each test can script exactly what the mock returns.
class SessionConverterTest : public testing::Test {
 protected:
  virtual void SetUp() {
    convertermock_.reset(new ConverterMock());
    ConverterFactory::SetConverter(convertermock_.get());
    Util::SetUserProfileDirectory(FLAGS_test_tmpdir);
    config::Config config;
    config::ConfigHandler::GetDefaultConfig(&config);
    config::ConfigHandler::SetConfig(config);
    table_.reset(new composer::Table);
    table_->Initialize();
    composer_.reset(new composer::Composer);
    composer_->SetTable(table_.get());
    // "あいうえお"
    aiueo_ = "\xe3\x81\x82\xe3\x81\x84\xe3\x81\x86\xe3\x81\x88\xe3\x81\x8a";
  }

  virtual void TearDown() {
    table_.reset();
    composer_.reset();
    // just in case, reset the config in test_tmpdir
    config::Config config;
    config::ConfigHandler::GetDefaultConfig(&config);
    config::ConfigHandler::SetConfig(config);
  }

  // Fills |segments| with one segment for the key "あいうえお" carrying two
  // candidates: hiragana "あいうえお" and katakana "アイウエオ".
  void SetAiueo(Segments *segments) {
    segments->Clear();
    Segment *segment;
    Segment::Candidate *candidate;
    segment = segments->add_segment();
    // "あいうえお"
    segment->set_key(
        "\xe3\x81\x82\xe3\x81\x84\xe3\x81\x86\xe3\x81\x88\xe3\x81\x8a");
    candidate = segment->add_candidate();
    // "あいうえお"
    candidate->value =
        "\xe3\x81\x82\xe3\x81\x84\xe3\x81\x86\xe3\x81\x88\xe3\x81\x8a";
    candidate = segment->add_candidate();
    // "アイウエオ"
    candidate->value =
        "\xe3\x82\xa2\xe3\x82\xa4\xe3\x82\xa6\xe3\x82\xa8\xe3\x82\xaa";
  }

  // Fills |segments| with a two-segment result for "かまぼこのいんぼう":
  // segment 0 is "かまぼこの" (hiragana / katakana candidates), segment 1
  // is "いんぼう" ("陰謀" / "印房").  Dummy T13N meta candidates are set
  // on segment 1 only, all copied from the segment key.
  void SetKamaboko(Segments *segments) {
    Segment *segment;
    Segment::Candidate *candidate;
    segments->Clear();
    segment = segments->add_segment();
    // "かまぼこの"
    segment->set_key(
        "\xe3\x81\x8b\xe3\x81\xbe\xe3\x81\xbc\xe3\x81\x93\xe3\x81\xae");
    candidate = segment->add_candidate();
    // "かまぼこの"
    candidate->value =
        "\xe3\x81\x8b\xe3\x81\xbe\xe3\x81\xbc\xe3\x81\x93\xe3\x81\xae";
    candidate = segment->add_candidate();
    // "カマボコの"
    candidate->value =
        "\xe3\x82\xab\xe3\x83\x9e\xe3\x83\x9c\xe3\x82\xb3\xe3\x81\xae";
    segment = segments->add_segment();
    // "いんぼう"
    segment->set_key("\xe3\x81\x84\xe3\x82\x93\xe3\x81\xbc\xe3\x81\x86");
    candidate = segment->add_candidate();
    // "陰謀"
    candidate->value = "\xe9\x99\xb0\xe8\xac\x80";
    candidate = segment->add_candidate();
    // "印房"
    candidate->value = "\xe5\x8d\xb0\xe6\x88\xbf";
    // Set dummy T13Ns
    vector<Segment::Candidate> *meta_candidates =
        segment->mutable_meta_candidates();
    meta_candidates->resize(transliteration::NUM_T13N_TYPES);
    for (size_t i = 0; i < transliteration::NUM_T13N_TYPES; ++i) {
      meta_candidates->at(i).Init();
      meta_candidates->at(i).value = segment->key();
      meta_candidates->at(i).content_value = segment->key();
      meta_candidates->at(i).content_key = segment->key();
    }
  }

  // Sets T13N (transliteration) meta candidates on every conversion
  // segment of |segments|, derived from the composer's current
  // composition.  Each segment's meta candidates cover the slice of the
  // composition that corresponds to that segment's key.
  void FillT13Ns(Segments *segments, const composer::Composer *composer) {
    size_t composition_pos = 0;
    for (size_t i = 0; i < segments->conversion_segments_size(); ++i) {
      Segment *segment = segments->mutable_conversion_segment(i);
      CHECK(segment);
      const size_t composition_len = Util::CharsLen(segment->key());
      vector<string> t13ns;
      composer->GetSubTransliterations(
          composition_pos, composition_len, &t13ns);
      vector<Segment::Candidate> *meta_candidates =
          segment->mutable_meta_candidates();
      meta_candidates->resize(transliteration::NUM_T13N_TYPES);
      for (size_t j = 0; j < transliteration::NUM_T13N_TYPES; ++j) {
        meta_candidates->at(j).Init();
        meta_candidates->at(j).value = t13ns[j];
        meta_candidates->at(j).content_value = t13ns[j];
        meta_candidates->at(j).content_key = segment->key();
      }
      composition_pos += composition_len;
    }
  }

  // Types "like" into the composer (producing the preedit "ぃけ") and
  // fills |segments| with the matching two-segment mock result, then
  // registers it with the converter mock.
  void InitConverterWithLike(Segments *segments) {
    // "ぃ"
    composer_->InsertCharacterKeyAndPreedit("li", "\xE3\x81\x83");
    // "け"
    composer_->InsertCharacterKeyAndPreedit("ke", "\xE3\x81\x91");
    Segment *segment;
    Segment::Candidate *candidate;
    segments->Clear();
    segment = segments->add_segment();
    // "ぃ"
    segment->set_key("\xE3\x81\x83");
    candidate = segment->add_candidate();
    // "ぃ"
    candidate->value = "\xE3\x81\x83";
    candidate = segment->add_candidate();
    // "ィ"
    candidate->value = "\xE3\x82\xA3";
    segment = segments->add_segment();
    // "け"
    segment->set_key("\xE3\x81\x91");
    candidate = segment->add_candidate();
    // "家"
    candidate->value = "\xE5\xAE\xB6";
    candidate = segment->add_candidate();
    // "け"
    candidate->value = "\xE3\x81\x91";
    FillT13Ns(segments, composer_.get());
    convertermock_->SetStartConversionWithComposer(segments, true);
  }

  // Feeds each character of |text| to |composer| as a separate key event.
  void InsertASCIISequence(const string text, composer::Composer *composer) {
    for (size_t i = 0; i < text.size(); ++i) {
      commands::KeyEvent key;
      key.set_key_code(text[i]);
      composer->InsertCharacterKeyEvent(key);
    }
  }

  // Asserts that |lhs| and |rhs| are in an equivalent state: activity,
  // candidate-list visibility, preferences, segments, candidate lists,
  // and pending result.
  void ExpectSameSessionConverter(const SessionConverter &lhs,
                                  const SessionConverter &rhs) {
    EXPECT_EQ(lhs.IsActive(), rhs.IsActive());
    EXPECT_EQ(lhs.IsCandidateListVisible(), rhs.IsCandidateListVisible());
    EXPECT_EQ(lhs.GetSegmentIndex(), rhs.GetSegmentIndex());
    EXPECT_EQ(lhs.GetOperationPreferences().use_cascading_window,
              rhs.GetOperationPreferences().use_cascading_window);
    EXPECT_EQ(lhs.GetOperationPreferences().candidate_shortcuts,
              rhs.GetOperationPreferences().candidate_shortcuts);
    EXPECT_EQ(lhs.conversion_preferences().use_history,
              rhs.conversion_preferences().use_history);
    EXPECT_EQ(lhs.conversion_preferences().max_history_size,
              rhs.conversion_preferences().max_history_size);
    EXPECT_EQ(lhs.IsCandidateListVisible(),
              rhs.IsCandidateListVisible());
    Segments segments_lhs, segments_rhs;
    lhs.GetSegments(&segments_lhs);
    rhs.GetSegments(&segments_rhs);
    EXPECT_EQ(segments_lhs.segments_size(),
              segments_rhs.segments_size());
    for (size_t i = 0; i < segments_lhs.segments_size(); ++i) {
      Segment segment_lhs, segment_rhs;
      segment_lhs.CopyFrom(segments_lhs.segment(i));
      segment_rhs.CopyFrom(segments_rhs.segment(i));
      EXPECT_EQ(segment_lhs.key(), segment_rhs.key()) << " i=" << i;
      EXPECT_EQ(segment_lhs.segment_type(),
                segment_rhs.segment_type()) << " i=" << i;
      EXPECT_EQ(segment_lhs.candidates_size(), segment_rhs.candidates_size());
    }
    const CandidateList &candidate_list_lhs = lhs.GetCandidateList();
    const CandidateList &candidate_list_rhs = rhs.GetCandidateList();
    EXPECT_EQ(candidate_list_lhs.name(), candidate_list_rhs.name());
    EXPECT_EQ(candidate_list_lhs.page_size(), candidate_list_rhs.page_size());
    EXPECT_EQ(candidate_list_lhs.size(), candidate_list_rhs.size());
    EXPECT_EQ(candidate_list_lhs.last_index(), candidate_list_rhs.last_index());
    EXPECT_EQ(candidate_list_lhs.focused_id(), candidate_list_rhs.focused_id());
    EXPECT_EQ(candidate_list_lhs.focused_index(),
              candidate_list_rhs.focused_index());
    EXPECT_EQ(candidate_list_lhs.focused(), candidate_list_rhs.focused());
    for (int i = 0; i < candidate_list_lhs.size(); ++i) {
      const Candidate &candidate_lhs = candidate_list_lhs.candidate(i);
      const Candidate &candidate_rhs = candidate_list_rhs.candidate(i);
      EXPECT_EQ(candidate_lhs.id(), candidate_rhs.id());
      EXPECT_EQ(candidate_lhs.attributes(), candidate_rhs.attributes());
      EXPECT_EQ(candidate_lhs.IsSubcandidateList(),
                candidate_rhs.IsSubcandidateList());
      if (candidate_lhs.IsSubcandidateList()) {
        EXPECT_EQ(candidate_lhs.subcandidate_list().size(),
                  candidate_rhs.subcandidate_list().size());
      }
    }
    const commands::Result result_lhs = lhs.GetResult();
    const commands::Result result_rhs = rhs.GetResult();
    EXPECT_EQ(result_lhs.type(), result_rhs.type());
    EXPECT_EQ(result_lhs.value(), result_rhs.value());
    EXPECT_EQ(result_lhs.key(), result_rhs.key());
  }

  // Mock converter installed globally in SetUp().
  scoped_ptr<ConverterMock> convertermock_;
  scoped_ptr<composer::Composer> composer_;
  scoped_ptr<composer::Table> table_;
  // UTF-8 for "あいうえお"; initialized in SetUp().
  string aiueo_;
};
// Basic conversion round trip: Convert() produces a highlighted preedit
// segment, and Commit() turns it into a result and deactivates the
// converter.
TEST_F(SessionConverterTest, Convert) {
  SessionConverter converter(convertermock_.get());
  Segments segments;
  SetAiueo(&segments);
  FillT13Ns(&segments, composer_.get());
  convertermock_->SetStartConversionWithComposer(&segments, true);
  composer_->InsertCharacterPreedit(aiueo_);
  EXPECT_TRUE(converter.Convert(*composer_));
  ASSERT_TRUE(converter.IsActive());
  commands::Output output;
  converter.FillOutput(*composer_, &output);
  EXPECT_FALSE(output.has_result());
  EXPECT_TRUE(output.has_preedit());
  EXPECT_FALSE(output.has_candidates());
  const commands::Preedit &conversion = output.preedit();
  EXPECT_EQ(1, conversion.segment_size());
  EXPECT_EQ(commands::Preedit::Segment::HIGHLIGHT,
            conversion.segment(0).annotation());
  EXPECT_EQ(aiueo_, conversion.segment(0).value());
  EXPECT_EQ(aiueo_, conversion.segment(0).key());
  // Converter should be active before submission
  EXPECT_TRUE(converter.IsActive());
  EXPECT_FALSE(converter.IsCandidateListVisible());
  converter.Commit();
  composer_->Reset();
  output.Clear();
  converter.FillOutput(*composer_, &output);
  EXPECT_TRUE(output.has_result());
  EXPECT_FALSE(output.has_preedit());
  EXPECT_FALSE(output.has_candidates());
  const commands::Result &result = output.result();
  EXPECT_EQ(aiueo_, result.value());
  EXPECT_EQ(aiueo_, result.key());
  // Converter should be inactive after submission
  EXPECT_FALSE(converter.IsActive());
  EXPECT_FALSE(converter.IsCandidateListVisible());
}
// When the top candidate carries the SPELLING_CORRECTION attribute, the
// candidate list is made visible immediately after conversion so the user
// notices the correction.
TEST_F(SessionConverterTest, ConvertWithSpellingCorrection) {
  SessionConverter converter(convertermock_.get());
  Segments segments;
  SetAiueo(&segments);
  FillT13Ns(&segments, composer_.get());
  segments.mutable_conversion_segment(0)->mutable_candidate(0)->attributes |=
      Segment::Candidate::SPELLING_CORRECTION;
  convertermock_->SetStartConversionWithComposer(&segments, true);
  composer_->InsertCharacterPreedit(aiueo_);
  EXPECT_TRUE(converter.Convert(*composer_));
  ASSERT_TRUE(converter.IsActive());
  EXPECT_TRUE(converter.IsCandidateListVisible());
}
// ConvertToTransliteration cycles through transliteration variants:
// first HALF_ASCII gives "aiueo", repeating it gives "AIUEO", and
// switching to FULL_ASCII gives the full-width "ＡＩＵＥＯ".
TEST_F(SessionConverterTest, ConvertToTransliteration) {
  SessionConverter converter(convertermock_.get());
  Segments segments;
  SetAiueo(&segments);
  composer_->InsertCharacterKeyAndPreedit("aiueo", aiueo_);
  FillT13Ns(&segments, composer_.get());
  convertermock_->SetStartConversionWithComposer(&segments, true);
  EXPECT_TRUE(converter.ConvertToTransliteration(*composer_,
                                                 transliteration::HALF_ASCII));
  {  // Check the conversion #1: half-width lowercase "aiueo".
    commands::Output output;
    converter.FillOutput(*composer_, &output);
    EXPECT_FALSE(output.has_result());
    EXPECT_TRUE(output.has_preedit());
    EXPECT_FALSE(output.has_candidates());
    const commands::Preedit &conversion = output.preedit();
    EXPECT_EQ(1, conversion.segment_size());
    EXPECT_EQ("aiueo", conversion.segment(0).value());
    EXPECT_FALSE(converter.IsCandidateListVisible());
  }
  EXPECT_TRUE(converter.ConvertToTransliteration(*composer_,
                                                 transliteration::HALF_ASCII));
  {  // Check the conversion #2: same target again cycles to "AIUEO".
    commands::Output output;
    converter.FillOutput(*composer_, &output);
    EXPECT_FALSE(output.has_result());
    EXPECT_TRUE(output.has_preedit());
    EXPECT_FALSE(output.has_candidates());
    const commands::Preedit &conversion = output.preedit();
    EXPECT_EQ(1, conversion.segment_size());
    EXPECT_EQ("AIUEO", conversion.segment(0).value());
    EXPECT_FALSE(converter.IsCandidateListVisible());
  }
  EXPECT_TRUE(converter.ConvertToTransliteration(*composer_,
                                                 transliteration::FULL_ASCII));
  {  // Check the conversion #3: full-width "ＡＩＵＥＯ".
    commands::Output output;
    converter.FillOutput(*composer_, &output);
    EXPECT_FALSE(output.has_result());
    EXPECT_TRUE(output.has_preedit());
    EXPECT_FALSE(output.has_candidates());
    const commands::Preedit &conversion = output.preedit();
    EXPECT_EQ(1, conversion.segment_size());
    EXPECT_EQ("\xEF\xBC\xA1\xEF\xBC\xA9\xEF\xBC\xB5\xEF\xBC\xA5\xEF\xBC\xAF",
              conversion.segment(0).value());
    EXPECT_FALSE(converter.IsCandidateListVisible());
  }
}
// With a multi-segment conversion ("like" → "ぃ"+"家"), converting to
// HALF_ASCII transliterates only the focused (first) segment.
TEST_F(SessionConverterTest, ConvertToTransliterationWithMultipleSegments) {
  Segments segments;
  InitConverterWithLike(&segments);
  SessionConverter converter(convertermock_.get());
  // Convert
  EXPECT_TRUE(converter.Convert(*composer_));
  {  // Check the conversion #1: the plain two-segment conversion.
    commands::Output output;
    converter.FillOutput(*composer_, &output);
    EXPECT_FALSE(output.has_result());
    EXPECT_TRUE(output.has_preedit());
    EXPECT_FALSE(output.has_candidates());
    const commands::Preedit &conversion = output.preedit();
    EXPECT_EQ(2, conversion.segment_size());
    // "ぃ"
    EXPECT_EQ("\xE3\x81\x83", conversion.segment(0).value());
    // "家"
    EXPECT_EQ("\xE5\xAE\xB6", conversion.segment(1).value());
    EXPECT_FALSE(converter.IsCandidateListVisible());
  }
  // Convert to half-width alphanumeric.
  EXPECT_TRUE(converter.ConvertToTransliteration(*composer_,
                                                 transliteration::HALF_ASCII));
  {  // Check the conversion #2: only the focused segment becomes "li".
    commands::Output output;
    converter.FillOutput(*composer_, &output);
    EXPECT_FALSE(output.has_result());
    EXPECT_TRUE(output.has_preedit());
    EXPECT_FALSE(output.has_candidates());
    const commands::Preedit &conversion = output.preedit();
    EXPECT_EQ(2, conversion.segment_size());
    EXPECT_EQ("li", conversion.segment(0).value());
    EXPECT_FALSE(converter.IsCandidateListVisible());
  }
}
// With the cascading window disabled, repeated FULL_ASCII requests cycle
// the full-width variants in place: "ｄｖｄ" → "ＤＶＤ" → "Ｄｖｄ",
// without showing a candidate list.
TEST_F(SessionConverterTest, ConvertToTransliterationWithoutCascadigWindow) {
  SessionConverter converter(convertermock_.get());
  Segments segments;
  {
    Segment *segment;
    Segment::Candidate *candidate;
    segment = segments.add_segment();
    segment->set_key("dvd");
    candidate = segment->add_candidate();
    candidate->value = "dvd";
    candidate = segment->add_candidate();
    candidate->value = "DVD";
  }
  {  // Set OperationPreferences
    OperationPreferences preferences;
    preferences.use_cascading_window = false;
    preferences.candidate_shortcuts = "";
    converter.SetOperationPreferences(preferences);
  }
  // "dvd" typed as the full-width preedit "ｄｖｄ".
  composer_->InsertCharacterKeyAndPreedit(
      "dvd", "\xEF\xBD\x84\xEF\xBD\x96\xEF\xBD\x84");
  FillT13Ns(&segments, composer_.get());
  convertermock_->SetStartConversionWithComposer(&segments, true);
  EXPECT_TRUE(converter.ConvertToTransliteration(*composer_,
                                                 transliteration::FULL_ASCII));
  {  // Check the conversion #1: full-width lowercase "ｄｖｄ".
    commands::Output output;
    converter.FillOutput(*composer_, &output);
    EXPECT_FALSE(output.has_result());
    EXPECT_TRUE(output.has_preedit());
    EXPECT_FALSE(output.has_candidates());
    const commands::Preedit &conversion = output.preedit();
    EXPECT_EQ(1, conversion.segment_size());
    // "dvd"
    EXPECT_EQ("\xEF\xBD\x84\xEF\xBD\x96\xEF\xBD\x84",
              conversion.segment(0).value());
    EXPECT_FALSE(converter.IsCandidateListVisible());
  }
  EXPECT_TRUE(converter.ConvertToTransliteration(*composer_,
                                                 transliteration::FULL_ASCII));
  {  // Check the conversion #2: cycles to full-width uppercase "ＤＶＤ".
    commands::Output output;
    converter.FillOutput(*composer_, &output);
    EXPECT_FALSE(output.has_result());
    EXPECT_TRUE(output.has_preedit());
    EXPECT_FALSE(output.has_candidates());
    const commands::Preedit &conversion = output.preedit();
    EXPECT_EQ(1, conversion.segment_size());
    // "DVD"
    EXPECT_EQ("\xEF\xBC\xA4\xEF\xBC\xB6\xEF\xBC\xA4",
              conversion.segment(0).value());
    EXPECT_FALSE(converter.IsCandidateListVisible());
  }
  EXPECT_TRUE(converter.ConvertToTransliteration(*composer_,
                                                 transliteration::FULL_ASCII));
  {  // Check the conversion #3: cycles to capitalized "Ｄｖｄ".
    commands::Output output;
    converter.FillOutput(*composer_, &output);
    EXPECT_FALSE(output.has_result());
    EXPECT_TRUE(output.has_preedit());
    EXPECT_FALSE(output.has_candidates());
    const commands::Preedit &conversion = output.preedit();
    EXPECT_EQ(1, conversion.segment_size());
    // "Dvd"
    EXPECT_EQ("\xEF\xBC\xA4\xEF\xBD\x96\xEF\xBD\x84",
              conversion.segment(0).value());
    EXPECT_FALSE(converter.IsCandidateListVisible());
  }
}
// End-to-end multi-segment scenario for "かまぼこのいんぼう": checks the
// initial two-segment conversion, candidate-list visibility toggling,
// segment focus motion (right/left, with wrap-around at both ends —
// http://b/2990134), candidate motion within a segment, focus shortcuts
// (SegmentFocusLeftEdge/SegmentFocusLast), and the final committed result.
TEST_F(SessionConverterTest, MultiSegmentsConversion) {
  SessionConverter converter(convertermock_.get());
  Segments segments;
  SetKamaboko(&segments);
  const string kKamabokono =
      "\xe3\x81\x8b\xe3\x81\xbe\xe3\x81\xbc\xe3\x81\x93\xe3\x81\xae";
  const string kInbou =
      "\xe3\x81\x84\xe3\x82\x93\xe3\x81\xbc\xe3\x81\x86";
  // "かまぼこのいんぼう"
  composer_->InsertCharacterPreedit(kKamabokono + kInbou);
  FillT13Ns(&segments, composer_.get());
  convertermock_->SetStartConversionWithComposer(&segments, true);
  EXPECT_TRUE(converter.Convert(*composer_));
  // Test for conversion
  {
    commands::Output output;
    converter.FillOutput(*composer_, &output);
    EXPECT_FALSE(output.has_result());
    EXPECT_TRUE(output.has_preedit());
    EXPECT_FALSE(output.has_candidates());
    const commands::Preedit &conversion = output.preedit();
    EXPECT_EQ(2, conversion.segment_size());
    EXPECT_EQ(commands::Preedit::Segment::HIGHLIGHT,
              conversion.segment(0).annotation());
    EXPECT_EQ(kKamabokono, conversion.segment(0).key());
    EXPECT_EQ(kKamabokono, conversion.segment(0).value());
    EXPECT_EQ(commands::Preedit::Segment::UNDERLINE,
              conversion.segment(1).annotation());
    EXPECT_EQ(kInbou, conversion.segment(1).key());
    // "陰謀"
    EXPECT_EQ("\xe9\x99\xb0\xe8\xac\x80", conversion.segment(1).value());
  }
  // CandidateNext shows the list; CandidatePrev keeps it visible.
  EXPECT_FALSE(converter.IsCandidateListVisible());
  converter.CandidateNext(*composer_);
  EXPECT_TRUE(converter.IsCandidateListVisible());
  converter.CandidatePrev();
  EXPECT_TRUE(converter.IsCandidateListVisible());
  // Test for candidates
  {
    commands::Output output;
    converter.FillOutput(*composer_, &output);
    EXPECT_FALSE(output.has_result());
    EXPECT_TRUE(output.has_preedit());
    EXPECT_TRUE(output.has_candidates());
    const commands::Candidates &candidates = output.candidates();
    EXPECT_EQ(0, candidates.focused_index());
    EXPECT_EQ(3, candidates.size());  // two candidates + one t13n sub list.
    EXPECT_EQ(0, candidates.position());
    EXPECT_EQ(kKamabokono, candidates.candidate(0).value());
    // "カマボコの"
    EXPECT_EQ("\xe3\x82\xab\xe3\x83\x9e\xe3\x83\x9c\xe3\x82\xb3\xe3\x81\xae",
              candidates.candidate(1).value());
    // "そのほかの文字種";
    EXPECT_EQ("\xe3\x81\x9d\xe3\x81\xae\xe3\x81\xbb\xe3\x81\x8b\xe3\x81\xae"
              "\xe6\x96\x87\xe5\xad\x97\xe7\xa8\xae",
              candidates.candidate(2).value());
  }
  // Test for segment motion. [SegmentFocusRight]
  {
    converter.SegmentFocusRight();
    EXPECT_FALSE(converter.IsCandidateListVisible());
    converter.SetCandidateListVisible(true);
    commands::Output output;
    converter.FillOutput(*composer_, &output);
    EXPECT_FALSE(output.has_result());
    EXPECT_TRUE(output.has_preedit());
    EXPECT_TRUE(output.has_candidates());
    const commands::Candidates &candidates = output.candidates();
    EXPECT_EQ(0, candidates.focused_index());
    EXPECT_EQ(3, candidates.size());  // two candidates + one t13n sub list.
    EXPECT_EQ(5, candidates.position());
    // "陰謀"
    EXPECT_EQ("\xe9\x99\xb0\xe8\xac\x80",
              candidates.candidate(0).value());
    // "印房"
    EXPECT_EQ("\xe5\x8d\xb0\xe6\x88\xbf",
              candidates.candidate(1).value());
    // "そのほかの文字種";
    EXPECT_EQ("\xe3\x81\x9d\xe3\x81\xae\xe3\x81\xbb\xe3\x81\x8b\xe3\x81\xae"
              "\xe6\x96\x87\xe5\xad\x97\xe7\xa8\xae",
              candidates.candidate(2).value());
  }
  // Test for segment motion. [SegmentFocusLeft]
  {
    converter.SegmentFocusLeft();
    EXPECT_FALSE(converter.IsCandidateListVisible());
    converter.SetCandidateListVisible(true);
    commands::Output output;
    converter.FillOutput(*composer_, &output);
    EXPECT_FALSE(output.has_result());
    EXPECT_TRUE(output.has_preedit());
    EXPECT_TRUE(output.has_candidates());
    const commands::Candidates &candidates = output.candidates();
    EXPECT_EQ(0, candidates.focused_index());
    EXPECT_EQ(3, candidates.size());  // two candidates + one t13n sub list.
    EXPECT_EQ(0, candidates.position());
    EXPECT_EQ(kKamabokono, candidates.candidate(0).value());
    // "カマボコの"
    EXPECT_EQ("\xe3\x82\xab\xe3\x83\x9e\xe3\x83\x9c\xe3\x82\xb3\xe3\x81\xae",
              candidates.candidate(1).value());
    // "そのほかの文字種";
    EXPECT_EQ("\xe3\x81\x9d\xe3\x81\xae\xe3\x81\xbb\xe3\x81\x8b\xe3\x81\xae"
              "\xe6\x96\x87\xe5\xad\x97\xe7\xa8\xae",
              candidates.candidate(2).value());
  }
  // Test for segment motion. [SegmentFocusLeft] at the head of segments.
  // http://b/2990134
  // Focus changing at the tail of segments to right,
  // and at the head of segments to left, should work.
  {
    converter.SegmentFocusLeft();
    EXPECT_FALSE(converter.IsCandidateListVisible());
    converter.SetCandidateListVisible(true);
    commands::Output output;
    converter.FillOutput(*composer_, &output);
    EXPECT_FALSE(output.has_result());
    EXPECT_TRUE(output.has_preedit());
    EXPECT_TRUE(output.has_candidates());
    const commands::Candidates &candidates = output.candidates();
    EXPECT_EQ(0, candidates.focused_index());
    EXPECT_EQ(3, candidates.size());  // two candidates + one t13n sub list.
    EXPECT_EQ(5, candidates.position());
    // "陰謀"
    EXPECT_EQ("\xe9\x99\xb0\xe8\xac\x80",
              candidates.candidate(0).value());
    // "印房"
    EXPECT_EQ("\xe5\x8d\xb0\xe6\x88\xbf",
              candidates.candidate(1).value());
    // "そのほかの文字種";
    EXPECT_EQ("\xe3\x81\x9d\xe3\x81\xae\xe3\x81\xbb\xe3\x81\x8b\xe3\x81\xae"
              "\xe6\x96\x87\xe5\xad\x97\xe7\xa8\xae",
              candidates.candidate(2).value());
  }
  // Test for segment motion. [SegmentFocusRight] at the tail of segments.
  // http://b/2990134
  // Focus changing at the tail of segments to right,
  // and at the head of segments to left, should work.
  {
    converter.SegmentFocusRight();
    EXPECT_FALSE(converter.IsCandidateListVisible());
    converter.SetCandidateListVisible(true);
    commands::Output output;
    converter.FillOutput(*composer_, &output);
    EXPECT_FALSE(output.has_result());
    EXPECT_TRUE(output.has_preedit());
    EXPECT_TRUE(output.has_candidates());
    const commands::Candidates &candidates = output.candidates();
    EXPECT_EQ(0, candidates.focused_index());
    EXPECT_EQ(3, candidates.size());  // two candidates + one t13n sub list.
    EXPECT_EQ(0, candidates.position());
    EXPECT_EQ(kKamabokono, candidates.candidate(0).value());
    // "カマボコの"
    EXPECT_EQ("\xe3\x82\xab\xe3\x83\x9e\xe3\x83\x9c\xe3\x82\xb3\xe3\x81\xae",
              candidates.candidate(1).value());
    // "そのほかの文字種";
    EXPECT_EQ("\xe3\x81\x9d\xe3\x81\xae\xe3\x81\xbb\xe3\x81\x8b\xe3\x81\xae"
              "\xe6\x96\x87\xe5\xad\x97\xe7\xa8\xae",
              candidates.candidate(2).value());
  }
  // Test for candidate motion. [CandidateNext]
  {
    converter.SegmentFocusRight();  // Focus to the last segment.
    converter.CandidateNext(*composer_);
    EXPECT_TRUE(converter.IsCandidateListVisible());
    commands::Output output;
    converter.FillOutput(*composer_, &output);
    EXPECT_FALSE(output.has_result());
    EXPECT_TRUE(output.has_preedit());
    EXPECT_TRUE(output.has_candidates());
    const commands::Candidates &candidates = output.candidates();
    EXPECT_EQ(1, candidates.focused_index());
    EXPECT_EQ(3, candidates.size());  // two candidates + one t13n sub list.
    EXPECT_EQ(5, candidates.position());
    // "陰謀"
    EXPECT_EQ("\xe9\x99\xb0\xe8\xac\x80", candidates.candidate(0).value());
    // "印房"
    EXPECT_EQ("\xe5\x8d\xb0\xe6\x88\xbf", candidates.candidate(1).value());
    // "そのほかの文字種";
    EXPECT_EQ("\xe3\x81\x9d\xe3\x81\xae\xe3\x81\xbb\xe3\x81\x8b\xe3\x81\xae"
              "\xe6\x96\x87\xe5\xad\x97\xe7\xa8\xae",
              candidates.candidate(2).value());
    const commands::Preedit &conversion = output.preedit();
    EXPECT_EQ(kKamabokono, conversion.segment(0).value());
    // "印房"
    EXPECT_EQ("\xe5\x8d\xb0\xe6\x88\xbf", conversion.segment(1).value());
  }
  // Test for segment motion again [SegmentFocusLeftEdge] [SegmentFocusLast]
  // The positions of "陰謀" and "印房" should be swapped.
  {
    Segments fixed_segments;
    SetKamaboko(&fixed_segments);
    // "陰謀"
    ASSERT_EQ("\xe9\x99\xb0\xe8\xac\x80",
              fixed_segments.segment(1).candidate(0).value);
    // "印房"
    ASSERT_EQ("\xe5\x8d\xb0\xe6\x88\xbf",
              fixed_segments.segment(1).candidate(1).value);
    // swap the values.
    fixed_segments.mutable_segment(1)->mutable_candidate(0)->value.swap(
        fixed_segments.mutable_segment(1)->mutable_candidate(1)->value);
    // "印房"
    ASSERT_EQ("\xe5\x8d\xb0\xe6\x88\xbf",
              fixed_segments.segment(1).candidate(0).value);
    // "陰謀"
    ASSERT_EQ("\xe9\x99\xb0\xe8\xac\x80",
              fixed_segments.segment(1).candidate(1).value);
    convertermock_->SetCommitSegmentValue(&fixed_segments, true);
    converter.SegmentFocusLeftEdge();
    EXPECT_FALSE(converter.IsCandidateListVisible());
    converter.SegmentFocusLast();
    EXPECT_FALSE(converter.IsCandidateListVisible());
    converter.SetCandidateListVisible(true);
    commands::Output output;
    converter.FillOutput(*composer_, &output);
    EXPECT_FALSE(output.has_result());
    EXPECT_TRUE(output.has_preedit());
    EXPECT_TRUE(output.has_candidates());
    const commands::Candidates &candidates = output.candidates();
    EXPECT_EQ(0, candidates.focused_index());
    EXPECT_EQ(3, candidates.size());  // two candidates + one t13n sub list.
    EXPECT_EQ(5, candidates.position());
    // "印房"
    EXPECT_EQ("\xe5\x8d\xb0\xe6\x88\xbf", candidates.candidate(0).value());
    // "陰謀"
    EXPECT_EQ("\xe9\x99\xb0\xe8\xac\x80", candidates.candidate(1).value());
    // "そのほかの文字種";
    EXPECT_EQ("\xe3\x81\x9d\xe3\x81\xae\xe3\x81\xbb\xe3\x81\x8b\xe3\x81\xae"
              "\xe6\x96\x87\xe5\xad\x97\xe7\xa8\xae",
              candidates.candidate(2).value());
    const commands::Preedit &conversion = output.preedit();
    EXPECT_EQ(kKamabokono, conversion.segment(0).value());
    // "印房"
    EXPECT_EQ("\xe5\x8d\xb0\xe6\x88\xbf", conversion.segment(1).value());
  }
  // Commit and verify the final result string and key.
  {
    converter.Commit();
    composer_->Reset();
    EXPECT_FALSE(converter.IsCandidateListVisible());
    commands::Output output;
    converter.FillOutput(*composer_, &output);
    EXPECT_TRUE(output.has_result());
    EXPECT_FALSE(output.has_preedit());
    EXPECT_FALSE(output.has_candidates());
    const commands::Result &result = output.result();
    // "かまぼこの印房"
    EXPECT_EQ("\xe3\x81\x8b\xe3\x81\xbe\xe3\x81\xbc\xe3\x81\x93\xe3\x81\xae"
              "\xe5\x8d\xb0\xe6\x88\xbf",
              result.value());
    // "かまぼこのいんぼう"
    EXPECT_EQ("\xe3\x81\x8b\xe3\x81\xbe\xe3\x81\xbc\xe3\x81\x93\xe3\x81\xae"
              "\xe3\x81\x84\xe3\x82\x93\xe3\x81\xbc\xe3\x81\x86",
              result.key());
    EXPECT_FALSE(converter.IsActive());
  }
}
// Moving past the last conversion candidate focuses the transliteration
// (t13n) sub-list; its entries must match Composer::GetTransliterations.
// Key/preedit pairs are deliberately mismatched ("h"->"く", "J"->"ま") so
// raw-key-based transliterations differ from the kana preedit.
TEST_F(SessionConverterTest, Transliterations) {
  SessionConverter converter(convertermock_.get());
  // "く"
  composer_->InsertCharacterKeyAndPreedit("h", "\xE3\x81\x8F");
  // "ま"
  composer_->InsertCharacterKeyAndPreedit("J", "\xE3\x81\xBE");
  Segments segments;
  {  // Initialize segments with a single conversion candidate.
    Segment *segment = segments.add_segment();
    // "くま"
    segment->set_key("\xE3\x81\x8F\xE3\x81\xBE");
    // "クマー"
    segment->add_candidate()->value = "\xE3\x82\xAF\xE3\x83\x9E\xE3\x83\xBC";
  }
  FillT13Ns(&segments, composer_.get());
  convertermock_->SetStartConversionWithComposer(&segments, true);
  EXPECT_TRUE(converter.Convert(*composer_));
  EXPECT_FALSE(converter.IsCandidateListVisible());
  // Move to the t13n list.
  converter.CandidateNext(*composer_);
  EXPECT_TRUE(converter.IsCandidateListVisible());
  commands::Output output;
  converter.FillOutput(*composer_, &output);
  EXPECT_FALSE(output.has_result());
  EXPECT_TRUE(output.has_preedit());
  EXPECT_TRUE(output.has_candidates());
  const commands::Candidates &candidates = output.candidates();
  EXPECT_EQ(2, candidates.size());  // one candidate + one t13n sub list.
  EXPECT_EQ(1, candidates.focused_index());
  // "そのほかの文字種" -- label of the t13n sub-list entry.
  EXPECT_EQ("\xe3\x81\x9d\xe3\x81\xae\xe3\x81\xbb\xe3\x81\x8b\xe3\x81\xae"
            "\xe6\x96\x87\xe5\xad\x97\xe7\xa8\xae",
            candidates.candidate(1).value());
  vector<string> t13ns;
  composer_->GetTransliterations(&t13ns);
  EXPECT_TRUE(candidates.has_subcandidates());
  EXPECT_EQ(t13ns.size(), candidates.subcandidates().size());
  EXPECT_EQ(9, candidates.subcandidates().candidate_size());
  // Use int for the counter: the protobuf accessor candidate_size()
  // returns int, so a size_t counter triggers -Wsign-compare.
  for (int i = 0; i < candidates.subcandidates().candidate_size(); ++i) {
    EXPECT_EQ(t13ns[i], candidates.subcandidates().candidate(i).value());
  }
}
// Verifies that transliteration respects resegmentation: after the second
// segment "いんぼう" is shrunk into "いんぼ" + "う", converting the focused
// segment to half-width katakana must apply to the resized segment only.
TEST_F(SessionConverterTest, T13NWithResegmentation) {
  SessionConverter converter(convertermock_.get());
  {
    // Initial two-segment conversion: "かまぼこの" + "いんぼう".
    Segments segments;
    Segment *segment = segments.add_segment();
    Segment::Candidate *candidate;
    CHECK(segment);
    // "かまぼこの"
    segment->set_key(
        "\xe3\x81\x8b\xe3\x81\xbe\xe3\x81\xbc\xe3\x81\x93\xe3\x81\xae");
    candidate = segment->add_candidate();
    CHECK(candidate);
    // "かまぼこの"
    candidate->value =
        "\xe3\x81\x8b\xe3\x81\xbe\xe3\x81\xbc\xe3\x81\x93\xe3\x81\xae";
    segment = segments.add_segment();
    CHECK(segment);
    // "いんぼう"
    segment->set_key("\xe3\x81\x84\xe3\x82\x93\xe3\x81\xbc\xe3\x81\x86");
    candidate = segment->add_candidate();
    CHECK(candidate);
    // "いんぼう"
    candidate->value =
        "\xe3\x81\x84\xe3\x82\x93\xe3\x81\xbc\xe3\x81\x86";
    InsertASCIISequence("kamabokonoinbou", composer_.get());
    FillT13Ns(&segments, composer_.get());
    convertermock_->SetStartConversionWithComposer(&segments, true);
  }
  EXPECT_TRUE(converter.Convert(*composer_));
  // Test for segment motion. [SegmentFocusRight]
  converter.SegmentFocusRight();
  // Shrink segment: mock the resize result, which splits "いんぼう"
  // into "いんぼ" and "う" (three segments in total).
  {
    Segments segments;
    Segment *segment;
    Segment::Candidate *candidate;
    segments.Clear();
    segment = segments.add_segment();
    // "かまぼこの"
    segment->set_key(
        "\xe3\x81\x8b\xe3\x81\xbe\xe3\x81\xbc\xe3\x81\x93\xe3\x81\xae");
    candidate = segment->add_candidate();
    // "かまぼこの"
    candidate->value =
        "\xe3\x81\x8b\xe3\x81\xbe\xe3\x81\xbc\xe3\x81\x93\xe3\x81\xae";
    candidate = segment->add_candidate();
    // "カマボコの"
    candidate->value =
        "\xe3\x82\xab\xe3\x83\x9e\xe3\x83\x9c\xe3\x82\xb3\xe3\x81\xae";
    segment = segments.add_segment();
    // "いんぼ"
    segment->set_key("\xe3\x81\x84\xe3\x82\x93\xe3\x81\xbc");
    candidate = segment->add_candidate();
    // "インボ"
    candidate->value = "\xe3\x82\xa4\xe3\x83\xb3\xe3\x83\x9c";
    segment = segments.add_segment();
    // "う"
    segment->set_key("\xe3\x81\x86");
    candidate = segment->add_candidate();
    // "ウ"
    candidate->value = "\xe3\x82\xa6";
    FillT13Ns(&segments, composer_.get());
    convertermock_->SetResizeSegment1(&segments, true);
  }
  converter.SegmentWidthShrink();
  // Convert the focused (resized) segment to half-width katakana.
  converter.ConvertToTransliteration(*composer_,
                                     transliteration::HALF_KATAKANA);
  {
    commands::Output output;
    converter.FillOutput(*composer_, &output);
    EXPECT_FALSE(output.has_result());
    EXPECT_TRUE(output.has_preedit());
    const commands::Preedit &preedit = output.preedit();
    EXPECT_EQ(3, preedit.segment_size());
    // "ｲﾝﾎﾞ" -- half-width katakana of the resized middle segment only.
    EXPECT_EQ("\xef\xbd\xb2\xef\xbe\x9d\xef\xbe\x8e\xef\xbe\x9e",
              preedit.segment(1).value());
  }
}
// ConvertToHalfWidth on "あｂｃ" yields half-width "ｱbc"; after the
// composition is transliterated to full-width ASCII "abc", half-width
// conversion yields plain ASCII "abc" (it operates on the current
// composition, not the original kana).
TEST_F(SessionConverterTest, ConvertToHalfWidth) {
  SessionConverter converter(convertermock_.get());
  // "あ"
  composer_->InsertCharacterKeyAndPreedit("a", "\xE3\x81\x82");
  // "ｂ" (full-width)
  composer_->InsertCharacterKeyAndPreedit("b", "\xEF\xBD\x82");
  // "ｃ" (full-width)
  composer_->InsertCharacterKeyAndPreedit("c", "\xEF\xBD\x83");
  Segments segments;
  { // Initialize segments.
    Segment *segment = segments.add_segment();
    // "あｂｃ"
    segment->set_key("\xE3\x81\x82\xEF\xBD\x82\xEF\xBD\x83");
    // "あべし"
    segment->add_candidate()->value = "\xE3\x81\x82\xE3\x81\xB9\xE3\x81\x97";
  }
  FillT13Ns(&segments, composer_.get());
  convertermock_->SetStartConversionWithComposer(&segments, true);
  EXPECT_TRUE(converter.ConvertToHalfWidth(*composer_));
  EXPECT_FALSE(converter.IsCandidateListVisible());
  { // Make sure the output
    commands::Output output;
    converter.FillOutput(*composer_, &output);
    EXPECT_FALSE(output.has_result());
    EXPECT_TRUE(output.has_preedit());
    EXPECT_FALSE(output.has_candidates());
    const commands::Preedit &conversion = output.preedit();
    EXPECT_EQ(1, conversion.segment_size());
    // "ｱbc" (half-width)
    EXPECT_EQ("\xEF\xBD\xB1\x62\x63", conversion.segment(0).value());
  }
  // Composition will be transliterated to full-width "abc".
  EXPECT_TRUE(converter.ConvertToTransliteration(*composer_,
                                                 transliteration::FULL_ASCII));
  { // Make sure the output
    commands::Output output;
    converter.FillOutput(*composer_, &output);
    EXPECT_FALSE(output.has_result());
    EXPECT_TRUE(output.has_preedit());
    EXPECT_FALSE(output.has_candidates());
    const commands::Preedit &conversion = output.preedit();
    EXPECT_EQ(1, conversion.segment_size());
    // "abc" (full-width ASCII)
    EXPECT_EQ("\xEF\xBD\x81\xEF\xBD\x82\xEF\xBD\x83",
              conversion.segment(0).value());
  }
  EXPECT_TRUE(converter.ConvertToHalfWidth(*composer_));
  EXPECT_FALSE(converter.IsCandidateListVisible());
  { // Make sure the output
    commands::Output output;
    converter.FillOutput(*composer_, &output);
    EXPECT_FALSE(output.has_result());
    EXPECT_TRUE(output.has_preedit());
    EXPECT_FALSE(output.has_candidates());
    const commands::Preedit &conversion = output.preedit();
    EXPECT_EQ(1, conversion.segment_size());
    // Half-width of full-width ASCII is plain ASCII "abc".
    EXPECT_EQ("abc", conversion.segment(0).value());
  }
}
// Regression test for http://b/2517514:
// ConvertToHalfWidth converts punctuations differently w/ or w/o kana.
// With kana punctuation in the preedit, the half-width form must be the
// half-width *kana* punctuation "q､｡", not ASCII "q,.".
TEST_F(SessionConverterTest, ConvertToHalfWidth_2) {
  SessionConverter converter(convertermock_.get());
  // "ｑ" (full-width)
  composer_->InsertCharacterKeyAndPreedit("q", "\xef\xbd\x91");
  // "、"
  composer_->InsertCharacterKeyAndPreedit(",", "\xe3\x80\x81");
  // "。"
  composer_->InsertCharacterKeyAndPreedit(".", "\xe3\x80\x82");
  Segments segments;
  { // Initialize segments.
    Segment *segment = segments.add_segment();
    // "ｑ、。"
    segment->set_key("\xef\xbd\x91\xe3\x80\x81\xe3\x80\x82");
    segment->add_candidate()->value = "q,.";
    // "q､｡" (half-width kana punctuation)
    segment->add_candidate()->value = "q\xef\xbd\xa4\xef\xbd\xa1";
  }
  FillT13Ns(&segments, composer_.get());
  convertermock_->SetStartConversionWithComposer(&segments, true);
  EXPECT_TRUE(converter.ConvertToHalfWidth(*composer_));
  EXPECT_FALSE(converter.IsCandidateListVisible());
  { // Make sure the output
    commands::Output output;
    converter.FillOutput(*composer_, &output);
    EXPECT_FALSE(output.has_result());
    EXPECT_TRUE(output.has_preedit());
    EXPECT_FALSE(output.has_candidates());
    const commands::Preedit &conversion = output.preedit();
    EXPECT_EQ(1, conversion.segment_size());
    // "q､｡" -- the half-width kana punctuation candidate is chosen.
    EXPECT_EQ("q\xef\xbd\xa4\xef\xbd\xa1", conversion.segment(0).value());
  }
}
// SwitchKanaType cycles the kana type of the current text.
// From composition mode: original -> full katakana -> half katakana ->
// back to the original composition.
// From conversion mode: converted value -> hiragana -> full katakana ->
// half katakana -> hiragana (and so on).
TEST_F(SessionConverterTest, SwitchKanaType) {
  { // From composition mode.
    SessionConverter converter(convertermock_.get());
    // "あ"
    composer_->InsertCharacterKeyAndPreedit("a", "\xE3\x81\x82");
    // "ｂ" (full-width)
    composer_->InsertCharacterKeyAndPreedit("b", "\xEF\xBD\x82");
    // "ｃ" (full-width)
    composer_->InsertCharacterKeyAndPreedit("c", "\xEF\xBD\x83");
    Segments segments;
    { // Initialize segments.
      Segment *segment = segments.add_segment();
      // "あｂｃ"
      segment->set_key("\xE3\x81\x82\xEF\xBD\x82\xEF\xBD\x83");
      // "あべし"
      segment->add_candidate()->value = "\xE3\x81\x82\xE3\x81\xB9\xE3\x81\x97";
    }
    FillT13Ns(&segments, composer_.get());
    convertermock_->SetStartConversionWithComposer(&segments, true);
    EXPECT_TRUE(converter.SwitchKanaType(*composer_));
    EXPECT_FALSE(converter.IsCandidateListVisible());
    { // Make sure the output
      commands::Output output;
      converter.FillOutput(*composer_, &output);
      EXPECT_FALSE(output.has_result());
      EXPECT_TRUE(output.has_preedit());
      EXPECT_FALSE(output.has_candidates());
      const commands::Preedit &conversion = output.preedit();
      EXPECT_EQ(1, conversion.segment_size());
      // "アｂｃ" (full-width katakana)
      EXPECT_EQ("\xE3\x82\xA2\xEF\xBD\x82\xEF\xBD\x83",
                conversion.segment(0).value());
    }
    EXPECT_TRUE(converter.SwitchKanaType(*composer_));
    EXPECT_FALSE(converter.IsCandidateListVisible());
    {
      commands::Output output;
      converter.FillOutput(*composer_, &output);
      EXPECT_FALSE(output.has_result());
      EXPECT_TRUE(output.has_preedit());
      EXPECT_FALSE(output.has_candidates());
      const commands::Preedit &conversion = output.preedit();
      EXPECT_EQ(1, conversion.segment_size());
      // "ｱbc" (half-width)
      EXPECT_EQ("\xEF\xBD\xB1\x62\x63", conversion.segment(0).value());
    }
    EXPECT_TRUE(converter.SwitchKanaType(*composer_));
    EXPECT_FALSE(converter.IsCandidateListVisible());
    {
      commands::Output output;
      converter.FillOutput(*composer_, &output);
      EXPECT_FALSE(output.has_result());
      EXPECT_TRUE(output.has_preedit());
      EXPECT_FALSE(output.has_candidates());
      const commands::Preedit &conversion = output.preedit();
      EXPECT_EQ(1, conversion.segment_size());
      // "あｂｃ" -- the cycle returns to the original composition.
      EXPECT_EQ("\xE3\x81\x82\xEF\xBD\x82\xEF\xBD\x83",
                conversion.segment(0).value());
    }
  }
  { // From conversion mode
    SessionConverter converter(convertermock_.get());
    composer_->EditErase();
    // "か"
    composer_->InsertCharacterKeyAndPreedit("ka", "\xE3\x81\x8B");
    // "ん"
    composer_->InsertCharacterKeyAndPreedit("n", "\xE3\x82\x93");
    // "じ"
    composer_->InsertCharacterKeyAndPreedit("ji", "\xE3\x81\x98");
    Segments segments;
    { // Initialize segments.
      Segment *segment = segments.add_segment();
      // "かんじ"
      segment->set_key("\xE3\x81\x8B\xE3\x82\x93\xE3\x81\x98");
      // "漢字"
      segment->add_candidate()->value = "\xE6\xBC\xA2\xE5\xAD\x97";
    }
    FillT13Ns(&segments, composer_.get());
    convertermock_->SetStartConversionWithComposer(&segments, true);
    EXPECT_TRUE(converter.Convert(*composer_));
    EXPECT_FALSE(converter.IsCandidateListVisible());
    { // Make sure the output
      commands::Output output;
      converter.FillOutput(*composer_, &output);
      EXPECT_FALSE(output.has_result());
      EXPECT_TRUE(output.has_preedit());
      EXPECT_FALSE(output.has_candidates());
      const commands::Preedit &conversion = output.preedit();
      EXPECT_EQ(1, conversion.segment_size());
      // "漢字"
      EXPECT_EQ("\xE6\xBC\xA2\xE5\xAD\x97",
                conversion.segment(0).value());
    }
    EXPECT_TRUE(converter.SwitchKanaType(*composer_));
    EXPECT_FALSE(converter.IsCandidateListVisible());
    {
      commands::Output output;
      converter.FillOutput(*composer_, &output);
      EXPECT_FALSE(output.has_result());
      EXPECT_TRUE(output.has_preedit());
      EXPECT_FALSE(output.has_candidates());
      const commands::Preedit &conversion = output.preedit();
      EXPECT_EQ(1, conversion.segment_size());
      // "かんじ" -- first switch from kanji goes to hiragana.
      EXPECT_EQ("\xE3\x81\x8B\xE3\x82\x93\xE3\x81\x98",
                conversion.segment(0).value());
    }
    EXPECT_TRUE(converter.SwitchKanaType(*composer_));
    EXPECT_FALSE(converter.IsCandidateListVisible());
    {
      commands::Output output;
      converter.FillOutput(*composer_, &output);
      EXPECT_FALSE(output.has_result());
      EXPECT_TRUE(output.has_preedit());
      EXPECT_FALSE(output.has_candidates());
      const commands::Preedit &conversion = output.preedit();
      EXPECT_EQ(1, conversion.segment_size());
      // "カンジ" (full-width katakana)
      EXPECT_EQ("\xE3\x82\xAB\xE3\x83\xB3\xE3\x82\xB8",
                conversion.segment(0).value());
    }
    EXPECT_TRUE(converter.SwitchKanaType(*composer_));
    EXPECT_FALSE(converter.IsCandidateListVisible());
    {
      commands::Output output;
      converter.FillOutput(*composer_, &output);
      EXPECT_FALSE(output.has_result());
      EXPECT_TRUE(output.has_preedit());
      EXPECT_FALSE(output.has_candidates());
      const commands::Preedit &conversion = output.preedit();
      EXPECT_EQ(1, conversion.segment_size());
      // "ｶﾝｼﾞ" (half-width katakana)
      EXPECT_EQ("\xEF\xBD\xB6\xEF\xBE\x9D\xEF\xBD\xBC\xEF\xBE\x9E",
                conversion.segment(0).value());
    }
    EXPECT_TRUE(converter.SwitchKanaType(*composer_));
    EXPECT_FALSE(converter.IsCandidateListVisible());
    {
      commands::Output output;
      converter.FillOutput(*composer_, &output);
      EXPECT_FALSE(output.has_result());
      EXPECT_TRUE(output.has_preedit());
      EXPECT_FALSE(output.has_candidates());
      const commands::Preedit &conversion = output.preedit();
      EXPECT_EQ(1, conversion.segment_size());
      // "かんじ" -- the cycle wraps back to hiragana.
      EXPECT_EQ("\xE3\x81\x8B\xE3\x82\x93\xE3\x81\x98",
                conversion.segment(0).value());
    }
  }
}
// CommitFirstSegment commits only the leading segment ("かまぼこの"),
// reports its character length, and keeps the conversion session active
// for the remaining segment ("いんぼう").
TEST_F(SessionConverterTest, CommitFirstSegment) {
  SessionConverter converter(convertermock_.get());
  Segments segments;
  SetKamaboko(&segments);
  FillT13Ns(&segments, composer_.get());
  convertermock_->SetStartConversionWithComposer(&segments, true);
  // "かまぼこの"
  const string kKamabokono =
      "\xe3\x81\x8b\xe3\x81\xbe\xe3\x81\xbc\xe3\x81\x93\xe3\x81\xae";
  // "いんぼう"
  const string kInbou =
      "\xe3\x81\x84\xe3\x82\x93\xe3\x81\xbc\xe3\x81\x86";
  // "かまぼこのいんぼう"
  composer_->InsertCharacterPreedit(kKamabokono + kInbou);
  EXPECT_TRUE(converter.Convert(*composer_));
  EXPECT_FALSE(converter.IsCandidateListVisible());
  { // Check the conversion before CommitFirstSegment.
    commands::Output output;
    converter.FillOutput(*composer_, &output);
    EXPECT_FALSE(output.has_result());
    EXPECT_TRUE(output.has_preedit());
    EXPECT_FALSE(output.has_candidates());
    const commands::Preedit &conversion = output.preedit();
    // "かまぼこの"
    EXPECT_EQ(kKamabokono, conversion.segment(0).value());
    // "陰謀"
    EXPECT_EQ("\xe9\x99\xb0\xe8\xac\x80", conversion.segment(1).value());
  }
  { // Initialization of SetSubmitFirstSegment: the mock's post-commit
    // segments contain only the remaining "いんぼう" segment.
    Segments segments_after_submit;
    Segment *segment = segments_after_submit.add_segment();
    // "いんぼう"
    segment->set_key("\xe3\x81\x84\xe3\x82\x93\xe3\x81\xbc\xe3\x81\x86");
    segment->add_candidate()->value = "\xe9\x99\xb0\xe8\xac\x80"; // "陰謀"
    segment->add_candidate()->value = "\xe5\x8d\xb0\xe6\x88\xbf"; // "印房"
    convertermock_->SetSubmitFirstSegment(&segments_after_submit, true);
  }
  size_t size;
  converter.CommitFirstSegment(&size);
  EXPECT_FALSE(converter.IsCandidateListVisible());
  // The reported size is the character length of the committed reading
  // "かまぼこの".
  EXPECT_EQ(Util::CharsLen(kKamabokono), size);
  // The session stays active for the remaining segment.
  EXPECT_TRUE(converter.IsActive());
}
// Committing the raw preedit (no conversion) must emit a result whose key
// and value both equal the composed string, and leave the converter idle.
TEST_F(SessionConverterTest, CommitPreedit) {
  SessionConverter session_converter(convertermock_.get());
  composer_->InsertCharacterPreedit(aiueo_);
  session_converter.CommitPreedit(*composer_);
  composer_->Reset();
  EXPECT_FALSE(session_converter.IsCandidateListVisible());
  // Verify the committed result: value and key are the preedit as-is.
  commands::Output commit_output;
  session_converter.FillOutput(*composer_, &commit_output);
  EXPECT_TRUE(commit_output.has_result());
  EXPECT_FALSE(commit_output.has_preedit());
  EXPECT_FALSE(commit_output.has_candidates());
  const commands::Result &commit_result = commit_output.result();
  EXPECT_EQ(aiueo_, commit_result.value());
  EXPECT_EQ(aiueo_, commit_result.key());
  EXPECT_FALSE(session_converter.IsActive());
}
// CommitSuggestionByIndex commits the suggestion candidate at the given
// index, reports the consumed key length, and returns the converter to
// COMPOSITION state.
TEST_F(SessionConverterTest, CommitSuggestionByIndex) {
  SessionConverter converter(convertermock_.get());
  Segments segments;
  { // Initialize mock segments for suggestion
    segments.set_request_type(Segments::SUGGESTION);
    Segment *segment = segments.add_segment();
    Segment::Candidate *candidate;
    // "も"
    segment->set_key(kChars_Mo);
    candidate = segment->add_candidate();
    // "もずくす"
    candidate->value = kChars_Mozukusu;
    candidate->content_key = kChars_Mozukusu;
    candidate = segment->add_candidate();
    // "ももんが"
    candidate->value = kChars_Momonga;
    candidate->content_key = kChars_Momonga;
  }
  composer_->InsertCharacterPreedit(kChars_Mo);
  // Suggestion
  convertermock_->SetStartSuggestionWithComposer(&segments, true);
  EXPECT_TRUE(converter.Suggest(*composer_));
  EXPECT_TRUE(converter.IsCandidateListVisible());
  EXPECT_TRUE(converter.IsActive());
  { // Check the candidate list
    commands::Output output;
    converter.FillOutput(*composer_, &output);
    EXPECT_FALSE(output.has_result());
    EXPECT_TRUE(output.has_preedit());
    EXPECT_TRUE(output.has_candidates());
    const commands::Preedit &preedit = output.preedit();
    EXPECT_EQ(1, preedit.segment_size());
    EXPECT_EQ(kChars_Mo, preedit.segment(0).value());
    const commands::Candidates &candidates = output.candidates();
    EXPECT_EQ(2, candidates.size());
    EXPECT_EQ(kChars_Mozukusu, candidates.candidate(0).value());
    // Suggestions have no focused candidate until the user moves focus.
    EXPECT_FALSE(candidates.has_focused_index());
  }
  // FinishConversion is expected to return empty Segments.
  // NOTE(review): the scoped_ptr temporary lives until the end of the full
  // expression; this assumes SetFinishConversion copies the Segments --
  // verify against the mock implementation.
  convertermock_->SetFinishConversion(
      scoped_ptr<Segments>(new Segments).get(), true);
  size_t committed_key_size = 0;
  converter.CommitSuggestionByIndex(0, *composer_.get(), &committed_key_size);
  composer_->Reset();
  EXPECT_FALSE(converter.IsCandidateListVisible());
  EXPECT_FALSE(converter.IsActive());
  // 4 == character length of the committed reading "もずくす".
  EXPECT_EQ(4, committed_key_size);
  { // Check the result
    commands::Output output;
    converter.FillOutput(*composer_, &output);
    EXPECT_TRUE(output.has_result());
    EXPECT_FALSE(output.has_preedit());
    EXPECT_FALSE(output.has_candidates());
    const commands::Result &result = output.result();
    EXPECT_EQ(kChars_Mozukusu, result.value());
    EXPECT_EQ(kChars_Mozukusu, result.key());
    EXPECT_EQ(SessionConverterInterface::COMPOSITION,
              converter.GetState());
  }
}
// End-to-end suggestion -> prediction flow:
// 1. Suggest shows suggestion candidates (no focused index; footer label).
// 2. Predict while suggestions exist reuses the suggestion list as-is
//    (focused index appears; footer switches to index/logo).
// 3. Stepping past the suggestion list merges prediction candidates after
//    the suggestions.
// 4. Committing yields the selected prediction with its reading as key.
// 5. Predict without a prior suggestion shows prediction candidates only
//    (no merge).
TEST_F(SessionConverterTest, SuggestAndPredict) {
  SessionConverter converter(convertermock_.get());
  Segments segments;
  { // Initialize mock segments for suggestion
    segments.set_request_type(Segments::SUGGESTION);
    Segment *segment = segments.add_segment();
    Segment::Candidate *candidate;
    // "も"
    segment->set_key(kChars_Mo);
    candidate = segment->add_candidate();
    // "もずくす"
    candidate->value = kChars_Mozukusu;
    candidate->content_key = kChars_Mozukusu;
    candidate = segment->add_candidate();
    // "ももんが"
    candidate->value = kChars_Momonga;
    candidate->content_key = kChars_Momonga;
  }
  composer_->InsertCharacterPreedit(kChars_Mo);
  // Suggestion
  convertermock_->SetStartSuggestionWithComposer(&segments, true);
  EXPECT_TRUE(converter.Suggest(*composer_));
  EXPECT_TRUE(converter.IsCandidateListVisible());
  EXPECT_TRUE(converter.IsActive());
  { // Check the candidate list
    commands::Output output;
    converter.FillOutput(*composer_, &output);
    EXPECT_FALSE(output.has_result());
    EXPECT_TRUE(output.has_preedit());
    EXPECT_TRUE(output.has_candidates());
    EXPECT_TRUE(output.candidates().has_footer());
    // Footer branding differs between the dev channel and other builds.
#ifdef CHANNEL_DEV
    EXPECT_FALSE(output.candidates().footer().has_label());
    EXPECT_TRUE(output.candidates().footer().has_sub_label());
#else  // CHANNEL_DEV
    EXPECT_TRUE(output.candidates().footer().has_label());
    EXPECT_FALSE(output.candidates().footer().has_sub_label());
#endif  // CHANNEL_DEV
    EXPECT_FALSE(output.candidates().footer().index_visible());
    EXPECT_FALSE(output.candidates().footer().logo_visible());
    const commands::Candidates &candidates = output.candidates();
    EXPECT_EQ(2, candidates.size());
    // "もずくす"
    EXPECT_EQ(kChars_Mozukusu, candidates.candidate(0).value());
    EXPECT_FALSE(candidates.has_focused_index());
  }
  segments.Clear();
  { // Initialize mock segments for prediction
    segments.set_request_type(Segments::PREDICTION);
    Segment *segment = segments.add_segment();
    Segment::Candidate *candidate;
    // "も"
    segment->set_key(kChars_Mo);
    candidate = segment->add_candidate();
    // "もずく"
    candidate->value = kChars_Mozuku;
    candidate->content_key = kChars_Mozuku;
    candidate = segment->add_candidate();
    // "ももんが"
    candidate->value = kChars_Momonga;
    candidate->content_key = kChars_Momonga;
    candidate = segment->add_candidate();
    // "モンドリアン"
    candidate->value = "\xE3\x83\xA2\xE3\x83\xB3\xE3\x83\x89"
                       "\xE3\x83\xAA\xE3\x82\xA2\xE3\x83\xB3";
    // "もんどりあん"
    candidate->content_key = "\xE3\x82\x82\xE3\x82\x93\xE3\x81\xA9"
                             "\xE3\x82\x8A\xE3\x81\x82\xE3\x82\x93";
  }
  // Prediction
  convertermock_->SetStartPredictionWithComposer(&segments, true);
  EXPECT_TRUE(converter.Predict(*composer_));
  EXPECT_TRUE(converter.IsCandidateListVisible());
  EXPECT_TRUE(converter.IsActive());
  // If there are suggestion results, the Prediction is not triggered.
  {
    commands::Output output;
    converter.FillOutput(*composer_, &output);
    EXPECT_FALSE(output.has_result());
    EXPECT_TRUE(output.has_preedit());
    EXPECT_TRUE(output.has_candidates());
    EXPECT_FALSE(output.candidates().footer().has_label());
    EXPECT_TRUE(output.candidates().footer().index_visible());
    EXPECT_TRUE(output.candidates().footer().logo_visible());
    // Check the conversion
    const commands::Preedit &conversion = output.preedit();
    EXPECT_EQ(1, conversion.segment_size());
    EXPECT_EQ(kChars_Mozukusu, conversion.segment(0).value());
    // Check the candidate list
    const commands::Candidates &candidates = output.candidates();
    // Candidates should be the same as suggestion
    EXPECT_EQ(2, candidates.size());
    EXPECT_EQ(kChars_Mozukusu, candidates.candidate(0).value());
    EXPECT_EQ(kChars_Momonga, candidates.candidate(1).value());
    EXPECT_TRUE(candidates.has_focused_index());
    EXPECT_EQ(0, candidates.focused_index());
  }
  // Prediction is called once focus steps past the suggestion list.
  converter.CandidateNext(*composer_);
  converter.CandidateNext(*composer_);
  { // Check the candidate list
    commands::Output output;
    converter.FillOutput(*composer_, &output);
    EXPECT_FALSE(output.has_result());
    EXPECT_TRUE(output.has_preedit());
    EXPECT_TRUE(output.has_candidates());
    const commands::Candidates &candidates = output.candidates();
    // Candidates should be merged with the previous suggestions.
    EXPECT_EQ(4, candidates.size());
    // "もずくす"
    EXPECT_EQ(kChars_Mozukusu, candidates.candidate(0).value());
    // "ももんが"
    EXPECT_EQ(kChars_Momonga, candidates.candidate(1).value());
    // "もずく"
    EXPECT_EQ(kChars_Mozuku, candidates.candidate(2).value());
    // "モンドリアン"
    EXPECT_EQ("\xE3\x83\xA2\xE3\x83\xB3\xE3\x83\x89"
              "\xE3\x83\xAA\xE3\x82\xA2\xE3\x83\xB3",
              candidates.candidate(3).value());
    EXPECT_TRUE(candidates.has_focused_index());
  }
  // Select to "モンドリアン".
  converter.CandidateNext(*composer_);
  converter.Commit();
  composer_->Reset();
  { // Check the submitted value
    commands::Output output;
    converter.FillOutput(*composer_, &output);
    EXPECT_TRUE(output.has_result());
    EXPECT_FALSE(output.has_preedit());
    EXPECT_FALSE(output.has_candidates());
    const commands::Result &result = output.result();
    // "モンドリアン"
    EXPECT_EQ("\xE3\x83\xA2\xE3\x83\xB3\xE3\x83\x89"
              "\xE3\x83\xAA\xE3\x82\xA2\xE3\x83\xB3",
              result.value());
    // "もんどりあん"
    EXPECT_EQ("\xE3\x82\x82\xE3\x82\x93\xE3\x81\xA9"
              "\xE3\x82\x8A\xE3\x81\x82\xE3\x82\x93",
              result.key());
  }
  segments.Clear();
  { // Initialize mock segments for prediction
    segments.set_request_type(Segments::PREDICTION);
    Segment *segment = segments.add_segment();
    Segment::Candidate *candidate;
    // "も"
    segment->set_key(kChars_Mo);
    candidate = segment->add_candidate();
    // "もずく"
    candidate->value = kChars_Mozuku;
    candidate->content_key = kChars_Mozuku;
    candidate = segment->add_candidate();
    // "ももんが"
    candidate->value = kChars_Momonga;
    candidate->content_key = kChars_Momonga;
    candidate = segment->add_candidate();
    // "モンドリアン"
    candidate->value = "\xE3\x83\xA2\xE3\x83\xB3\xE3\x83\x89"
                       "\xE3\x83\xAA\xE3\x82\xA2\xE3\x83\xB3";
    // "もんどりあん"
    candidate->content_key = "\xE3\x82\x82\xE3\x82\x93\xE3\x81\xA9"
                             "\xE3\x82\x8A\xE3\x81\x82\xE3\x82\x93";
  }
  // Prediction without suggestion.
  convertermock_->SetStartPredictionWithComposer(&segments, true);
  EXPECT_TRUE(converter.Predict(*composer_));
  EXPECT_TRUE(converter.IsActive());
  {
    commands::Output output;
    converter.FillOutput(*composer_, &output);
    EXPECT_FALSE(output.has_result());
    EXPECT_TRUE(output.has_preedit());
    EXPECT_TRUE(output.has_candidates());
    // Check the conversion
    const commands::Preedit &conversion = output.preedit();
    EXPECT_EQ(1, conversion.segment_size());
    // "もずく"
    EXPECT_EQ(kChars_Mozuku, conversion.segment(0).value());
    // Check the candidate list
    const commands::Candidates &candidates = output.candidates();
    // Candidates should NOT be merged with the previous suggestions.
    EXPECT_EQ(3, candidates.size());
    // "もずく"
    EXPECT_EQ(kChars_Mozuku, candidates.candidate(0).value());
    // "ももんが"
    EXPECT_EQ(kChars_Momonga, candidates.candidate(1).value());
    // "モンドリアン"
    EXPECT_EQ("\xE3\x83\xA2\xE3\x83\xB3\xE3\x83\x89"
              "\xE3\x83\xAA\xE3\x82\xA2\xE3\x83\xB3",
              candidates.candidate(2).value());
    EXPECT_TRUE(candidates.has_focused_index());
  }
}
// Suggestion must be suppressed while composing in a password field, even
// when the mock converter has suggestion results ready.
TEST_F(SessionConverterTest, SuppressSuggestionOnPasswordField) {
  SessionConverter converter(convertermock_.get());
  Segments segments;
  { // Initialize mock segments for suggestion
    segments.set_request_type(Segments::SUGGESTION);
    Segment *segment = segments.add_segment();
    Segment::Candidate *candidate;
    // "も"
    segment->set_key(kChars_Mo);
    candidate = segment->add_candidate();
    // "もずくす"
    candidate->value = kChars_Mozukusu;
    candidate->content_key = kChars_Mozukusu;
    candidate = segment->add_candidate();
    // "ももんが"
    candidate->value = kChars_Momonga;
    candidate->content_key = kChars_Momonga;
  }
  composer_->SetInputFieldType(commands::SessionCommand::PASSWORD);
  composer_->InsertCharacterPreedit(kChars_Mo);
  // Suggestion
  convertermock_->SetStartSuggestionWithComposer(&segments, true);
  // No candidates should be visible because we are on password field.
  EXPECT_FALSE(converter.Suggest(*composer_));
  EXPECT_FALSE(converter.IsCandidateListVisible());
  EXPECT_FALSE(converter.IsActive());
}
// ExpandSuggestion extends a SUGGESTION candidate list with PREDICTION
// results: predictions are appended after the suggested candidates, and
// any value already present ("S1" below) is dropped from the merged list.
TEST_F(SessionConverterTest, ExpandSuggestion) {
  SessionConverter converter(convertermock_.get());
  const char *kSuggestionValues[] = {
      "S0",
      "S1",
      "S2",
  };
  const char *kPredictionValues[] = {
      "P0",
      "P1",
      "P2",
      // Duplicate entry. Any duplication should not exist
      // in the candidate list.
      "S1",
      "P3",
  };
  const char kKey[] = "key";
  // Index of the duplicated value ("S1") within kPredictionValues.
  // size_t to keep comparisons with size_t indices below sign-correct.
  const size_t kDuplicationIndex = 3;
  Segments segments;
  { // Initialize mock segments for suggestion
    segments.set_request_type(Segments::SUGGESTION);
    Segment *segment = segments.add_segment();
    Segment::Candidate *candidate;
    segment->set_key(kKey);
    for (size_t i = 0; i < arraysize(kSuggestionValues); ++i) {
      candidate = segment->add_candidate();
      candidate->value = kSuggestionValues[i];
      candidate->content_key = kKey;
    }
  }
  composer_->InsertCharacterPreedit(kKey);
  // Suggestion
  convertermock_->SetStartSuggestionWithComposer(&segments, true);
  EXPECT_TRUE(converter.Suggest(*composer_));
  EXPECT_TRUE(converter.IsCandidateListVisible());
  EXPECT_TRUE(converter.IsActive());
  { // Check the candidate list
    commands::Output output;
    converter.FillOutput(*composer_, &output);
    const commands::Candidates &candidates = output.candidates();
    EXPECT_EQ(commands::SUGGESTION, candidates.category());
    EXPECT_EQ(commands::SUGGESTION, output.all_candidate_words().category());
    EXPECT_EQ(arraysize(kSuggestionValues), candidates.size());
    for (size_t i = 0; i < arraysize(kSuggestionValues); ++i) {
      EXPECT_EQ(kSuggestionValues[i], candidates.candidate(i).value());
    }
  }
  segments.Clear();
  { // Initialize mock segments for prediction (== expanding suggestion)
    segments.set_request_type(Segments::PREDICTION);
    Segment *segment = segments.add_segment();
    Segment::Candidate *candidate;
    segment->set_key(kKey);
    for (size_t i = 0; i < arraysize(kPredictionValues); ++i) {
      candidate = segment->add_candidate();
      candidate->value = kPredictionValues[i];
      candidate->content_key = kKey;
    }
  }
  // Expand suggestion candidate
  convertermock_->SetStartPredictionWithComposer(&segments, true);
  EXPECT_TRUE(converter.ExpandSuggestion(*composer_));
  EXPECT_TRUE(converter.IsCandidateListVisible());
  EXPECT_TRUE(converter.IsActive());
  { // Check the candidate list
    commands::Output output;
    converter.FillOutput(*composer_, &output);
    const commands::Candidates &candidates = output.candidates();
    EXPECT_EQ(commands::SUGGESTION, candidates.category());
    EXPECT_EQ(commands::SUGGESTION, output.all_candidate_words().category());
    // -1 is for the duplicate entry.
    EXPECT_EQ(arraysize(kSuggestionValues) + arraysize(kPredictionValues) - 1,
              candidates.size());
    size_t i;
    for (i = 0; i < arraysize(kSuggestionValues); ++i) {
      EXPECT_EQ(kSuggestionValues[i], candidates.candidate(i).value());
    }
    // Skip over the duplicated prediction entry when mapping the merged
    // list back to kPredictionValues.
    for (; i < candidates.size(); ++i) {
      size_t index_in_prediction = i - arraysize(kSuggestionValues);
      if (index_in_prediction >= kDuplicationIndex) {
        ++index_in_prediction;
      }
      EXPECT_EQ(kPredictionValues[index_in_prediction],
                candidates.candidate(i).value());
    }
  }
}
// AppendCandidateList builds the internal CandidateList from the current
// segments.  This test drives converter internals directly (the fixture
// has access to state_, operation_preferences_, SetSegments and
// GetCandidateList), checking that re-appending after a new candidate does
// not duplicate ids or the t13n sub-list, and that newly set meta
// candidates are ignored.
TEST_F(SessionConverterTest, AppendCandidateList) {
  SessionConverter converter(convertermock_.get());
  converter.state_ = SessionConverterInterface::CONVERSION;
  converter.operation_preferences_.use_cascading_window = true;
  Segments segments;
  {
    SetAiueo(&segments);
    FillT13Ns(&segments, composer_.get());
    converter.SetSegments(segments);
    converter.AppendCandidateList();
    const CandidateList &candidate_list = converter.GetCandidateList();
    // 3 == hiragana cand, katakana cand and sub candidate list.
    EXPECT_EQ(3, candidate_list.size());
    size_t sub_cand_list_count = 0;
    for (size_t i = 0; i < candidate_list.size(); ++i) {
      if (candidate_list.candidate(i).IsSubcandidateList()) {
        ++sub_cand_list_count;
      }
    }
    // Sub candidate list for T13N.
    EXPECT_EQ(1, sub_cand_list_count);
  }
  {
    // Add one more conversion candidate and re-append.
    Segment *segment = segments.mutable_conversion_segment(0);
    Segment::Candidate *candidate = segment->add_candidate();
    // "あいうえお_2"
    candidate->value =
        "\xe3\x81\x82\xe3\x81\x84\xe3\x81\x86\xe3\x81\x88\xe3\x81\x8a_2";
    // New meta candidates.
    // They should be ignored.
    vector<Segment::Candidate> *meta_candidates =
        segment->mutable_meta_candidates();
    meta_candidates->clear();
    meta_candidates->resize(1);
    meta_candidates->at(0).Init();
    meta_candidates->at(0).value = "t13nValue";
    meta_candidates->at(0).content_value = "t13nValue";
    meta_candidates->at(0).content_key = segment->key();
    converter.SetSegments(segments);
    converter.AppendCandidateList();
    const CandidateList &candidate_list = converter.GetCandidateList();
    // 4 == hiragana cand, katakana cand, hiragana cand2
    // and sub candidate list.
    EXPECT_EQ(4, candidate_list.size());
    size_t sub_cand_list_count = 0;
    set<int> id_set;
    for (size_t i = 0; i < candidate_list.size(); ++i) {
      if (candidate_list.candidate(i).IsSubcandidateList()) {
        ++sub_cand_list_count;
      } else {
        // No duplicate ids are expected.
        int id = candidate_list.candidate(i).id();
        // EXPECT_EQ(iterator, iterator) might cause compile error in specific
        // environment.
        EXPECT_TRUE(id_set.end() == id_set.find(id));
        id_set.insert(id);
      }
    }
    // Sub candidate list shouldn't be duplicated.
    EXPECT_EQ(1, sub_cand_list_count);
  }
}
// SetOperationPreferences applies candidate shortcut changes on the fly:
// shortcuts appear/disappear in the output while the already-visible
// candidate list stays visible.
TEST_F(SessionConverterTest, ReloadConfig) {
  SessionConverter converter(convertermock_.get());
  Segments segments;
  SetAiueo(&segments);
  FillT13Ns(&segments, composer_.get());
  convertermock_->SetStartConversionWithComposer(&segments, true);
  composer_->InsertCharacterPreedit("aiueo");
  EXPECT_TRUE(converter.Convert(*composer_));
  converter.SetCandidateListVisible(true);
  { // Set OperationPreferences
    const char kShortcut123456789[] = "123456789";
    OperationPreferences preferences;
    preferences.use_cascading_window = false;
    preferences.candidate_shortcuts = kShortcut123456789;
    converter.SetOperationPreferences(preferences);
    // Updating preferences must not hide the candidate list.
    EXPECT_TRUE(converter.IsCandidateListVisible());
  }
  { // Check the config update: numeric shortcuts are now assigned.
    commands::Output output;
    converter.FillOutput(*composer_, &output);
    EXPECT_FALSE(output.has_result());
    EXPECT_TRUE(output.has_preedit());
    EXPECT_TRUE(output.has_candidates());
    const commands::Candidates &candidates = output.candidates();
    EXPECT_EQ("1", candidates.candidate(0).annotation().shortcut());
    EXPECT_EQ("2", candidates.candidate(1).annotation().shortcut());
  }
  { // Set OperationPreferences #2: clear the shortcuts.
    OperationPreferences preferences;
    preferences.use_cascading_window = false;
    preferences.candidate_shortcuts = "";
    converter.SetOperationPreferences(preferences);
  }
  { // Check the config update: shortcuts are gone again.
    commands::Output output;
    converter.FillOutput(*composer_, &output);
    EXPECT_FALSE(output.has_result());
    EXPECT_TRUE(output.has_preedit());
    EXPECT_TRUE(output.has_candidates());
    const commands::Candidates &candidates = output.candidates();
    EXPECT_TRUE(candidates.candidate(0).annotation().shortcut().empty());
    EXPECT_TRUE(candidates.candidate(1).annotation().shortcut().empty());
  }
}
// Verifies that PopOutput always fills all_candidate_words — even while the
// candidate window is hidden — and that its focused_index and category track
// candidate navigation (CandidateNext) and segment focus (SegmentFocusRight).
TEST_F(SessionConverterTest, OutputAllCandidateWords) {
  SessionConverter converter(convertermock_.get());
  Segments segments;
  SetKamaboko(&segments);

  // "かまぼこの"
  const string kKamabokono =
      "\xe3\x81\x8b\xe3\x81\xbe\xe3\x81\xbc\xe3\x81\x93\xe3\x81\xae";
  // "いんぼう"
  const string kInbou =
      "\xe3\x81\x84\xe3\x82\x93\xe3\x81\xbc\xe3\x81\x86";
  composer_->InsertCharacterPreedit(kKamabokono + kInbou);

  FillT13Ns(&segments, composer_.get());
  convertermock_->SetStartConversionWithComposer(&segments, true);

  commands::Output output;

  EXPECT_TRUE(converter.Convert(*composer_));
  {
    // Right after Convert: candidate window hidden, but the full word list
    // is still exported.
    ASSERT_TRUE(converter.IsActive());
    EXPECT_FALSE(converter.IsCandidateListVisible());

    output.Clear();
    converter.PopOutput(*composer_, &output);
    EXPECT_FALSE(output.has_result());
    EXPECT_TRUE(output.has_preedit());
    EXPECT_FALSE(output.has_candidates());
    EXPECT_TRUE(output.has_all_candidate_words());

    EXPECT_EQ(0, output.all_candidate_words().focused_index());
    EXPECT_EQ(commands::CONVERSION, output.all_candidate_words().category());
    // [ "かまぼこの", "カマボコの", "カマボコノ" (t13n), "かまぼこの" (t13n),
    //   "カマボコノ" (t13n) ]
    EXPECT_EQ(5, output.all_candidate_words().candidates_size());
  }

  converter.CandidateNext(*composer_);
  {
    // Moving to the next candidate shows the window and advances the focus.
    ASSERT_TRUE(converter.IsActive());
    EXPECT_TRUE(converter.IsCandidateListVisible());

    output.Clear();
    converter.PopOutput(*composer_, &output);
    EXPECT_FALSE(output.has_result());
    EXPECT_TRUE(output.has_preedit());
    EXPECT_TRUE(output.has_candidates());
    EXPECT_TRUE(output.has_all_candidate_words());

    EXPECT_EQ(1, output.all_candidate_words().focused_index());
    EXPECT_EQ(commands::CONVERSION, output.all_candidate_words().category());
    // [ "かまぼこの", "カマボコの", "カマボコノ" (t13n), "かまぼこの" (t13n),
    //   "カマボコノ" (t13n) ]
    EXPECT_EQ(5, output.all_candidate_words().candidates_size());
  }

  converter.SegmentFocusRight();
  {
    // Focusing the next segment hides the window again and resets the focus
    // to that segment's first candidate.
    ASSERT_TRUE(converter.IsActive());
    EXPECT_FALSE(converter.IsCandidateListVisible());

    output.Clear();
    converter.PopOutput(*composer_, &output);
    EXPECT_FALSE(output.has_result());
    EXPECT_TRUE(output.has_preedit());
    EXPECT_FALSE(output.has_candidates());
    EXPECT_TRUE(output.has_all_candidate_words());

    EXPECT_EQ(0, output.all_candidate_words().focused_index());
    EXPECT_EQ(commands::CONVERSION, output.all_candidate_words().category());
    // [ "陰謀", "印房", "インボウ" (t13n), "いんぼう" (t13n), "インボウ" (t13n) ]
    EXPECT_EQ(5, output.all_candidate_words().candidates_size());
  }
}
// FillContext must export the concatenated values of the history segments as
// preceding_text, but must not overwrite a preceding_text that the client has
// already set.
TEST_F(SessionConverterTest, FillContext) {
  SessionConverter converter(convertermock_.get());
  Segments segments;

  // Set history segments.
  // "車で", "行く"
  const string kHistoryInput[] = {
      "\xE8\xBB\x8A\xE3\x81\xA7",
      "\xE8\xA1\x8C\xE3\x81\x8F"
  };
  for (size_t i = 0; i < arraysize(kHistoryInput); ++i) {
    Segment *segment = segments.add_segment();
    segment->set_segment_type(Segment::HISTORY);
    Segment::Candidate *candidate = segment->add_candidate();
    candidate->value = kHistoryInput[i];
  }
  convertermock_->SetFinishConversion(&segments, true);
  // Commit installs the mocked history segments into the converter.
  converter.CommitPreedit(*composer_);

  // FillContext must fill concatenation of values of history segments into
  // preceding_text.
  commands::Context context;
  converter.FillContext(&context);
  EXPECT_TRUE(context.has_preceding_text());
  EXPECT_EQ(kHistoryInput[0] + kHistoryInput[1], context.preceding_text());

  // If preceding text has been set already, do not overwrite it.
  // "自動車で行く"
  const char kPrecedingText[] = "\xE8\x87\xAA\xE5\x8B\x95\xE8\xBB\x8A"
                                "\xE3\x81\xA7\xE8\xA1\x8C\xE3\x81\x8F";
  context.set_preceding_text(kPrecedingText);
  converter.FillContext(&context);
  EXPECT_EQ(kPrecedingText, context.preceding_text());
}
// GetPreedit returns candidate content keys while in PREDICTION/SUGGESTION,
// but the raw segment keys while in CONVERSION; GetConversion always returns
// the currently selected candidate values.
TEST_F(SessionConverterTest, GetPreeditAndGetConversion) {
  Segments segments;

  Segment *segment;
  Segment::Candidate *candidate;

  // One HISTORY segment (ignored by the preedit/conversion getters) ...
  segment = segments.add_segment();
  segment->set_segment_type(Segment::HISTORY);
  segment->set_key("[key:history1]");
  candidate = segment->add_candidate();
  candidate->content_key = "[content_key:history1-1]";
  candidate = segment->add_candidate();
  candidate->content_key = "[content_key:history1-2]";

  // ... followed by one FREE conversion segment with two candidates.
  segment = segments.add_segment();
  segment->set_segment_type(Segment::FREE);
  segment->set_key("[key:conversion1]");
  candidate = segment->add_candidate();
  candidate->key = "[key:conversion1-1]";
  candidate->content_key = "[content_key:conversion1-1]";
  candidate->value = "[value:conversion1-1]";
  candidate = segment->add_candidate();
  candidate->key = "[key:conversion1-2]";
  candidate->content_key = "[content_key:conversion1-2]";
  candidate->value = "[value:conversion1-2]";

  {
    // PREDICTION: after CandidateNext the second candidate is focused, so
    // its content_key/value are returned.
    SessionConverter converter(convertermock_.get());
    convertermock_->SetStartPredictionWithComposer(&segments, true);
    converter.Predict(*composer_);
    converter.CandidateNext(*composer_);
    string preedit;
    converter.GetPreedit(0, 1, &preedit);
    EXPECT_EQ("[content_key:conversion1-2]", preedit);
    string conversion;
    converter.GetConversion(0, 1, &conversion);
    EXPECT_EQ("[value:conversion1-2]", conversion);
  }
  {
    // SUGGESTION: the first candidate stays focused.
    SessionConverter converter(convertermock_.get());
    convertermock_->SetStartSuggestionWithComposer(&segments, true);
    converter.Suggest(*composer_);
    string preedit;
    converter.GetPreedit(0, 1, &preedit);
    EXPECT_EQ("[content_key:conversion1-1]", preedit);
    string conversion;
    converter.GetConversion(0, 1, &conversion);
    EXPECT_EQ("[value:conversion1-1]", conversion);
  }

  // Add a second conversion segment for the multi-segment CONVERSION case.
  segment = segments.add_segment();
  segment->set_segment_type(Segment::FREE);
  segment->set_key("[key:conversion2]");
  candidate = segment->add_candidate();
  candidate->key = "[key:conversion2-1]";
  candidate->content_key = "[content_key:conversion2-1]";
  candidate->value = "[value:conversion2-1]";
  candidate = segment->add_candidate();
  candidate->key = "[key:conversion2-2]";
  candidate->content_key = "[content_key:conversion2-2]";
  candidate->value = "[value:conversion2-2]";

  {
    // CONVERSION: preedit is built from segment keys; CandidateNext only
    // affects the focused (first) segment, so its second candidate plus the
    // second segment's first candidate form the conversion string.
    SessionConverter converter(convertermock_.get());
    convertermock_->SetStartConversionWithComposer(&segments, true);
    converter.Convert(*composer_);
    converter.CandidateNext(*composer_);
    string preedit;
    converter.GetPreedit(0, 2, &preedit);
    EXPECT_EQ("[key:conversion1][key:conversion2]", preedit);
    string conversion;
    converter.GetConversion(0, 2, &conversion);
    EXPECT_EQ("[value:conversion1-2][value:conversion2-1]", conversion);
  }
}
// GetSegments returns a copy of the converter's internal segments and
// SetSegments replaces them; edits made to the copy (changed history value,
// appended FREE segment) must be visible through a subsequent GetSegments.
TEST_F(SessionConverterTest, GetAndSetSegments) {
  SessionConverter converter(convertermock_.get());
  Segments segments;

  // Set history segments.
  // "車で", "行く"
  const string kHistoryInput[] = {
      "\xE8\xBB\x8A\xE3\x81\xA7",
      "\xE8\xA1\x8C\xE3\x81\x8F"
  };
  for (size_t i = 0; i < arraysize(kHistoryInput); ++i) {
    Segment *segment = segments.add_segment();
    segment->set_segment_type(Segment::HISTORY);
    Segment::Candidate *candidate = segment->add_candidate();
    candidate->value = kHistoryInput[i];
  }
  convertermock_->SetFinishConversion(&segments, true);
  converter.CommitPreedit(*composer_);

  Segments src;
  converter.GetSegments(&src);
  ASSERT_EQ(2, src.history_segments_size());
  // "車で"
  EXPECT_EQ("\xE8\xBB\x8A\xE3\x81\xA7",
            src.history_segment(0).candidate(0).value);
  // "行く"
  EXPECT_EQ("\xE8\xA1\x8C\xE3\x81\x8F",
            src.history_segment(1).candidate(0).value);

  // Mutate the copy: replace the first history value ("歩いて") and append
  // a FREE segment.
  src.mutable_history_segment(0)->mutable_candidate(0)->value
      = "\xE6\xAD\xA9\xE3\x81\x84\xE3\x81\xA6";
  Segment *segment = src.add_segment();
  segment->set_segment_type(Segment::FREE);
  Segment::Candidate *candidate = segment->add_candidate();
  candidate->value = "?";

  converter.SetSegments(src);

  Segments dest;
  converter.GetSegments(&dest);

  ASSERT_EQ(2, dest.history_segments_size());
  ASSERT_EQ(1, dest.conversion_segments_size());
  // "歩いて"
  EXPECT_EQ(src.history_segment(0).candidate(0).value,
            dest.history_segment(0).candidate(0).value);
  // "行く"
  EXPECT_EQ(src.history_segment(1).candidate(0).value,
            dest.history_segment(1).candidate(0).value);
  // "?"
  // NOTE(review): history_segments_size() is 2, yet index 2 is read here —
  // presumably history_segment(i) indexes the full segment list so this
  // reaches the appended FREE segment; confirm against Segments' API.
  EXPECT_EQ(src.history_segment(2).candidate(0).value,
            dest.history_segment(2).candidate(0).value);
}
// CopyFrom must duplicate the whole converter state — including operation
// preferences — both before any conversion and after one has started.
TEST_F(SessionConverterTest, CopyFrom) {
  SessionConverter src(convertermock_.get());
  // "かまぼこの"
  const string kKamabokono =
      "\xe3\x81\x8b\xe3\x81\xbe\xe3\x81\xbc\xe3\x81\x93\xe3\x81\xae";
  // "いんぼう"
  const string kInbou =
      "\xe3\x81\x84\xe3\x82\x93\xe3\x81\xbc\xe3\x81\x86";
  // "陰謀"
  const string kInbouKanji = "\xE9\x99\xB0\xE8\xAC\x80";
  const char *kShortcut = "987654321";

  {  // create source converter
    Segments segments;
    SetKamaboko(&segments);

    convertermock_->SetStartConversionWithComposer(&segments, true);

    OperationPreferences operation_preferences;
    operation_preferences.use_cascading_window = false;
    operation_preferences.candidate_shortcuts = kShortcut;
    src.SetOperationPreferences(operation_preferences);
  }

  {  // validation
    // Copy and validate
    SessionConverter dest(convertermock_.get());
    dest.CopyFrom(src);
    ExpectSameSessionConverter(src, dest);

    // Convert source
    EXPECT_TRUE(src.Convert(*composer_));
    EXPECT_TRUE(src.IsActive());

    // Convert destination and validate: both converters should now agree.
    EXPECT_TRUE(dest.Convert(*composer_));
    ExpectSameSessionConverter(src, dest);

    // Copy converted and validate
    dest.CopyFrom(src);
    ExpectSameSessionConverter(src, dest);
  }
}
// Suggest() in the suggestion state was not accepted. (http://b/1948334)
// Calling Suggest() while already suggesting must replace the previous
// candidate list with the new one instead of being ignored.
TEST_F(SessionConverterTest, Issue1948334) {
  SessionConverter converter(convertermock_.get());
  Segments segments;

  {  // Initialize mock segments for the first suggestion
    segments.set_request_type(Segments::SUGGESTION);
    Segment *segment = segments.add_segment();
    Segment::Candidate *candidate;
    // "も"
    segment->set_key(kChars_Mo);
    candidate = segment->add_candidate();
    // "もずくす"
    candidate->value = kChars_Mozukusu;
    candidate->content_key = kChars_Mozukusu;
    candidate = segment->add_candidate();
    // "ももんが"
    candidate->value = kChars_Momonga;
    candidate->content_key = kChars_Momonga;
  }
  composer_->InsertCharacterPreedit(kChars_Mo);

  // Suggestion
  convertermock_->SetStartSuggestionWithComposer(&segments, true);
  EXPECT_TRUE(converter.Suggest(*composer_));
  EXPECT_TRUE(converter.IsActive());

  segments.Clear();
  {  // Initialize mock segments for the second suggestion: the longer key
     // "もず" now matches only one candidate.
    segments.set_request_type(Segments::SUGGESTION);
    Segment *segment = segments.add_segment();
    Segment::Candidate *candidate;
    // "もず"
    segment->set_key("\xE3\x82\x82\xE3\x81\x9A");
    candidate = segment->add_candidate();
    // "もずくす"
    candidate->value = kChars_Mozukusu;
    candidate->content_key = kChars_Mozukusu;
  }
  composer_->InsertCharacterPreedit("\xE3\x82\x82\xE3\x81\x9A");

  // Suggestion
  convertermock_->SetStartSuggestionWithComposer(&segments, true);
  EXPECT_TRUE(converter.Suggest(*composer_));
  EXPECT_TRUE(converter.IsActive());

  {  // Check the candidate list
    commands::Output output;
    converter.FillOutput(*composer_, &output);
    EXPECT_FALSE(output.has_result());
    EXPECT_TRUE(output.has_preedit());
    EXPECT_TRUE(output.has_candidates());

    const commands::Candidates &candidates = output.candidates();
    // Candidates should be merged with the previous suggestions.
    EXPECT_EQ(1, candidates.size());
    // "もずくす"
    EXPECT_EQ(kChars_Mozukusu, candidates.candidate(0).value());
    EXPECT_FALSE(candidates.has_focused_index());
  }
}
TEST_F(SessionConverterTest, Issue1960362) {
  // Testcase against http://b/1960362, a candidate list was not
  // updated when ConvertToTransliteration changed the size of segments.
  table_->AddRule("zyu", "ZYU", "");
  table_->AddRule("jyu", "ZYU", "");
  table_->AddRule("tt", "XTU", "t");
  table_->AddRule("ta", "TA", "");

  // Composition becomes "ZYU" + pending "t".
  composer_->InsertCharacter("j");
  composer_->InsertCharacter("y");
  composer_->InsertCharacter("u");
  composer_->InsertCharacter("t");

  SessionConverter converter(convertermock_.get());

  // Two-segment conversion result ("ZYU" / "t") ...
  Segments segments;
  {
    segments.set_request_type(Segments::CONVERSION);
    Segment *segment;
    Segment::Candidate *candidate;

    segment = segments.add_segment();
    segment->set_key("ZYU");
    candidate = segment->add_candidate();
    candidate->value = "[ZYU]";
    candidate->content_key = "[ZYU]";

    segment = segments.add_segment();
    segment->set_key("t");
    candidate = segment->add_candidate();
    candidate->value = "[t]";
    candidate->content_key = "[t]";
  }

  // ... which the resize collapses into a single "ZYUt" segment.
  Segments resized_segments;
  {
    resized_segments.set_request_type(Segments::CONVERSION);
    Segment *segment = resized_segments.add_segment();
    Segment::Candidate *candidate;
    segment->set_key("ZYUt");
    candidate = segment->add_candidate();
    candidate->value = "[ZYUt]";
    candidate->content_key = "[ZYUt]";
  }

  FillT13Ns(&segments, composer_.get());
  convertermock_->SetStartConversionWithComposer(&segments, true);
  FillT13Ns(&resized_segments, composer_.get());
  convertermock_->SetResizeSegment1(&resized_segments, true);

  EXPECT_TRUE(converter.ConvertToTransliteration(*composer_,
                                                 transliteration::HALF_ASCII));
  EXPECT_FALSE(converter.IsCandidateListVisible());

  // After the resize, the preedit must reflect the new single segment as the
  // half-ASCII transliteration of the full composition.
  commands::Output output;
  converter.FillOutput(*composer_, &output);
  EXPECT_FALSE(output.has_result());
  EXPECT_TRUE(output.has_preedit());
  EXPECT_FALSE(output.has_candidates());

  const commands::Preedit &conversion = output.preedit();
  EXPECT_EQ("jyut", conversion.segment(0).value());
}
TEST_F(SessionConverterTest, Issue1978201) {
  // This is a unittest against http://b/1978201
  // Segment-manipulation calls while in prediction must not change the
  // predicted result.
  SessionConverter converter(convertermock_.get());
  Segments segments;
  composer_->InsertCharacterPreedit(kChars_Mo);

  {  // Initialize mock segments for prediction
    segments.set_request_type(Segments::PREDICTION);
    Segment *segment = segments.add_segment();
    Segment::Candidate *candidate;
    // "も"
    segment->set_key(kChars_Mo);
    candidate = segment->add_candidate();
    // "もずく"
    candidate->value = kChars_Mozuku;
    candidate->content_key = kChars_Mozuku;
    candidate = segment->add_candidate();
    // "ももんが"
    candidate->value = kChars_Momonga;
    candidate->content_key = kChars_Momonga;
  }

  // Prediction
  convertermock_->SetStartPredictionWithComposer(&segments, true);
  EXPECT_TRUE(converter.Predict(*composer_));
  EXPECT_TRUE(converter.IsActive());

  {  // Check the conversion
    commands::Output output;
    converter.FillOutput(*composer_, &output);
    EXPECT_FALSE(output.has_result());
    EXPECT_TRUE(output.has_preedit());
    EXPECT_TRUE(output.has_candidates());

    const commands::Preedit &conversion = output.preedit();
    EXPECT_EQ(1, conversion.segment_size());
    EXPECT_EQ(kChars_Mozuku, conversion.segment(0).value());
  }

  // Meaningless segment manipulations: prediction has a single segment, so
  // these should be no-ops.
  converter.SegmentWidthShrink();
  converter.SegmentFocusLeft();
  converter.SegmentFocusLast();

  {  // Check the conversion again
    commands::Output output;
    converter.FillOutput(*composer_, &output);
    EXPECT_FALSE(output.has_result());
    EXPECT_TRUE(output.has_preedit());
    EXPECT_FALSE(output.has_candidates());

    const commands::Preedit &conversion = output.preedit();
    EXPECT_EQ(1, conversion.segment_size());
    EXPECT_EQ(kChars_Mozuku, conversion.segment(0).value());
  }
}
// Regression test for http://b/1981020: committing a preedit of hiragana
// "ゔ" characters must hand FinishConversion a candidate whose value and
// content_value are the katakana "ヴ" form.
TEST_F(SessionConverterTest, Issue1981020) {
  SessionConverter converter(convertermock_.get());
  // hiragana "ヴヴヴヴ"
  composer_->InsertCharacterPreedit
      ("\xE3\x82\x94\xE3\x82\x94\xE3\x82\x94\xE3\x82\x94");
  Segments segments;
  convertermock_->SetFinishConversion(&segments, true);
  converter.CommitPreedit(*composer_.get());
  // Retrieve the segments the converter passed to FinishConversion.
  convertermock_->GetFinishConversion(&segments);
  // katakana "ヴヴヴヴ"
  EXPECT_EQ("\xE3\x83\xB4\xE3\x83\xB4\xE3\x83\xB4\xE3\x83\xB4",
            segments.conversion_segment(0).candidate(0).value);
  EXPECT_EQ("\xE3\x83\xB4\xE3\x83\xB4\xE3\x83\xB4\xE3\x83\xB4",
            segments.conversion_segment(0).candidate(0).content_value);
}
TEST_F(SessionConverterTest, Issue2029557) {
  // Unittest against http://b/2029557
  // a<tab><F6> raised a DCHECK error.
  // ConvertToTransliteration right after a prediction must not crash.
  SessionConverter converter(convertermock_.get());
  // Composition (as "a")
  composer_->InsertCharacterPreedit("a");

  // Prediction (as <tab>)
  Segments segments;
  SetAiueo(&segments);
  convertermock_->SetStartPredictionWithComposer(&segments, true);
  EXPECT_TRUE(converter.Predict(*composer_));
  EXPECT_TRUE(converter.IsActive());

  // Transliteration (as <F6>)
  segments.Clear();
  Segment *segment = segments.add_segment();
  segment->set_key("a");
  Segment::Candidate *candidate = segment->add_candidate();
  candidate->value = "a";

  FillT13Ns(&segments, composer_.get());
  convertermock_->SetStartConversionWithComposer(&segments, true);
  EXPECT_TRUE(converter.ConvertToTransliteration(*composer_,
                                                 transliteration::HIRAGANA));
  EXPECT_TRUE(converter.IsActive());
}
TEST_F(SessionConverterTest, Issue2031986) {
  // Unittest against http://b/2031986
  // aaaaa<Shift+Enter> raised a CRT error.
  // A suggestion followed by input that yields no suggestions must cleanly
  // deactivate the converter.
  SessionConverter converter(convertermock_.get());

  {  // Initialize a suggest result triggered by "aaaa".
    Segments segments;
    Segment *segment = segments.add_segment();
    segment->set_key("aaaa");
    Segment::Candidate *candidate;
    candidate = segment->add_candidate();
    candidate->value = "AAAA";
    candidate = segment->add_candidate();
    candidate->value = "Aaaa";
    convertermock_->SetStartSuggestionWithComposer(&segments, true);
  }
  // Get suggestion
  composer_->InsertCharacterPreedit("aaaa");
  EXPECT_TRUE(converter.Suggest(*composer_));
  EXPECT_TRUE(converter.IsActive());

  {  // Initialize no suggest result triggered by "aaaaa".
    Segments segments;
    Segment *segment = segments.add_segment();
    segment->set_key("aaaaa");
    // The mock's 'false' return simulates "no suggestions available".
    convertermock_->SetStartSuggestionWithComposer(&segments, false);
  }
  // Hide suggestion
  composer_->InsertCharacterPreedit("a");
  EXPECT_FALSE(converter.Suggest(*composer_));
  EXPECT_FALSE(converter.IsActive());
}
TEST_F(SessionConverterTest, Issue2040116) {
  // Unittest against http://b/2040116
  //
  // It happens when the first Predict returns results but the next
  // MaybeExpandPrediction does not return any results.  That's a
  // trick by GoogleSuggest.
  SessionConverter converter(convertermock_.get());
  composer_->InsertCharacterPreedit("G");

  {
    // Initialize no predict result.
    Segments segments;
    segments.set_request_type(Segments::PREDICTION);
    Segment *segment = segments.add_segment();
    segment->set_key("G");
    convertermock_->SetStartPredictionWithComposer(&segments, false);
  }
  // Get prediction: nothing available yet, converter stays inactive.
  EXPECT_FALSE(converter.Predict(*composer_));
  EXPECT_FALSE(converter.IsActive());

  {
    // Initialize a suggest result triggered by "G".
    Segments segments;
    segments.set_request_type(Segments::PREDICTION);
    Segment *segment = segments.add_segment();
    segment->set_key("G");
    Segment::Candidate *candidate;
    candidate = segment->add_candidate();
    candidate->value = "GoogleSuggest";
    convertermock_->SetStartPredictionWithComposer(&segments, true);
  }
  // Get prediction again
  EXPECT_TRUE(converter.Predict(*composer_));
  EXPECT_TRUE(converter.IsActive());

  {  // Check the conversion.
    commands::Output output;
    converter.FillOutput(*composer_, &output);
    EXPECT_FALSE(output.has_result());
    EXPECT_TRUE(output.has_preedit());
    EXPECT_TRUE(output.has_candidates());

    const commands::Preedit &conversion = output.preedit();
    EXPECT_EQ(1, conversion.segment_size());
    EXPECT_EQ("GoogleSuggest", conversion.segment(0).value());
  }

  {
    // Initialize no predict result triggered by "G".  It's possible
    // by Google Suggest.
    Segments segments;
    segments.set_request_type(Segments::PREDICTION);
    Segment *segment = segments.add_segment();
    segment->set_key("G");
    convertermock_->SetStartPredictionWithComposer(&segments, false);
  }
  // Hide prediction: even though the expansion returned nothing, the
  // existing candidate must survive.
  converter.CandidateNext(*composer_);
  EXPECT_TRUE(converter.IsActive());

  {  // Check the conversion.
    commands::Output output;
    converter.FillOutput(*composer_, &output);
    EXPECT_FALSE(output.has_result());
    EXPECT_TRUE(output.has_preedit());
    EXPECT_TRUE(output.has_candidates());

    const commands::Preedit &conversion = output.preedit();
    EXPECT_EQ(1, conversion.segment_size());
    EXPECT_EQ("GoogleSuggest", conversion.segment(0).value());

    const commands::Candidates &candidates = output.candidates();
    EXPECT_EQ(1, candidates.candidate_size());
  }
}
// GetReadingText should return the hiragana reading of a kanji string by
// running a reverse conversion through the converter.
TEST_F(SessionConverterTest, GetReadingText) {
  SessionConverter converter(convertermock_.get());

  // "阿伊宇江於"
  const string kanji_aiueo =
      "\xe9\x98\xbf\xe4\xbc\x8a\xe5\xae\x87\xe6\xb1\x9f\xe6\x96\xbc";
  // "あいうえお"
  const string hiragana_aiueo =
      "\xe3\x81\x82\xe3\x81\x84\xe3\x81\x86\xe3\x81\x88\xe3\x81\x8a";

  // Set up Segments for reverse conversion.
  Segments reverse_segments;
  Segment *segment;
  segment = reverse_segments.add_segment();
  segment->set_key(kanji_aiueo);
  Segment::Candidate *candidate;
  candidate = segment->add_candidate();
  // For reverse conversion, key is the original kanji string.
  candidate->key = kanji_aiueo;
  candidate->value = hiragana_aiueo;
  convertermock_->SetStartReverseConversion(&reverse_segments, true);

  // Set up Segments for forward conversion.
  Segments segments;
  segment = segments.add_segment();
  segment->set_key(hiragana_aiueo);
  candidate = segment->add_candidate();
  candidate->key = hiragana_aiueo;
  candidate->value = kanji_aiueo;
  convertermock_->SetStartConversionWithComposer(&segments, true);

  string reading;
  EXPECT_TRUE(converter.GetReadingText(kanji_aiueo, &reading));
  EXPECT_EQ(hiragana_aiueo, reading);
}
// A suggestion triggered on an empty composition (zero-query suggestion)
// must produce a visible candidate list with no preedit.
TEST_F(SessionConverterTest, ZeroQuerySuggestion) {
  SessionConverter converter(convertermock_.get());

  // Set up a mock suggestion result keyed on the empty string.
  Segments segments;
  segments.set_request_type(Segments::SUGGESTION);
  Segment *segment;
  segment = segments.add_segment();
  segment->set_key("");
  segment->add_candidate()->value = "search";
  segment->add_candidate()->value = "input";
  convertermock_->SetStartSuggestionWithComposer(&segments, true);

  EXPECT_TRUE(composer_->Empty());
  EXPECT_TRUE(converter.Suggest(*composer_));
  EXPECT_TRUE(converter.IsCandidateListVisible());
  EXPECT_TRUE(converter.IsActive());

  {  // Check the output: candidates but no preedit, since nothing is typed.
    commands::Output output;
    converter.FillOutput(*composer_, &output);
    EXPECT_FALSE(output.has_result());
    EXPECT_FALSE(output.has_preedit());
    EXPECT_TRUE(output.has_candidates());

    const commands::Candidates &candidates = output.candidates();
    EXPECT_EQ(2, candidates.size());
    EXPECT_EQ("search", candidates.candidate(0).value());
    EXPECT_EQ("input", candidates.candidate(1).value());
  }
}
// Mocks recording whether SessionConverter delegates Reset()/Revert() to the
// underlying converter.
namespace {

// ConverterMock whose ResetConversion() records that it was called, letting
// tests verify that SessionConverter::Reset() delegates to the converter.
class ConverterMockForReset : public ConverterMock {
 public:
  bool ResetConversion(Segments *segments) const {
    reset_conversion_called_ = true;
    return true;
  }
  // Returns true iff ResetConversion() has been invoked since the last Reset().
  bool reset_conversion_called() const {
    return reset_conversion_called_;
  }
  // Clears the recorded call flag.
  void Reset() {
    reset_conversion_called_ = false;
  }
  ConverterMockForReset() : reset_conversion_called_(false) {}
 private:
  // mutable so the const interface method can record the call.
  mutable bool reset_conversion_called_;
};

// Same idea for RevertConversion(): records delegation from
// SessionConverter::Revert().
class ConverterMockForRevert : public ConverterMock {
 public:
  bool RevertConversion(Segments *segments) const {
    revert_conversion_called_ = true;
    return true;
  }
  // Returns true iff RevertConversion() has been invoked since the last Reset().
  bool revert_conversion_called() const {
    return revert_conversion_called_;
  }
  // Clears the recorded call flag.
  void Reset() {
    revert_conversion_called_ = false;
  }
  ConverterMockForRevert() : revert_conversion_called_(false) {}
 private:
  // mutable so the const interface method can record the call.
  mutable bool revert_conversion_called_;
};
}  // namespace
// SessionConverter::Reset() must delegate to the converter's ResetConversion().
TEST(SessionConverterResetTest, Reset) {
  ConverterMockForReset mock;
  ConverterFactory::SetConverter(&mock);
  SessionConverter converter(&mock);
  // The mock's flag starts cleared and is set only via ResetConversion().
  EXPECT_FALSE(mock.reset_conversion_called());
  converter.Reset();
  EXPECT_TRUE(mock.reset_conversion_called());
}
// SessionConverter::Revert() must delegate to the converter's RevertConversion().
TEST(SessionConverterRevertTest, Revert) {
  ConverterMockForRevert mock;
  ConverterFactory::SetConverter(&mock);
  SessionConverter converter(&mock);
  // The mock's flag starts cleared and is set only via RevertConversion().
  EXPECT_FALSE(mock.revert_conversion_called());
  converter.Revert();
  EXPECT_TRUE(mock.revert_conversion_called());
}
// CommitHead(n) must commit the first n characters of the composition as the
// result and report how many characters were committed; the remainder stays
// in the composer.
TEST_F(SessionConverterTest, CommitHead) {
  SessionConverter converter(convertermock_.get());
  // "あいうえお"
  composer_->InsertCharacterPreedit(aiueo_);

  size_t committed_size;
  converter.CommitHead(1, *composer_, &committed_size);
  EXPECT_EQ(1, committed_size);
  // The committed character must be removed from the composer by the caller.
  composer_->DeleteAt(0);

  commands::Output output;
  converter.FillOutput(*composer_, &output);
  EXPECT_TRUE(output.has_result());
  EXPECT_FALSE(output.has_candidates());

  const commands::Result &result = output.result();
  EXPECT_EQ("\xe3\x81\x82", result.value());  // "あ"
  EXPECT_EQ("\xe3\x81\x82", result.key());    // "あ"
  string preedit;
  composer_->GetStringForPreedit(&preedit);
  // "いうえお"
  EXPECT_EQ("\xe3\x81\x84\xe3\x81\x86\xe3\x81\x88\xe3\x81\x8a", preedit);

  converter.CommitHead(3, *composer_, &committed_size);
  EXPECT_EQ(3, committed_size);
  composer_->DeleteAt(0);
  composer_->DeleteAt(0);
  composer_->DeleteAt(0);
  // NOTE(review): `output` is reused here without Clear(); presumably
  // FillOutput overwrites the result field — confirm it does not merge.
  converter.FillOutput(*composer_, &output);
  EXPECT_TRUE(output.has_result());
  EXPECT_FALSE(output.has_candidates());

  const commands::Result &result2 = output.result();
  // "いうえ"
  EXPECT_EQ("\xe3\x81\x84\xe3\x81\x86\xe3\x81\x88", result2.value());
  // "いうえ"
  EXPECT_EQ("\xe3\x81\x84\xe3\x81\x86\xe3\x81\x88", result2.key());
  composer_->GetStringForPreedit(&preedit);
  EXPECT_EQ("\xe3\x81\x8a", preedit);  // "お"
}
// Committing a candidate flagged COMMAND_CANDIDATE must not produce a result
// in the output (the command is executed instead of being inserted).
TEST_F(SessionConverterTest, CommandCandidate) {
  SessionConverter converter(convertermock_.get());
  Segments segments;
  SetAiueo(&segments);
  FillT13Ns(&segments, composer_.get());
  // set COMMAND_CANDIDATE.
  segments.mutable_conversion_segment(0)->mutable_candidate(0)->attributes |=
      Segment::Candidate::COMMAND_CANDIDATE;
  convertermock_->SetStartConversionWithComposer(&segments, true);

  composer_->InsertCharacterPreedit(aiueo_);
  EXPECT_TRUE(converter.Convert(*composer_));

  converter.Commit();
  commands::Output output;
  converter.FillOutput(*composer_, &output);
  EXPECT_FALSE(output.has_result());
}
// Exercises how COMMAND_CANDIDATE interacts with the partial-commit paths:
// CommitFirstSegment, CommitSuggestionById and CommitSuggestionByIndex must
// all refuse to commit a command candidate (committed_size stays 0).
TEST_F(SessionConverterTest, CommandCandidateWithCommitCommands) {
  // "かまぼこの" + "いんぼう"
  const string kKamabokono =
      "\xe3\x81\x8b\xe3\x81\xbe\xe3\x81\xbc\xe3\x81\x93\xe3\x81\xae";
  const string kInbou =
      "\xe3\x81\x84\xe3\x82\x93\xe3\x81\xbc\xe3\x81\x86";
  composer_->InsertCharacterPreedit(kKamabokono + kInbou);

  {
    // The first candidate is a command candidate, so
    // CommitFirstSegment resets all conversion.
    SessionConverter converter(convertermock_.get());
    Segments segments;
    SetKamaboko(&segments);
    segments.mutable_conversion_segment(0)->mutable_candidate(0)->attributes =
        Segment::Candidate::COMMAND_CANDIDATE;
    convertermock_->SetStartConversionWithComposer(&segments, true);
    converter.Convert(*composer_);

    size_t committed_size = 0;
    converter.CommitFirstSegment(&committed_size);
    EXPECT_EQ(0, committed_size);

    commands::Output output;
    converter.FillOutput(*composer_, &output);
    EXPECT_FALSE(converter.IsActive());
    EXPECT_FALSE(output.has_result());
  }

  {
    // The second candidate is a command candidate, so
    // CommitFirstSegment commits all conversion.
    SessionConverter converter(convertermock_.get());
    Segments segments;
    SetKamaboko(&segments);
    segments.mutable_conversion_segment(1)->mutable_candidate(0)->attributes =
        Segment::Candidate::COMMAND_CANDIDATE;
    convertermock_->SetStartConversionWithComposer(&segments, true);
    converter.Convert(*composer_);

    size_t committed_size = 0;
    converter.CommitFirstSegment(&committed_size);
    EXPECT_EQ(Util::CharsLen(kKamabokono), committed_size);

    commands::Output output;
    converter.FillOutput(*composer_, &output);
    EXPECT_TRUE(converter.IsActive());
    EXPECT_TRUE(output.has_result());
  }

  {
    // The selected suggestion with Id is a command candidate.
    SessionConverter converter(convertermock_.get());
    Segments segments;
    SetAiueo(&segments);
    segments.mutable_conversion_segment(0)->mutable_candidate(0)->attributes =
        Segment::Candidate::COMMAND_CANDIDATE;
    convertermock_->SetStartSuggestionWithComposer(&segments, true);
    converter.Suggest(*composer_);

    size_t committed_size = 0;
    EXPECT_FALSE(converter.CommitSuggestionById(
        0, *composer_, &committed_size));
    EXPECT_EQ(0, committed_size);
  }

  {
    // The selected suggestion with Index is a command candidate.
    SessionConverter converter(convertermock_.get());
    Segments segments;
    SetAiueo(&segments);
    segments.mutable_conversion_segment(0)->mutable_candidate(1)->attributes =
        Segment::Candidate::COMMAND_CANDIDATE;
    convertermock_->SetStartSuggestionWithComposer(&segments, true);
    converter.Suggest(*composer_);

    size_t committed_size = 0;
    EXPECT_FALSE(converter.CommitSuggestionByIndex(
        1, *composer_, &committed_size));
    EXPECT_EQ(0, committed_size);
  }
}
} // namespace session
} // namespace mozc
|
__precompile__()

"""
    CodecLz4

LZ4 frame-format compression and decompression codecs built on top of
TranscodingStreams.
"""
module CodecLz4

using Compat

using TranscodingStreams: TranscodingStream, Memory, Error
using TranscodingStreams

export LZ4Compressor, LZ4CompressorStream,
    LZ4Decompressor, LZ4DecompressorStream,
    BlockSizeID, default_size, max64KB, max256KB, max1MB, max4MB,
    BlockMode, block_linked, block_independent,
    FrameType, normal_frame, skippable_frame

# deps.jl is generated by Pkg.build("CodecLz4") and records where the LZ4
# shared library lives; without it the bindings below cannot be loaded.
depsjl = joinpath(@__DIR__, "..", "deps", "deps.jl")
if isfile(depsjl)
    include(depsjl)
else
    error("CodecLz4 not properly installed. Please run Pkg.build(\"CodecLz4\") and restart julia")
end

"""
    LZ4Exception(src, msg)

Error raised by the LZ4 wrappers. `src` names the failing LZ4 routine and
`msg` carries the error description.
"""
struct LZ4Exception <: Exception
    src::AbstractString
    msg::AbstractString
end

# Colored one-line rendering of the error.
# NOTE(review): the backtrace arguments `bt`/`backtrace` are accepted but
# ignored — confirm that is intentional.
function Base.showerror(io::IO, ex::LZ4Exception, bt; backtrace=false)
    printstyled(io, "$(ex.src): $(ex.msg)", color=Base.error_color())
end

include("lz4frame.jl")
include("stream_compression.jl")

end
|
package treesandgraphs

// Indices of stones grouped by x coordinate (column) and y coordinate (row).
// Rebuilt on every removeStones call; module-level so the recursive dfs can
// see them without widening its signature.
private var xIndex = mapOf<Int, List<Int>>()
private var yIndex = mapOf<Int, List<Int>>()
private var visited = booleanArrayOf()

/**
 * Depth-first search over stones sharing a row or column with stone [index].
 * Returns the number of stones, excluding [index] itself, in the connected
 * component reachable from [index] — i.e. how many of them can be removed.
 * NOTE: recursion depth is bounded by the component size — fine for the
 * LeetCode constraints, but deep components would need an iterative DFS.
 */
private fun dfs(stones: Array<IntArray>, index: Int): Int {
    visited[index] = true
    var removable = 0
    // orEmpty() replaces the original's unsafe `!!`: the key is always
    // present for a valid index, but this keeps the function total.
    for (neighbor in xIndex[stones[index][0]].orEmpty()) {
        if (!visited[neighbor]) {
            removable += dfs(stones, neighbor) + 1
        }
    }
    for (neighbor in yIndex[stones[index][1]].orEmpty()) {
        if (!visited[neighbor]) {
            removable += dfs(stones, neighbor) + 1
        }
    }
    return removable
}

/**
 * LeetCode 947 "Most Stones Removed with Same Row or Column".
 * Stones sharing a row or column form a connected component, and every stone
 * except one per component can be removed. Returns the maximum number of
 * removable stones.
 */
fun removeStones(stones: Array<IntArray>): Int {
    xIndex = stones.indices.groupBy { stones[it][0] }
    yIndex = stones.indices.groupBy { stones[it][1] }
    visited = BooleanArray(stones.size)
    var stonesToRemove = 0
    for (i in stones.indices) {
        if (!visited[i]) {
            stonesToRemove += dfs(stones, i)
        }
    }
    return stonesToRemove
}

fun main() {
    // Sample from the problem statement; expected output: 5.
    println(removeStones(arrayOf(intArrayOf(0, 0), intArrayOf(0, 1), intArrayOf(1, 0),
            intArrayOf(1, 2), intArrayOf(2, 1), intArrayOf(2, 2))))
}
|
import java.util.Scanner;
/**
 * Reads a cylinder's radius and height from standard input and prints its
 * total surface area and volume.
 */
public class prog2
{
    // Results live in static fields so main() can print them after the
    // constructor has computed them.
    public static double v, sa;

    /**
     * Computes the results for a cylinder of radius {@code r} and height
     * {@code h}: total surface area 2*pi*r*(r + h) and volume pi*r^2*h.
     */
    public prog2(double r, double h)
    {
        // Bug fix: the original computed sa = 6*3.14*r*h, which is not the
        // surface area of a cylinder; also use Math.PI instead of 3.14.
        sa = 2 * Math.PI * r * (r + h);
        v = Math.PI * r * r * h;
    }

    public static void main(String[] args)
    {
        Scanner obj1 = new Scanner(System.in);
        System.out.println("Enter the radius");
        double n1 = obj1.nextDouble();
        System.out.println("Enter the height");
        double n2 = obj1.nextDouble();
        prog2 obj2 = new prog2(n1, n2);
        System.out.println("The surface area is " + sa);
        System.out.println("The volume is " + v);
    }
}
|
package scala.collection.immutable
import org.junit.{Assert, Test}
import org.junit.runner.RunWith
import org.junit.runners.JUnit4
import scala.tools.testing.AssertUtil
@RunWith(classOf[JUnit4])
class RangeTest {
  import Assert._
  import AssertUtil._

  /**
   * Regression test for SI-10060: `min`/`max` on Long, Int and plain ranges.
   * Non-empty ranges report their true endpoints; empty ranges (step pointing
   * away from the end) must throw NoSuchElementException.
   *
   * Note: JUnit's assertEquals takes (expected, actual) — the original test
   * had the arguments reversed, which produces misleading failure messages.
   */
  @Test
  def test_SI10060_numeric_range_min_max(): Unit = {
    assertEquals(1, Range.Long.inclusive(1, 9, 1).min)
    assertEquals(9, Range.Long.inclusive(1, 9, 1).max)
    assertEquals(1, Range.Long.inclusive(9, 1, -1).min)
    assertEquals(9, Range.Long.inclusive(9, 1, -1).max)
    assertThrows[java.util.NoSuchElementException](Range.Long.inclusive(1, 9, -1).min)
    assertThrows[java.util.NoSuchElementException](Range.Long.inclusive(1, 9, -1).max)
    assertThrows[java.util.NoSuchElementException](Range.Long.inclusive(9, 1, 1).min)
    assertThrows[java.util.NoSuchElementException](Range.Long.inclusive(9, 1, 1).max)

    assertEquals(1, Range.Int.inclusive(1, 9, 1).min)
    assertEquals(9, Range.Int.inclusive(1, 9, 1).max)
    assertEquals(1, Range.Int.inclusive(9, 1, -1).min)
    assertEquals(9, Range.Int.inclusive(9, 1, -1).max)
    assertThrows[java.util.NoSuchElementException](Range.Int.inclusive(1, 9, -1).min)
    assertThrows[java.util.NoSuchElementException](Range.Int.inclusive(1, 9, -1).max)
    assertThrows[java.util.NoSuchElementException](Range.Int.inclusive(9, 1, 1).min)
    assertThrows[java.util.NoSuchElementException](Range.Int.inclusive(9, 1, 1).max)

    assertEquals(1, Range.inclusive(1, 9, 1).min)
    assertEquals(9, Range.inclusive(1, 9, 1).max)
    assertEquals(1, Range.inclusive(9, 1, -1).min)
    assertEquals(9, Range.inclusive(9, 1, -1).max)
    assertThrows[java.util.NoSuchElementException](Range.inclusive(1, 9, -1).min)
    assertThrows[java.util.NoSuchElementException](Range.inclusive(1, 9, -1).max)
    assertThrows[java.util.NoSuchElementException](Range.inclusive(9, 1, 1).min)
    assertThrows[java.util.NoSuchElementException](Range.inclusive(9, 1, 1).max)
  }
}
|
---
title: "POMP++: Facilitating Postmortem Program Diagnosis with Value-set Analysis"
date: 2019-01-01
publishDate: 2019-09-04
authors: ["Dongliang Mu", "Yunlan Du", "*Jianhao Xu*", "Jun Xu", "Xinyu Xing", "Bing Mao", "Peng Liu"]
publication_types: ["2"]
abstract: ""
featured: false
publication: "*IEEE Transactions on Software Engineering*"
---
|
---
title: Confirmation
letter: C
permalink: "/definitions/usc-confirmation.html"
body: Approval of a plan of reorganization by a bankruptcy judge.
published_at: '2018-08-06'
source: US Courts Glossary
layout: post
---
|
/**
* Copyright 2020 - 2022 EPAM Systems
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.epam.drill.e2e
import com.epam.drill.admin.*
import com.epam.drill.admin.api.agent.*
import com.epam.drill.admin.api.group.*
import com.epam.drill.admin.api.routes.*
import com.epam.drill.admin.common.serialization.*
import com.epam.drill.admin.endpoints.*
import com.epam.drill.e2e.plugin.*
import com.epam.drill.plugin.api.processing.*
import com.epam.dsm.*
import com.epam.dsm.test.*
import io.ktor.http.*
import io.ktor.locations.*
import io.ktor.server.testing.*
import kotlinx.coroutines.*
import kotlinx.coroutines.channels.*
import org.junit.jupiter.api.*
import java.io.*
import java.util.*
import kotlin.time.*
import kotlin.time.TimeSource.*
/**
 * Base class for drill-admin end-to-end tests: boots a shared test
 * database container, provisions a per-test working directory, and
 * offers a helper to register an agent over the test HTTP engine.
 */
abstract class AdminTest {
    init {
        // Container is started once per JVM; only the datasource config is
        // refreshed per test instance.
        TestDatabaseContainer.startOnce()
        hikariConfig = TestDatabaseContainer.createDataSource()
    }

    // Optional observer of grouped-agents UI updates, set via uiWatcher().
    var watcher: (suspend AsyncTestAppEngine.(Channel<GroupedAgentsDto>) -> Unit?)? = null
    // Unique scratch directory per test-class instance.
    val projectDir = File("build/tmp/test/${this::class.simpleName}-${UUID.randomUUID()}")
    lateinit var asyncEngine: AsyncTestAppEngine
    val engine: TestApplicationEngine get() = asyncEngine.engine
    // Auth token used by default for API calls.
    lateinit var globToken: String
    lateinit var storeManager: StoreClient
    var agentPart: AgentPart<*>? = null
    internal val testAgentContext = TestAgentContext()

    // Fluent setter for the UI watcher; returns `this` for chaining.
    fun uiWatcher(bl: suspend AsyncTestAppEngine.(Channel<GroupedAgentsDto>) -> Unit): AdminTest {
        this.watcher = bl
        return this
    }

    /**
     * POSTs an agent registration to /api/agents/{agentId} with a bearer
     * token and a default minimal payload; `resultBlock` receives the HTTP
     * status and response body for assertions.
     */
    fun AsyncTestAppEngine.register(
        agentId: String,
        token: String = globToken,
        payload: AgentRegistrationDto = AgentRegistrationDto(
            name = "xz",
            description = "ad",
            systemSettings = SystemSettingsDto(
                packages = listOf("testPrefix")
            ),
            plugins = emptyList()
        ),
        resultBlock: suspend (HttpStatusCode?, String?) -> Unit = { _, _ -> },
    ) = callAsync(context) {
        with(engine) {
            handleRequest(
                HttpMethod.Post,
                toApiUri(agentApi { ApiRoot.Agents.Agent(it, agentId) })
            ) {
                addHeader(HttpHeaders.Authorization, "Bearer $token")
                addHeader(HttpHeaders.ContentType, ContentType.Application.Json.toString())
                setBody(AgentRegistrationDto.serializer() stringify payload)
            }.apply { resultBlock(response.status(), response.content) }
        }
    }
}
// Resolve a type-safe locations object to its URI and ensure it is rooted
// under the "/api" prefix.
fun TestApplicationEngine.toApiUri(location: Any): String {
    val uri = application.locations.href(location)
    return if (uri.startsWith("/api")) uri else "/api$uri"
}
// Resolve a locations object to its destination path (used for websocket
// subscriptions).
fun TestApplicationEngine.toWsDestination(location: Any): String = application.toLocation(location)
// Watchdog coroutine: polls every 50 ms and, once `timeout` has elapsed,
// cancels the children of `context` and fails the test. It never completes
// normally — callers are expected to cancel the returned job themselves.
@ExperimentalTime
fun CoroutineScope.createTimeoutJob(timeout: Duration, context: Job) = launch {
    val expirationMark = Monotonic.markNow() + timeout
    while (true) {
        delay(50)
        if (expirationMark.hasPassedNow()) {
            context.cancelChildren()
            fail("Timeout exception")
        }
    }
}
|
package loadbalancer
import (
"fmt"
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/aws/awserr"
"github.com/aws/aws-sdk-go/service/elbv2"
"github.com/cantara/nerthus/aws/server"
"github.com/cantara/nerthus/aws/util"
)
// Target ties a server to a target-group registration in an ELBv2 load
// balancer. `created` records whether Create succeeded so Delete can be a
// no-op for never-registered targets.
type Target struct {
	targetGroup TargetGroup
	server      server.Server
	elb         *elbv2.ELBV2
	created     bool
}
// NewTarget builds a Target for server s in target group tg after
// validating the supplied ELBv2 session. The returned Target is not yet
// registered with AWS; call Create for that.
func NewTarget(tg TargetGroup, s server.Server, elb *elbv2.ELBV2) (t Target, err error) {
	err = util.CheckELBV2Session(elb)
	if err != nil {
		return
	}
	t = Target{
		targetGroup: tg,
		server:      s,
		elb:         elb,
	}
	return
}
// Create registers the target's server with its target group via the
// ELBv2 RegisterTargets API and marks the Target as created on success.
// On AWS errors, a code-specific CreateError is built first and then
// wrapped again in a generic "Unable to register target" CreateError
// (deliberate two-level wrapping: the generic message carries the server
// and target-group identifiers).
//
// NOTE(review): the named return `id` is never assigned anywhere in this
// function, so callers always receive "" — confirm whether it should
// carry a registration identifier or be dropped.
func (t *Target) Create() (id string, err error) {
	err = util.CheckELBV2Session(t.elb)
	if err != nil {
		return
	}
	input := &elbv2.RegisterTargetsInput{
		TargetGroupArn: aws.String(t.targetGroup.ARN),
		Targets: []*elbv2.TargetDescription{
			{
				Id: aws.String(t.server.Id),
			},
		},
	}
	_, err = t.elb.RegisterTargets(input)
	if err != nil {
		// Translate known AWS error codes into descriptive CreateErrors.
		if aerr, ok := err.(awserr.Error); ok {
			switch aerr.Code() {
			case elbv2.ErrCodeTargetGroupNotFoundException:
				err = util.CreateError{
					Text: "Target group not found",
					Err:  aerr,
				}
			case elbv2.ErrCodeTooManyTargetsException:
				err = util.CreateError{
					Text: "Too many targets",
					Err:  aerr,
				}
			case elbv2.ErrCodeInvalidTargetException:
				err = util.CreateError{
					Text: "Invalid target",
					Err:  aerr,
				}
			case elbv2.ErrCodeTooManyRegistrationsForTargetIdException:
				err = util.CreateError{
					Text: "To many registrations for target id",
					Err:  aerr,
				}
			}
		}
		// Outer wrap: adds the server/target-group context to whichever
		// error (specific or raw) is currently held in err.
		err = util.CreateError{
			Text: fmt.Sprintf("Unable to register target for server %s in targetgroup %s.", t.server.Id, t.targetGroup.ARN),
			Err:  err,
		}
		return
	}
	t.created = true
	return
}
// Delete deregisters the server from the target group. It is a no-op when
// Create never succeeded (idempotent cleanup), and returns the raw AWS
// error, if any, unwrapped.
func (t *Target) Delete() (err error) {
	if !t.created {
		return
	}
	err = util.CheckELBV2Session(t.elb)
	if err != nil {
		return
	}
	input := &elbv2.DeregisterTargetsInput{
		TargetGroupArn: aws.String(t.targetGroup.ARN),
		Targets: []*elbv2.TargetDescription{
			{
				Id: aws.String(t.server.Id),
			},
		},
	}
	_, err = t.elb.DeregisterTargets(input)
	return
}
|
using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using Volo.Abp.Domain.Repositories;
namespace QYQ.DataManagement.FileManagement.Files;
/// <summary>
/// Repository for <see cref="File"/> aggregates with filtered paging support.
/// </summary>
public interface IFileRepository: IRepository<File, Guid>
{
    /// <summary>
    /// Returns one page of files: skips <paramref name="skipCount"/> items
    /// and takes up to <paramref name="maxResultCount"/>, optionally
    /// narrowed by <paramref name="filter"/> (filter semantics are defined
    /// by the implementation).
    /// </summary>
    Task<List<File>> GetPagingListAsync(
        string filter = null,
        int maxResultCount = 10,
        int skipCount = 0,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Returns the total number of files matching <paramref name="filter"/>,
    /// for computing page counts alongside <see cref="GetPagingListAsync"/>.
    /// </summary>
    Task<long> GetPagingCountAsync(string filter = null,
        CancellationToken cancellationToken = default);
}
|
module Xlocalize
  # Imports XLIFF translation data and renders .strings file content.
  class Importer
    # Render a translations hash ({key => {target => note}}) into .strings
    # format: an optional /* note */ comment line, then "key" = "target";
    # with entries separated by blank lines.
    def strings_content_from_translations_hash(translations_hash)
      chunks = []
      translations_hash.each do |key, translations|
        translations.each do |target, note|
          chunks << "/* #{note} */\n" unless note.empty?
          chunks << "\"#{key}\" = #{target.inspect};\n\n"
        end
      end
      chunks.join
    end

    # For every <trans-unit> under the node's body, replace the entry in
    # `translations` with {target_text => note_text} — but only for ids
    # already present in the hash; unknown ids are ignored.
    def translate_from_node(translations, node)
      (node > "body > trans-unit").each do |unit|
        key = unit["id"]
        next unless translations.key?(key)
        translations[key] = { (unit > "target").text => (unit > "note").text }
      end
    end
  end
end
|
package scaladex.core.model
import org.scalatest.funspec.AsyncFunSpec
import org.scalatest.matchers.should.Matchers
import scaladex.core.model.Artifact._
// Round-trip tests for ArtifactId.parse: each suffix convention
// (Scala.js, Scala Native, Scala 3, sbt plugins, plain Java) must parse
// into the expected (Name, BinaryVersion) pair and re-render to the same
// artifactId string via `.value`.
class ArtifactIdTests extends AsyncFunSpec with Matchers {
  describe("parsing artifacts") {
    it("parses scalajs") {
      val artifactId = "cats-core_sjs0.6_2.11"
      val expected = ArtifactId(Name("cats-core"), BinaryVersion(ScalaJs.`0.6`, Scala.`2.11`))
      val result = ArtifactId.parse(artifactId)
      result should contain(expected)
      result.get.value shouldBe artifactId
    }
    it("parses scala-native") {
      val artifactId = "cats-core_native0.4_2.11"
      val expected = ArtifactId(Name("cats-core"), BinaryVersion(ScalaNative.`0.4`, Scala.`2.11`))
      val result = ArtifactId.parse(artifactId)
      result should contain(expected)
      result.get.value shouldBe artifactId
    }
    it("parses scala3 versions") {
      val artifactId = "circe_cats-core_3"
      val expected = ArtifactId(Name("circe_cats-core"), BinaryVersion(Jvm, Scala.`3`))
      val result = ArtifactId.parse(artifactId)
      result should contain(expected)
      result.get.value shouldBe artifactId
    }
    it("parses scala3 compiler") {
      val artifactId = "scala3-compiler_3"
      val expected = ArtifactId(Name("scala3-compiler"), BinaryVersion(Jvm, Scala.`3`))
      val result = ArtifactId.parse(artifactId)
      result should contain(expected)
      result.get.value shouldBe artifactId
    }
    it("parses sbt") {
      val artifactId = "sbt-microsites_2.12_1.0"
      val expected =
        ArtifactId(Name("sbt-microsites"), BinaryVersion(SbtPlugin.`1.0`, Scala.`2.12`))
      val result = ArtifactId.parse(artifactId)
      result should contain(expected)
      result.get.value shouldBe artifactId
    }
    it("parse Java Artifact") {
      val artifactId = "sparrow"
      val expected = ArtifactId(Name("sparrow"), BinaryVersion(Jvm, Java))
      val result = ArtifactId.parse(artifactId)
      result should contain(expected)
      result.get.value shouldBe artifactId
    }
    // Negative cases: full Scala versions and malformed suffixes must not parse.
    it("should not parse full scala version") {
      ArtifactId.parse("scalafix-core_2.12.2") shouldBe None
    }
    it("should not parse correctly") {
      ArtifactId.parse("scalafix-core_2.10_0.12") shouldBe None
    }
    // "_jvm" is part of the artifact name here, not a platform suffix.
    it("handles special case") {
      val artifactId = "banana_jvm_2.11"
      val expected = ArtifactId(Name("banana_jvm"), BinaryVersion(Jvm, Scala.`2.11`))
      val result = ArtifactId.parse(artifactId)
      result should contain(expected)
      result.get.value shouldBe artifactId
    }
  }
}
|
package main
import (
"fmt"
"github.com/marcoshuck/go-distributed/challenge_2/pkg/consumer"
"github.com/marcoshuck/go-distributed/challenge_2/pkg/queue"
"github.com/marcoshuck/go-distributed/challenge_2/pkg/sender"
"github.com/marcoshuck/go-distributed/challenge_2/pkg/simulations"
"github.com/streadway/amqp"
"log"
"os"
)
// main wires the simulations microservice to RabbitMQ: it dials the broker
// using RABBITMQ_* environment variables, declares the request/deployment
// queues, and starts the simulations service with its sender and consumer.
//
// Fix: the original deferred conn.Close()/channel.Close() BEFORE checking
// the error — on a failed Dial/Channel the value can be nil and the
// deferred Close would panic. Defers now follow their error checks.
// (log.Fatal exits without running defers; they only matter on a normal
// return path.)
func main() {
	user := os.Getenv("RABBITMQ_USER")
	password := os.Getenv("RABBITMQ_PASSWORD")
	host := os.Getenv("RABBITMQ_HOST")
	port := os.Getenv("RABBITMQ_PORT")

	log.Printf("Connecting to the AMQP server on %s:%s\n", host, port)
	conn, err := amqp.Dial(fmt.Sprintf("amqp://%s:%s@%s:%s/", user, password, host, port))
	if err != nil {
		log.Fatal("error while connecting to RabbitMQ:", err)
	}
	defer conn.Close()

	log.Println("Attempting to open new channel")
	channel, err := conn.Channel()
	if err != nil {
		log.Fatal("error while creating new channel:", err)
	}
	defer channel.Close()

	log.Println("Attempting to declare or use queue:", queue.SimulationRequests)
	_, err = queue.NewAMQPQueue(channel, queue.SimulationRequests)
	if err != nil {
		log.Fatal("error while opening queue:", err)
	}
	log.Println("Attempting to declare or use queue:", queue.SimulationDeployment)
	_, err = queue.NewAMQPQueue(channel, queue.SimulationDeployment)
	if err != nil {
		log.Fatal("error while opening queue:", err)
	}

	log.Println("Setting up new sender to connect to the worker pool microservice")
	send := sender.NewSender(channel)
	log.Println("Setting up new consumer to connect to the http microservice")
	cons := consumer.NewConsumer(channel)
	repository := simulations.NewRepository()
	srv := simulations.NewService(repository, send, cons)
	log.Println("Initializing simulations microservice")
	err = srv.Init()
	if err != nil {
		log.Fatal("error while initializing simulation service:", err)
	}
	log.Println("Simulations microservice initialized")
	// Block forever; the service runs on background goroutines/consumers.
	select {}
}
|
Duplicate Reference definitions:
.
[a]: b
[a]: c
.
source/path:2: (WARNING/2) Duplicate reference definition: A
.
Missing Reference:
.
[a](b)
.
source/path:1: (WARNING/2) Reference not found: b
.
Unknown role:
.
abc
{xyz}`a`
.
source/path:3: (ERROR/3) Unknown interpreted text role "xyz".
.
Unknown directive:
.
```{xyz}
```
.
source/path:2: (ERROR/3) Unknown directive type "xyz".
.
Bad Front Matter:
.
---
a: {
---
.
source/path:1: (ERROR/3) Front matter block:
while parsing a flow node
expected the node content, but found '<stream end>'
in "<unicode string>", line 1, column 5:
a: {
^
.
Directive parsing error:
.
```{class}
```
.
source/path:2: (ERROR/3) Directive 'class': 1 argument(s) required, 0 supplied
.
Directive run error:
.
```{date}
x
```
.
source/path:2: (ERROR/3) Invalid context: the "date" directive can only be used within a substitution definition.
.
Non-consecutive headings:
.
# title 1
### title 3
.
source/path:2: (WARNING/2) Non-consecutive header level increase; 1 to 3
.
|
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using Web.Models;
namespace Web.Factory.AbstractFactory
{
/// <summary>
/// Abstract-factory selector: picks the computer factory for an employee
/// from their type id (1 = MAC line, 2 = Dell line); managers get the
/// laptop variant. Returns null for any other type id.
/// </summary>
public class EmployeeSystemFactory
{
    public IComputerFactory Create(Employee e)
    {
        IComputerFactory result = null;
        switch (e.EmployeeTypeId)
        {
            case 1:
                result = e.JobDescription == "Manager"
                    ? (IComputerFactory)new MACLaptopFactory()
                    : new MACFactory();
                break;
            case 2:
                result = e.JobDescription == "Manager"
                    ? (IComputerFactory)new DellLaptopFactory()
                    : new DellFactory();
                break;
        }
        return result;
    }
}
}
|
require 'rdcl/package/package_part.rb'
require 'rdcl/package/nos_part.rb'
require 'rdcl/package/raw_part.rb'
require 'rdcl/package/protocol_part.rb'
module RDCL
  # Instantiates the concrete PackagePart subclass encoded in a part's
  # flag bits.
  class PackagePartFactory
    # Build the part object selected by `flags`, slicing its bytes out of
    # `file_data` at [file_offset, size). Raises (via const_get on nil)
    # when the masked flags match no known part type, as before.
    def self.part_factory(flags, file_data, offset, file_offset, size)
      part_class =
        case flags & PackagePart::PART_TYPE_MASK
        when PackagePart::PROTOCOL_PART then :ProtocolPart
        when PackagePart::NOS_PART then :NOSPart
        when PackagePart::RAW_PART then :RawPart
        end
      RDCL.const_get(part_class).new(file_data[file_offset, size], offset, file_offset, size)
    end
  end
end
|
// Copyright (c) 2017 Uber Technologies, Inc.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
package segment
import (
"errors"
"github.com/m3db/m3/src/m3ninx/doc"
"github.com/m3db/m3/src/m3ninx/index"
"github.com/m3db/m3/src/m3ninx/postings"
)
var (
	// ErrClosed is the error returned when attempting to perform operations on a
	// segment that has already been closed.
	ErrClosed = errors.New("segment has been closed")
)

// Segment is a sub-collection of documents within an index.
type Segment interface {
	// FieldsIterable returns an iterable fields, for which is not
	// safe for concurrent use. For concurrent use call FieldsIterable
	// multiple times.
	FieldsIterable() FieldsIterable
	// TermsIterable returns an iterable terms, for which is not
	// safe for concurrent use. For concurrent use call TermsIterable
	// multiple times.
	TermsIterable() TermsIterable
	// Size returns the number of documents within the Segment. It returns
	// 0 if the Segment has been closed.
	Size() int64
	// ContainsID returns a bool indicating if the Segment contains the provided ID.
	// docID is the raw document identifier bytes.
	ContainsID(docID []byte) (bool, error)
	// ContainsField returns a bool indicating if the Segment contains the provided field.
	ContainsField(field []byte) (bool, error)
	// Reader returns a point-in-time accessor to search the segment.
	Reader() (Reader, error)
	// Close closes the segment and releases any internal resources.
	Close() error
}
// Reader extends index reader interface to allow for reading
// of fields and terms.
type Reader interface {
	index.Reader
	FieldsIterable
	TermsIterable
	// ContainsField returns a bool indicating if the Segment contains the provided field.
	ContainsField(field []byte) (bool, error)
}

// FieldsIterable can iterate over segment fields, it is not by default
// concurrency safe.
type FieldsIterable interface {
	// Fields returns an iterator over the list of known fields, in order
	// by name, it is not valid for reading after mutating the
	// builder by inserting more documents.
	Fields() (FieldsIterator, error)
}

// FieldsPostingsListIterable can iterate over segment fields/postings lists, it is not by default
// concurrency safe.
type FieldsPostingsListIterable interface {
	// Fields returns an iterator over the list of known fields, in order
	// by name, it is not valid for reading after mutating the
	// builder by inserting more documents.
	FieldsPostingsList() (FieldsPostingsListIterator, error)
}

// TermsIterable can iterate over segment terms, it is not by default
// concurrency safe.
type TermsIterable interface {
	// Terms returns an iterator over the known terms values for the given
	// field, in order by name, it is not valid for reading after mutating the
	// builder by inserting more documents.
	Terms(field []byte) (TermsIterator, error)
}
// OrderedBytesIterator iterates over a collection of []bytes in lexicographical order.
type OrderedBytesIterator interface {
	// Next returns a bool indicating if there are any more elements.
	Next() bool
	// Current returns the current element.
	// NB: the element returned is only valid until the subsequent call to Next().
	Current() []byte
	// Err returns any errors encountered during iteration.
	Err() error
	// Close releases any resources held by the iterator.
	Close() error
}

// FieldsPostingsListIterator iterates over all known fields.
type FieldsPostingsListIterator interface {
	Iterator
	// Current returns the current field and associated postings list.
	// NB: the field returned is only valid until the subsequent call to Next().
	Current() ([]byte, postings.List)
}

// FieldsIterator iterates over all known fields.
type FieldsIterator interface {
	Iterator
	// Current returns the current field.
	// NB: the field returned is only valid until the subsequent call to Next().
	Current() []byte
}

// TermsIterator iterates over all known terms for the provided field.
type TermsIterator interface {
	Iterator
	// Current returns the current element.
	// NB: the element returned is only valid until the subsequent call to Next().
	Current() (term []byte, postings postings.List)
}

// Iterator holds common iterator methods (embedded by the iterators above;
// pair every sequence of Next() calls with a final Err() check and Close()).
type Iterator interface {
	// Next returns a bool indicating if there are any more elements.
	Next() bool
	// Err returns any errors encountered during iteration.
	Err() error
	// Close releases any resources held by the iterator.
	Close() error
}
// MutableSegment is a segment which can be updated.
type MutableSegment interface {
	Segment
	DocumentsBuilder
	// Fields returns an iterator over the list of known fields, in order
	// by name, it is not valid for reading after mutating the
	// builder by inserting more documents.
	Fields() (FieldsIterator, error)
	// Offset returns the postings offset.
	Offset() postings.ID
	// Seal marks the Mutable Segment immutable.
	Seal() error
	// IsSealed returns true iff the segment is open and un-sealed.
	IsSealed() bool
}

// ImmutableSegment is segment that has been written to disk.
type ImmutableSegment interface {
	Segment
	// FreeMmap releases memory-mapped resources backing the segment.
	FreeMmap() error
}

// Builder is a builder that can be used to construct segments.
type Builder interface {
	FieldsPostingsListIterable
	TermsIterable
	// Reset resets the builder for reuse.
	Reset(offset postings.ID)
	// Docs returns the current docs slice, this is not safe to modify
	// and is invalidated on a call to reset.
	Docs() []doc.Document
	// AllDocs returns an iterator over the documents known to the Reader.
	AllDocs() (index.IDDocIterator, error)
}

// DocumentsBuilder is a builder that has documents written to it.
type DocumentsBuilder interface {
	Builder
	index.Writer
	// SetIndexConcurrency sets the concurrency used for building the segment.
	SetIndexConcurrency(value int)
	// IndexConcurrency returns the concurrency used for building the segment.
	IndexConcurrency() int
}

// CloseableDocumentsBuilder is a builder that has documents written to it and has freeable resources.
type CloseableDocumentsBuilder interface {
	DocumentsBuilder
	Close() error
}

// SegmentsBuilder is a builder that is built from segments.
type SegmentsBuilder interface {
	Builder
	// AddSegments adds segments to build from.
	AddSegments(segments []Segment) error
}
|
(ns wombats-web-client.utils.forms
(:require [re-frame.core :as re-frame]
[wombats-web-client.utils.errors :as e]))
(defn get-value
  "Read the .value of a DOM event's target element."
  [element]
  (.. element -target -value))
(defn optionize
  "Shape each element of `coll` into {:id ... :display-name ...}, reading
  the two values via the get-in paths `id-key` and `name-key`."
  [id-key name-key coll]
  (for [el coll]
    {:id (get-in el id-key)
     :display-name (get-in el name-key)}))
(defn cancel-modal-input
  "Hiccup for a CANCEL button that closes the current modal by
  dispatching [:set-modal nil]."
  []
  [:input.modal-button {:type "button"
                        :value "CANCEL"
                        :on-click (fn []
                                    (re-frame/dispatch [:set-modal nil]))}])
(defn submit-modal-input
  "Hiccup for a modal submit button labelled `submit-value` that invokes
  `on-click-fn` when clicked."
  [submit-value on-click-fn]
  [:input.modal-button {:type "button"
                        :value submit-value
                        :on-click on-click-fn}])
(defn get-key-as-string
  "Render keyword `key-name` as a string, including its namespace part
  (\"ns/name\") when present."
  [key-name]
  (if-let [ns-str (namespace key-name)]
    (str ns-str "/" (name key-name))
    (name key-name)))
(defn- get-test-fn
  "Look up the default validation predicate for a known error value from
  the errors namespace. condp throws IllegalArgumentException for an
  unregistered error, surfacing misconfigured checks early."
  [error]
  (condp = error
    e/required-field-error e/required-field-fn
    e/not-an-integer e/not-an-integer-fn
    e/min-five e/min-five-fn
    e/max-twenty-five e/max-twenty-five-fn))
(defn input-error!
  "Validate the field named by :key-name in `check` against its test fn
  (explicit :test-fn, or the default derived from :error). When the test
  returns truthy (i.e. the input is invalid), stores :error in
  `cmpnt-state` under the derived '<key>-error' keyword."
  [check cmpnt-state]
  (let [{:keys [key-name test-fn error]} check
        input (key-name @cmpnt-state)
        key-string (get-key-as-string key-name)
        error-key (keyword (str key-string "-error"))
        ;; Fall back to the registry lookup when no explicit test fn given.
        test-func (if (nil? test-fn)
                    (get-test-fn error)
                    test-fn)]
    (when (test-func input)
      (swap! cmpnt-state assoc error-key error))))
|
const express = require("express")
const router = express.Router()
const cardano = require("../cardano/cardano.js")
// Convert an ipfs:// URL into a public ipfs.io gateway URL.
// Fix: `path` was assigned without const, leaking an implicit global
// (and throwing under strict mode / ES modules).
const convertIPFSURL = url => {
    const path = pluckIPFSpath(url)
    return "https://ipfs.io/ipfs/" + path
}

// Extract the trailing IPFS content identifier: every literal "ipfs" is
// replaced with "/", then the last "/"-separated component is kept.
const pluckIPFSpath = url => url.replace(/ipfs/g, "/").split("/").slice(-1)[0]

// Reducer mapping on-chain metadata to the UI shape: drops `files`,
// rewrites `image` to a gateway `imageURL`, keeps everything else.
const createAssetUIData = (acc, x) => {
    const { image, files, ...y } = x
    return acc.concat({
        ...y,
        imageURL: convertIPFSURL(image),
    })
}
// GET /stake/assets?stakeAddress=... — list UI-shaped asset metadata for a
// stake address. Responds { data, error } and never throws to the client.
// Fix: replaced `results.forEach(async r => xs.push(...))` — an async
// callback inside forEach is never awaited and only worked by accident —
// with a plain map of promises awaited via Promise.all (order preserved).
router.get("/stake/assets", async (req, res) => {
    const { stakeAddress } = req.query
    try {
        const results = await cardano.accountsAddressesAssets(stakeAddress)
        const assets = await Promise.all(results.map(r => cardano.assetsById(r.unit)))
        const data = assets.map(d => d.onchain_metadata).reduce(createAssetUIData, [])
        res.json({
            data,
            error: false,
        })
    } catch (err) {
        // Degrade to an empty list; the UI distinguishes via the error flag.
        res.json({
            data: [],
            error: true,
        })
    }
})

module.exports = router
|
#![allow(unused_assignments, unused_variables)]
// compile-flags: -C opt-level=2 # fix described in rustc_middle/mir/mono.rs
// NOTE(review): this looks like a compiler coverage test fixture (see the
// compile-flags comment above and the "coverage counters" note below) —
// the unusual one-token-per-line layout appears deliberate so that each
// span gets its own coverage region. Only comments are added here; the
// code itself is left untouched.
fn main() {
    // Initialize test constants in a way that cannot be determined at compile time, to ensure
    // rustc and LLVM cannot optimize out statements (or coverage counters) downstream from
    // dependent conditions.
    let is_true = std::env::args().len() == 1;
    let is_false = ! is_true;

    let mut some_string = Some(String::from("the string content"));
    println!(
        "The string or alt: {}"
        ,
        some_string
        .
        unwrap_or_else
        (
            ||
            {
                let mut countdown = 0;
                if is_false {
                    countdown = 10;
                }
                "alt string 1".to_owned()
            }
        )
    );

    some_string = Some(String::from("the string content"));
    let
        a
    =
        ||
    {
        let mut countdown = 0;
        if is_false {
            countdown = 10;
        }
        "alt string 2".to_owned()
    };
    println!(
        "The string or alt: {}"
        ,
        some_string
        .
        unwrap_or_else
        (
            a
        )
    );

    some_string = None;
    println!(
        "The string or alt: {}"
        ,
        some_string
        .
        unwrap_or_else
        (
            ||
            {
                let mut countdown = 0;
                if is_false {
                    countdown = 10;
                }
                "alt string 3".to_owned()
            }
        )
    );

    some_string = None;
    let
        a
    =
        ||
    {
        let mut countdown = 0;
        if is_false {
            countdown = 10;
        }
        "alt string 4".to_owned()
    };
    println!(
        "The string or alt: {}"
        ,
        some_string
        .
        unwrap_or_else
        (
            a
        )
    );

    let
        quote_closure
    =
        |val|
    {
        let mut countdown = 0;
        if is_false {
            countdown = 10;
        }
        format!("'{}'", val)
    };
    println!(
        "Repeated, quoted string: {:?}"
        ,
        std::iter::repeat("repeat me")
        .take(5)
        .map
        (
            quote_closure
        )
        .collect::<Vec<_>>()
    );

    // The closures below are deliberately unused/conditionally used to
    // exercise coverage of uninstantiated closure bodies.
    let
        _unused_closure
    =
        |
            mut countdown
        |
    {
        if is_false {
            countdown = 10;
        }
        "closure should be unused".to_owned()
    };

    let mut countdown = 10;
    let _short_unused_closure = | _unused_arg: u8 | countdown += 1;

    let short_used_covered_closure_macro = | used_arg: u8 | println!("called");
    let short_used_not_covered_closure_macro = | used_arg: u8 | println!("not called");
    let _short_unused_closure_macro = | _unused_arg: u8 | println!("not called");

    let _short_unused_closure_block = | _unused_arg: u8 | { println!("not called") };

    let _shortish_unused_closure = | _unused_arg: u8 | {
        println!("not called")
    };

    let _as_short_unused_closure = |
        _unused_arg: u8
    | { println!("not called") };

    let _almost_as_short_unused_closure = |
        _unused_arg: u8
    | { println!("not called") }
    ;

    let _short_unused_closure_line_break_no_block = | _unused_arg: u8 |
        println!("not called")
    ;

    let _short_unused_closure_line_break_no_block2 =
        | _unused_arg: u8 |
            println!(
                "not called"
            )
    ;

    let short_used_not_covered_closure_line_break_no_block_embedded_branch =
        | _unused_arg: u8 |
            println!(
                "not called: {}",
                if is_true { "check" } else { "me" }
            )
    ;

    let short_used_not_covered_closure_line_break_block_embedded_branch =
        | _unused_arg: u8 |
        {
            println!(
                "not called: {}",
                if is_true { "check" } else { "me" }
            )
        }
    ;

    let short_used_covered_closure_line_break_no_block_embedded_branch =
        | _unused_arg: u8 |
            println!(
                "not called: {}",
                if is_true { "check" } else { "me" }
            )
    ;

    let short_used_covered_closure_line_break_block_embedded_branch =
        | _unused_arg: u8 |
        {
            println!(
                "not called: {}",
                if is_true { "check" } else { "me" }
            )
        }
    ;

    // is_false is false at runtime, so these three calls never execute.
    if is_false {
        short_used_not_covered_closure_macro(0);
        short_used_not_covered_closure_line_break_no_block_embedded_branch(0);
        short_used_not_covered_closure_line_break_block_embedded_branch(0);
    }
    short_used_covered_closure_macro(0);
    short_used_covered_closure_line_break_no_block_embedded_branch(0);
    short_used_covered_closure_line_break_block_embedded_branch(0);
}
|
require 'spec_helper'
# Specs for DoubleDice::Matrix: arranging cleartext characters into rows
# whose width equals the password length, padding with underscores, and
# rearranging columns by the password's sort order.
describe DoubleDice::Matrix do
  describe 'its constructor' do
    it 'sets cleartext and password without sanitizing them' do
      cleartext, password = 'foo'.split(//), 'bar'.split(//)
      subject = DoubleDice::Matrix.new cleartext, password
      subject.cleartext.must_equal cleartext
      subject.password.must_equal password
    end
    it 'requires cleartext to be an array' do
      cleartext, password = 'foo', 'bar'.split(//)
      instantiate_matrix_and_rescue cleartext, password
    end
    it 'requires password to be an array' do
      cleartext, password = 'foo'.split(//), 'bar'
      instantiate_matrix_and_rescue cleartext, password
    end
    # Shared example for constructor parameter check
    def instantiate_matrix_and_rescue(*args)
      begin
        DoubleDice::Matrix.new(*args)
        refute true, 'Exception must be raised'
      rescue => e
        e.must_be_instance_of ArgumentError
      end
    end
  end
  describe '#to_a' do
    it 'arranges the cleartext in lines' do
      # 10 characters over a 5-character password -> two full rows.
      cleartext = %w(H E L L O W O R L D)
      password = %w(F U B A R)
      subject = DoubleDice::Matrix.new cleartext, password
      expected = [
        %w(H E L L O),
        %w(W O R L D)
      ]
      subject.to_a.must_equal expected
    end
    it 'fills last line with underscores' do
      # 10 characters over a 6-character password -> last row padded.
      cleartext = %w(H E L L O W O R L D)
      password = %w(F O O B A R)
      subject = DoubleDice::Matrix.new cleartext, password
      expected = [
        %w(H E L L O W),
        %w(O R L D _ _)
      ]
      subject.to_a.must_equal expected
    end
  end
  describe '#to_s' do
    it 'returns the arranged cleartext with password and index line header rows' do
      cleartext = %w(H E L L O W O R L D)
      password = %w(F O O B A R)
      subject = DoubleDice::Matrix.new cleartext, password
      expected = [
        password,
        password_index_line(password),
        %w(H E L L O W),
        %w(O R L D _ _)
      ].map(&:inspect).join("\n")
      subject.to_s.must_equal expected
    end
    def password_index_line(password)
      DoubleDice::PasswordVector.new(password).to_a
    end
  end
  describe '#sort' do
    it 'rearranges columns' do
      cleartext = %w(H E L L O W O R L D)
      password = %w(F U B A R)
      subject = DoubleDice::Matrix.new cleartext, password
      # %w(H E L L O),
      # %w(W O R L D)
      expected = [
        %w(L L H O E),
        %w(L R W D O)
      ]
      subject.sort.must_equal expected
    end
  end
end
|
#!/usr/bin/env bash
# Reinstall the macOS Command Line Tools from scratch: remove the current
# installation, then trigger Apple's installer prompt.
# Fix: fail fast — without `set -e` the install step would still run after
# a failed removal, leaving a mixed installation.
set -euo pipefail

sudo rm -rf /Library/Developer/CommandLineTools
sudo xcode-select --install
|
using System.Collections.Generic;
using System.Linq;
using System.Xml.Linq;
using Innovator.Client;
using Innovator.Client.QueryModel;
namespace InnovatorAdmin.Editor
{
public static class AmlTransforms
{
/// <summary>
/// Rewrites an AML query: collects the Item's criteria elements into a SQL
/// where-clause string, sets queryType="ignore", and stores the clause in
/// the "where" attribute (merged with any pre-existing where clause).
/// No-op when the Item has no "type" attribute.
/// Fixes: removed the unused local <c>origQueryType</c>, and guarded the
/// merge so an Item with no criteria no longer produces a clause starting
/// with a dangling " and ".
/// </summary>
public static void CriteriaToWhereClause(XElement elem)
{
  var item = elem.DescendantsAndSelf("Item").First();
  if (item.Attribute("type") == null)
    return;
  item.SetAttributeValue("queryType", "ignore");
  // Table alias is the bracketed item type with spaces replaced by '_'.
  var builder = new CriteriaBuilder()
  {
    Operator = "and",
    Type = "[" + item.Attribute("type").Value.Replace(' ', '_') + "]"
  };
  ProcessCriteria(elem, builder);
  var where = item.Attribute("where");
  var whereClause = builder.ToString();
  if (where != null)
    whereClause = string.IsNullOrEmpty(whereClause)
      ? where.Value
      : whereClause + " and " + where.Value;
  item.SetAttributeValue("where", whereClause);
}
/// <summary>
/// Recursively folds an element's criteria children into
/// <paramref name="builder"/>: "not"/"and"/"or" wrappers become nested
/// builders, "Relationships" is skipped, and every other element is a
/// field criterion. Consumed criteria elements are removed from the tree.
/// Fix: the "and" and "or" cases were copy-pasted from "not" and set
/// <c>Not = true</c>, which negated every explicit and/or group; they now
/// set <c>Not = false</c> (and "or" keeps Operator = "or").
/// </summary>
private static void ProcessCriteria(XElement parent, CriteriaBuilder builder)
{
  CriteriaBuilder newBuilder;
  foreach (var elem in parent.Elements().ToArray())
  {
    switch (elem.Name.LocalName)
    {
      case "Relationships":
        break;
      case "not":
        newBuilder = new CriteriaBuilder()
        {
          Not = true,
          Operator = "and",
          Type = builder.Type
        };
        ProcessCriteria(elem, newBuilder);
        builder.Builders.Add(newBuilder);
        elem.Remove();
        break;
      case "and":
        newBuilder = new CriteriaBuilder()
        {
          Not = false,
          Operator = "and",
          Type = builder.Type
        };
        ProcessCriteria(elem, newBuilder);
        builder.Builders.Add(newBuilder);
        elem.Remove();
        break;
      case "or":
        newBuilder = new CriteriaBuilder()
        {
          Not = false,
          Operator = "or",
          Type = builder.Type
        };
        ProcessCriteria(elem, newBuilder);
        builder.Builders.Add(newBuilder);
        elem.Remove();
        break;
      default:
        builder.Add(elem);
        elem.Remove();
        break;
    }
  }
}
/// <summary>
/// Accumulates field criteria and nested sub-builders, then renders them
/// as a SQL-like boolean expression joined by <see cref="Operator"/>,
/// optionally negated, with every field prefixed by <see cref="Type"/>.
/// </summary>
private class CriteriaBuilder
{
  private List<string> _expressions = new List<string>();
  private List<CriteriaBuilder> _builders = new List<CriteriaBuilder>();
  /// <summary>Wrap the rendered expression in "not (...)".</summary>
  public bool Not { get; set; }
  /// <summary>Joining operator ("and"/"or") between sibling criteria.</summary>
  public string Operator { get; set; }
  /// <summary>Bracketed table/type alias prefixed to every field name.</summary>
  public string Type { get; set; }
  public IList<CriteriaBuilder> Builders { get { return _builders; } }
  // Adds a criterion from an AML element: name -> field, condition
  // attribute -> operator (default "eq"), element text -> value.
  public void Add(XElement elem)
  {
    var conditionAttr = elem.Attribute("condition");
    Add(elem.Name.LocalName, conditionAttr == null ? "eq" : conditionAttr.Value, elem.Value);
  }
  // Renders one criterion into SQL text. Unrecognized operators are
  // silently dropped. NOTE(review): for like/not like the '*'->'%'
  // wildcard rewrite runs after quoting, so literal '*' characters in the
  // value cannot be expressed — confirm this is intended.
  public void Add(string field, string op, string expression)
  {
    switch (op)
    {
      case "between":
        _expressions.Add(Type + "." + field + " between " + expression);
        break;
      case "eq":
        _expressions.Add(Type + "." + field + " = " + RenderValue(expression));
        break;
      case "ge":
        _expressions.Add(Type + "." + field + " >= " + RenderValue(expression));
        break;
      case "gt":
        _expressions.Add(Type + "." + field + " > " + RenderValue(expression));
        break;
      case "in":
        // Wrap bare lists in parentheses if the caller omitted them.
        if (!string.IsNullOrWhiteSpace(expression) && expression.TrimStart()[0] != '(')
          expression = "(" + expression + ")";
        _expressions.Add(Type + "." + field + " in " + expression);
        break;
      case "is not null":
      case "is null":
        _expressions.Add(Type + "." + field + " " + op);
        break;
      case "is":
        _expressions.Add(Type + "." + field + " is " + expression);
        break;
      case "le":
        _expressions.Add(Type + "." + field + " <= " + RenderValue(expression));
        break;
      case "like":
        _expressions.Add(Type + "." + field + " like " + RenderValue(expression).Replace('*', '%'));
        break;
      case "lt":
        _expressions.Add(Type + "." + field + " < " + RenderValue(expression));
        break;
      case "ne":
        _expressions.Add(Type + "." + field + " <> " + RenderValue(expression));
        break;
      case "not between":
        _expressions.Add(Type + "." + field + " not between " + expression);
        break;
      case "not in":
        if (!string.IsNullOrWhiteSpace(expression) && expression.TrimStart()[0] != '(')
          expression = "(" + expression + ")";
        _expressions.Add("not " + Type + "." + field + " in " + expression);
        break;
      case "not like":
        _expressions.Add("not " + Type + "." + field + " like " + RenderValue(expression).Replace('*', '%'));
        break;
    }
  }
  // Joins own expressions and parenthesized sub-builders with Operator,
  // applying the optional outer negation.
  public override string ToString()
  {
    var expr = _expressions
      .Concat(_builders.Select(b => "(" + b.ToString() + ")"))
      .GroupConcat(" " + Operator.Trim() + " ", e => e);
    if (Not)
      return "not (" + expr + ")";
    return expr;
  }
  // Numbers pass through unquoted; everything else is single-quoted with
  // embedded quotes doubled. NOTE(review): double.TryParse here uses the
  // current culture — confirm invariant-culture parsing isn't needed.
  private string RenderValue(string value)
  {
    double number;
    if (double.TryParse(value, out number))
      return value;
    return "'" + value.Replace("'", "''") + "'";
  }
}
}
}
|
@model ContactManager.Models.Contact
@using ContactManager.Authorization
@using ContactManager
@using ContactManager.Models
@using ContactManager.Models.AccountViewModels
@using ContactManager.Models.ManageViewModels
@using Microsoft.AspNetCore.Identity
@addTagHelper *, Microsoft.AspNetCore.Mvc.TagHelpers
@using Microsoft.AspNetCore.Authorization
@inject IAuthorizationService AuthorizationService
@{
ViewData["Title"] = "Details";
}
<h2>Details</h2>
<div>
<h4>Contact</h4>
<hr />
<dl class="dl-horizontal">
<dt>
@Html.DisplayNameFor(model => model.Name)
</dt>
<dd>
@Html.DisplayFor(model => model.Name)
</dd>
<dt>
@Html.DisplayNameFor(model => model.Email)
</dt>
<dd>
@Html.DisplayFor(model => model.Email)
</dd>
<dt>
@Html.DisplayNameFor(model => model.Address)
</dt>
<dd>
@Html.DisplayFor(model => model.Address)
</dd>
<dt>
@Html.DisplayNameFor(model => model.City)
</dt>
<dd>
@Html.DisplayFor(model => model.City)
</dd>
<dt>
@Html.DisplayNameFor(model => model.State)
</dt>
<dd>
@Html.DisplayFor(model => model.State)
</dd>
<dt>
@Html.DisplayNameFor(model => model.Zip)
</dt>
<dd>
@Html.DisplayFor(model => model.Zip)
</dd>
<dt>
@Html.DisplayNameFor(model => model.Status)
</dt>
<dd>
@Html.DisplayFor(model => model.Status)
</dd>
</dl>
</div>
@if (Model.Status != ContactStatus.Approved)
{
@if (await AuthorizationService.AuthorizeAsync(User, Model, ContactOperations.Approve))
{
<form asp-action="SetStatus" asp-controller="Contacts" style="display:inline;">
<input type="hidden" name="id" value="@Model.ContactId" />
<input type="hidden" name="status" value="@ContactStatus.Approved" />
<button type="submit" class="btn btn-xs btn-success">Approve</button>
</form>
}
}
@if (Model.Status != ContactStatus.Rejected)
{
@if (await AuthorizationService.AuthorizeAsync(User, Model, ContactOperations.Reject))
{
<form asp-action="SetStatus" asp-controller="Contacts" style="display:inline;">
<input type="hidden" name="id" value="@Model.ContactId" />
<input type="hidden" name="status" value="@ContactStatus.Rejected" />
<button type="submit" class="btn btn-xs btn-danger">Reject</button>
</form>
}
}
<div>
@* Uncomment to perform authorization check. A real app would hide the edit link from
users who don't have edit access. A user without edit access can click the link but will get denied
access in the controller.
@if(await AuthorizationService.AuthorizeAsync(User, Model, ContactOperations.Update))
{
*@
<a asp-action="Edit" asp-route-id="@Model.ContactId">Edit</a> <text>|</text>
@*
}
*@
<a asp-action="Index">Back to List</a>
</div>
|
import { Component, OnInit } from '@angular/core';
import { LoginService } from '../services/login.service';
@Component({
selector: 'app-login',
templateUrl: './login.component.html',
})
export class LoginComponent implements OnInit {
  // Render the component only when the appId service is available.
  enabled = false;
  profile: any;

  constructor(private loginService: LoginService) {}

  async ngOnInit() {
    this.enabled = await this.loginService.isEnabled();
    if (!this.enabled) {
      return;
    }
    // Deliberately not awaited: the profile fills in whenever the user resolves.
    this.loginService.getUser().then((user) => {
      if (user.logged) {
        this.profile = user.profile;
      }
    });
  }
}
|
package numbers
// this is the sketch
import (
"encoding/json"
"testing"
)
// TestTypeHandling drives toType, numberType and the arithmetic helpers
// (Add, Subtract, Addi) across json.Number, int64 and float64 operands.
func TestTypeHandling(t *testing.T) {
	one := json.Number("1")
	wantInt := int64(1)
	gotInt := toType(one, int64Type)
	if gotInt != wantInt {
		t.Errorf("expected %d, got %T %v", wantInt, gotInt, gotInt)
	}

	oneAndHalf := json.Number("1.5")
	wantFloat := float64(1.5)
	gotFloat := toType(oneAndHalf, float64Type)
	if gotFloat != wantFloat {
		t.Errorf("expected %g, got %T %v", wantFloat, gotFloat, gotFloat)
	}

	// Mixed addition widens to float64.
	wantSum := float64(2.5)
	sum := Add(one, oneAndHalf)
	if numberType(sum) != float64Type {
		t.Errorf("expected float64Type, got %T %v", sum, sum)
	}
	if sumF := toType(sum, float64Type); sumF != wantSum {
		t.Errorf("expected %g, got %T %v", wantSum, sum, sum)
	}

	// Subtraction, larger minus smaller.
	wantDiff := float64(0.5)
	diff := Subtract(oneAndHalf, one)
	if numberType(diff) != float64Type {
		t.Errorf("expected float64Type, got %T %v", diff, diff)
	}
	if diffF := toType(diff, float64Type); diffF != wantDiff {
		t.Errorf("expected %g, got %T %v", wantDiff, diff, diff)
	}

	// Subtraction the other way round yields a negative float64.
	wantNegDiff := float64(-0.5)
	negDiff := Subtract(one, oneAndHalf)
	if numberType(negDiff) != float64Type {
		t.Errorf("expected float64Type, got %T %v", negDiff, negDiff)
	}
	if negDiffF := toType(negDiff, float64Type); negDiffF != wantNegDiff {
		t.Errorf("expected %g, got %T %v", wantNegDiff, negDiff, negDiff)
	}

	// Addi collapses to a plain int result.
	wantIntSum := int(6)
	intSum := Addi(int64(2), float64(4.0))
	if numberType(intSum) != intType {
		t.Errorf("expected intType, got %T %v", intSum, intSum)
	}
	if intSumI := toType(intSum, intType); intSumI != wantIntSum {
		t.Errorf("expected %d, got %T %v", wantIntSum, intSum, intSum)
	}
}
|
prompt --application/shared_components/logic/application_computations/title
-- Generated APEX export: an on-new-instance (session start) computation that
-- statically assigns the value 'Development' to application item TITLE,
-- replacing any existing value. Do not hand-edit the internal id.
begin
wwv_flow_api.create_flow_computation(
 p_id=>wwv_flow_api.id(27333639191845812157)
,p_computation_sequence=>10
,p_computation_item=>'TITLE'
,p_computation_point=>'ON_NEW_INSTANCE'
,p_computation_type=>'STATIC_ASSIGNMENT'
,p_computation_processed=>'REPLACE_EXISTING'
,p_computation=>'Development'
);
end;
/
|
import logging
import random
from huey import crontab
from chain.common.plugins import load_plugin
from chain.hughie.config import huey
from .peer import Peer
logger = logging.getLogger(__name__)
@huey.task()
def add_peer(ip, port, chain_version, nethash, os):
    """Validate a newly discovered peer and, if it verifies, store it as active.

    Skips peers that are invalid, suspended, or already known by IP.
    """
    # TODO: Disable this function if peer discoverability is disabled in config
    peer_manager = load_plugin("chain.plugins.peers")
    peer = Peer(ip=ip, port=port, chain_version=chain_version, nethash=nethash, os=os)
    if not peer.is_valid() or peer_manager.is_peer_suspended(peer):
        logger.warning("Peer %s:%s is invalid or suspended.", peer.ip, peer.port)
        return
    if peer_manager.peer_with_ip_exists(peer.ip):
        logger.warning("Peer %s:%s already exists.", peer.ip, peer.port)
        return
    # Wait for a bit more than 3s for response in case the node you're pinging is
    # the official node and is trying to add you as their peer and your p2p is not
    # responding. When they ping you, they wait for 3 seconds for response and if they
    # don't get any, they reject your response even though the request to them was
    # valid
    try:
        peer.verify_peer()
    except Exception as e:  # TODO: Be more specific
        logger.exception("Suspended peer because %s", str(e))
        peer_manager.suspend_peer(peer)
    else:
        logger.info(
            # Fix: log message previously read "Vefification".
            "Accepting peer %s:%s. Verification: %s",
            peer.ip,
            peer.port,
            peer.verification,
        )
        peer_manager.redis.set(peer_manager.key_active.format(peer.ip), peer.to_json())
@huey.task()
def reverify_peer(ip):
    """Re-run verification for the active peer at ``ip``.

    A peer that fails verification is suspended; a peer that passes is
    re-stored as active. Unknown IPs are logged and skipped.
    """
    peer_manager = load_plugin("chain.plugins.peers")
    peer = peer_manager.get_peer_by_ip(ip)
    if peer:
        # Fix: this log line previously ran BEFORE the None check and raised
        # AttributeError whenever the IP was unknown.
        logger.info("Reverifying peer %s:%s", peer.ip, peer.port)
        try:
            peer.verify_peer()
        except Exception as e:  # TODO: be more specific
            logger.error("Peer %s:%s failed verification: %s", peer.ip, peer.port, e)
            peer_manager.suspend_peer(peer)
        else:
            logger.info("Peer %s:%s successfully reverified", peer.ip, peer.port)
            peer_manager.redis.set(
                peer_manager.key_active.format(peer.ip), peer.to_json()
            )
    else:
        logger.warning("Couldn't find a peer to reverify")
@huey.task()
def reverify_all_peers():
    """Queue a re-verification task for every known peer."""
    manager = load_plugin("chain.plugins.peers")
    all_peers = manager.peers()
    logger.info("Reverifying all %s peers", len(all_peers))
    for known_peer in all_peers:
        reverify_peer(known_peer.ip)
@huey.periodic_task(crontab(minute="*/10"))
def discover_peers():
    """
    Fetch peers of your existing peers to increase the number of peers.

    Runs every 10 minutes. Each discovered peer is submitted through the
    async ``add_peer`` task; afterwards all peers are queued for
    re-verification.
    """
    # TODO: Disable this function if peer discoverability is disabled in config
    peer_manager = load_plugin("chain.plugins.peers")
    peers = peer_manager.peers()
    # Shuffle peers so we always get the peers from the different peers at the start
    random.shuffle(peers)
    for index, peer in enumerate(peers):
        his_peers = peer.fetch_peers()
        for his_peer in his_peers:
            add_peer(
                ip=his_peer.ip,
                port=his_peer.port,
                chain_version=his_peer.chain_version,
                nethash=his_peer.nethash,
                os=his_peer.os,
            )
        # Stop only after at least five peers (indices 0-4) have been queried
        # AND we already hold the minimum peer count. As add_peer is async,
        # `has_minimum_peers` might actually return a stale result, but that
        # will only increase the number of peers we have.
        if index >= 4 and peer_manager.has_minimum_peers():
            break
    reverify_all_peers()
|
package io.novafoundation.nova.feature_staking_impl.domain.validations.bond
import io.novafoundation.nova.common.validation.ValidationSystem
import io.novafoundation.nova.feature_wallet_api.domain.validation.EnoughToPayFeesValidation
import io.novafoundation.nova.feature_wallet_api.domain.validation.PositiveAmountValidation
/** Validates the account can pay fees for the bond-more operation. */
typealias BondMoreFeeValidation = EnoughToPayFeesValidation<BondMoreValidationPayload, BondMoreValidationFailure>

/** Validates the bond-more amount is positive (non-zero). */
typealias NotZeroBondValidation = PositiveAmountValidation<BondMoreValidationPayload, BondMoreValidationFailure>

/** Full validation pipeline for the bond-more operation. */
typealias BondMoreValidationSystem = ValidationSystem<BondMoreValidationPayload, BondMoreValidationFailure>
|
@extends("base")
@section('contenu')
<p>Le système MVC utilise un Controller, qui correspond au Design Pattern Command.</p>
@endsection
@section('title')
Command
@endsection
|
#!/bin/bash
# Run the MongoDB PHP get-started container against the given connection URI.
# Usage: ./script.sh [MONGODB_URI]   (prompts when the URI is not supplied)

MONGODB_URI=${1}

# Fix: quote the expansion — unquoted, an empty value makes `[ -z ]` evaluate
# the wrong expression, and a URI containing spaces breaks the test entirely.
if [ -z "${MONGODB_URI}" ]
then
  read -p "MONGODB URI (Required): " MONGODB_URI
fi

echo "Executing ... "

# Quote the env value for the same word-splitting reason as above.
docker run --rm -e MONGODB_URI="${MONGODB_URI}" \
    -v "$(pwd)":/workspace/php \
    -w /workspace/php ghcr.io/mongodb-developer/get-started-php:0.1 \
    "php getstarted.php"
|
package com.alamkanak.weekview
/**
 * Recomputes the time-column width and the header row height based on the
 * all-day events currently visible in the date range.
 */
internal class HeaderRowHeightUpdater<T>(
    private val config: WeekViewConfigWrapper,
    private val cache: EventCache<T>
) : Updater {

    // Horizontal scroll origin at the time of the last update; retained for the
    // change detection that is currently disabled (see Fixme below).
    private var previousHorizontalOrigin: Float? = null

    override val isRequired: Boolean
        get() {
            // Always recompute for now; the cheaper change-detection below is
            // disabled pending a fix.
            return true
            // Fixme
            /*val currentTimeColumnWidth = config.timeTextWidth + config.timeColumnPadding * 2
            val didTimeColumnChange = currentTimeColumnWidth != config.timeColumnWidth
            val didScrollHorizontally = previousHorizontalOrigin != config.currentOrigin.x
            return didTimeColumnChange || didScrollHorizontally*/
        }

    override fun update(drawingContext: DrawingContext) {
        previousHorizontalOrigin = config.currentOrigin.x
        // Time column is text width plus symmetric padding.
        config.timeColumnWidth = config.timeTextWidth + config.timeColumnPadding * 2
        refreshHeaderHeight(drawingContext)
    }

    // Header grows when any all-day event falls inside the visible date range.
    private fun refreshHeaderHeight(drawingContext: DrawingContext) {
        val dateRange = drawingContext.dateRangeWithStartPixels.map { it.first }
        val visibleEvents = cache.getAllDayEventsInRange(dateRange)
        config.hasEventInHeader = visibleEvents.isNotEmpty()
        config.refreshHeaderHeight()
    }
}
|
import logger from '../../utils/logger';
import getCacheKey from './cacheKey';
/**
 * Middleware factory: serve the response straight from cache when present,
 * otherwise flag the request so a later middleware can populate the cache.
 */
const getCache = (key, options = {}) => async (ctx, next) => {
  const resolvedKey = getCacheKey(ctx)(key, options);
  const cached = await ctx.cache.get(resolvedKey);

  if (!cached) {
    // Cache miss: record the key so setCache can store the eventual result.
    ctx.query.cacheKeys = [resolvedKey];
    ctx.query.shouldCache = true;
    await next();
    return;
  }

  logger.info(`[REQUEST-CACHE:GET][${resolvedKey}]`);
  ctx.body = {
    success: true,
    result: cached,
  };
};
/**
 * Middleware factory: store ctx.body.result under every key recorded by
 * getCache, honouring options.expire (defaults to a three-day TTL).
 */
const setCache = (options = {}) => async (ctx) => {
  const { cacheKeys, shouldCache } = ctx.query;
  const { result } = ctx.body;
  const expire = options.expire || 86400 * 3; // three days

  const shouldStore = cacheKeys && cacheKeys.length && result && shouldCache;
  if (!shouldStore) {
    return;
  }

  for (const cacheKey of cacheKeys) {
    logger.info(`[REQUEST-CACHE:SET][${cacheKey}]`);
    await ctx.cache.set(cacheKey, result, { expire });
  }
};
/**
 * Middleware factory: delete the given cache keys;
 * ctx.query.deleteKeys, when set, overrides the defaults.
 */
const removeCache = (keys = []) => async (ctx) => {
  const targets = ctx.query.deleteKeys || keys;
  for (const target of targets) {
    logger.info(`[REQUEST-CACHE:DEL][${target}]`);
    await ctx.cache.del(target);
  }
};
// Public middleware API of the request cache: read, write, invalidate.
export default {
  get: getCache,
  set: setCache,
  del: removeCache
};
|
<?php
namespace Home\Controller;
use Think\Controller;
/**
 * Shared base controller for the Home module; common behaviour for the
 * module's controllers belongs here. Currently intentionally empty.
 */
class BaseController extends Controller{

}
?>
|
import { Component } from '@angular/core';
import { TuiEditorOptions, TuiService } from 'ngx-tui';
@Component({
  selector: 'ngx-tui-root',
  template: `
    <div class="h-100 d-flex">
      <div class="bg-dark">
        <div class="card bg-dark">
          <div class="card-body">
            <div class="btn-group">
              <button (click)="storeHtml()" class="btn btn-outline-success">
                Save HTML
              </button>
              <button (click)="storeMarkdown()" class="btn btn-outline-success">
                Save Markdown
              </button>
            </div>
            <hr />
            <div class="btn-group">
              <button
                (click)="setPreviewStyleTab()"
                class="btn btn-outline-success"
              >
                Preview Style Tab
              </button>
              <button
                (click)="setPreviewStyleVertical()"
                class="btn btn-outline-success"
              >
                Preview Style Vertical
              </button>
            </div>
          </div>
          <div *ngIf="result" class="mt-3 bg-white p-2">
            <pre class="m-0">{{ result }}</pre>
          </div>
        </div>
      </div>
      <div class="flex-grow-1">
        <tui-editor [options]="options"></tui-editor>
      </div>
    </div>
  `,
})
export class AppComponent {
  // Seed document shown in the editor on load. The \` sequences keep the
  // embedded code fences from terminating this template literal.
  markdown = `# This is your Markdown
This is GraphQL
\`\`\`graphql
fragment UserDetails on User {
  username
  email
  avatarUrl
}
mutation Login($username: String! $password: String!) {
  login(username: $username password: $password) {
    token
    ...UserDetails
  }
}
\`\`\`
This is HTML
\`\`\`html
<header>
  <nav>
    <a href="/">Home</a>
  </nav>
</header>
\`\`\`
This is Markdown
\`\`\`markdown
# My Title
- an
- unordered
- list
1. an
1. ordered
1. list
**This** _is_ \`code\`.
\`\`\`
This is TypeScript
\`\`\`typescript
const key: string = 'value';
\`\`\`
`;

  // Editor configuration handed to <tui-editor> in the template.
  options: TuiEditorOptions = {
    initialValue: this.markdown,
    initialEditType: 'markdown',
    previewStyle: 'vertical',
    height: '100%',
  };

  // Last exported document (HTML or Markdown), rendered in the sidebar.
  result?: string;

  constructor(private readonly service: TuiService) {}

  /** Export the current editor contents as HTML into `result`. */
  public storeHtml() {
    this.result = this.service.getHtml();
  }

  /** Export the current editor contents as Markdown into `result`. */
  public storeMarkdown() {
    this.result = this.service.getMarkdown();
  }

  /** Switch the preview to the tabbed layout. */
  public setPreviewStyleTab() {
    this.service.changePreviewStyle('tab');
  }

  /** Switch the preview to the side-by-side layout. */
  public setPreviewStyleVertical() {
    this.service.changePreviewStyle('vertical');
  }
}
|
package com.larrynguyen.notewall.menu
import android.os.Parcel
import android.os.Parcelable
import android.support.annotation.DrawableRes
import android.support.annotation.IdRes
import android.support.annotation.StringRes
/**
 * A single menu entry: a view id, a string resource for the title and an
 * optional drawable resource for the icon (-1 = no icon).
 *
 * Parcelable: fields are written/read in declaration order (id, title, icon) —
 * keep the two sides in sync when changing the class.
 */
data class MenuItem(@IdRes val id: Int,
                    @StringRes val title: Int,
                    @DrawableRes val icon: Int = -1) : Parcelable {

    constructor(parcel: Parcel) : this(
            parcel.readInt(),
            parcel.readInt(),
            parcel.readInt())

    override fun writeToParcel(parcel: Parcel, flags: Int) {
        parcel.writeInt(id)
        parcel.writeInt(title)
        parcel.writeInt(icon)
    }

    // No file descriptors in the parcel.
    override fun describeContents() = 0

    companion object CREATOR : Parcelable.Creator<MenuItem> {
        override fun createFromParcel(parcel: Parcel) = MenuItem(parcel)

        override fun newArray(size: Int) = arrayOfNulls<MenuItem?>(size)
    }
}
|
<?php

//fetch_data.php
//include('config/config.php');

$connect = new PDO("mysql:host=localhost;dbname=cinema_db", "root", "");

// Request payload is untrusted JSON: { action, user_profile[, color] }.
$form_data = json_decode(file_get_contents("php://input"));

// Always return an array so json_encode() below never sees an undefined variable.
$data = array();

if ($form_data->action == 'getcolor') {
    // Fix: bound parameters instead of string concatenation — the previous
    // code interpolated user input directly into the SQL (injection risk).
    $query = "SELECT colortheme FROM usersettingstable WHERE user_fk = :user_fk";
    $statement = $connect->prepare($query);
    if ($statement->execute(array(':user_fk' => $form_data->user_profile))) {
        while ($row = $statement->fetch(PDO::FETCH_ASSOC)) {
            $data[] = $row['colortheme'];
        }
    }
} elseif ($form_data->action == 'setcolor') {
    $query = "UPDATE usersettingstable SET colortheme = :color WHERE user_fk = :user_fk";
    $statement = $connect->prepare($query);
    $statement->execute(array(
        ':color' => $form_data->color,
        ':user_fk' => $form_data->user_profile,
    ));

    // Return the freshly stored value, mirroring the getcolor branch.
    $query = "SELECT colortheme FROM usersettingstable WHERE user_fk = :user_fk";
    $statement = $connect->prepare($query);
    if ($statement->execute(array(':user_fk' => $form_data->user_profile))) {
        while ($row = $statement->fetch(PDO::FETCH_ASSOC)) {
            $data[] = $row['colortheme'];
        }
    }
}

echo json_encode($data);

?>
|
package day10
import java.io.File
// Opening bracket -> matching closing bracket.
val chunkPairs = mapOf(
    '(' to ')',
    '[' to ']',
    '{' to '}',
    '<' to '>',
)

// Score awarded for the first illegal (corrupting) character on a line.
val illegalCharacterPoints = mapOf(
    ')' to 3,
    ']' to 57,
    '}' to 1197,
    '>' to 25137,
)

// Per-character score used when totalling an autocompletion.
val autocompleteCharacterPoints = mapOf(
    ')' to 1,
    ']' to 2,
    '}' to 3,
    '>' to 4,
)
fun main() {
    val lines = File("src/main/kotlin/day10/input.txt").readLines()
    // partition: first = corrupted lines (carry an illegal character),
    // second = incomplete lines (carry the closers needed to finish them).
    val autoCompletesByState = lines
        .map { it.toCharArray() }
        .map { autocomplete(it) }
        .partition { it.failed() }
    // Part 1: sum the score of each corrupted line's first illegal character.
    println(autoCompletesByState.first
        .mapNotNull { it.illegalChar }
        .mapNotNull { illegalCharacterPoints[it] }
        .sumOf { it }
    )
    // Part 2: score each completion (acc * 5 + char score per closer, Long to
    // avoid overflow) and take the median of the sorted scores.
    println(autoCompletesByState.second
        .mapNotNull { it.missedChars }
        .map { it.fold(0L) { acc, unit -> acc * 5 + (autocompleteCharacterPoints[unit] ?: 0) } }
        .sorted().let { it[it.size / 2] })
}
/**
 * Scan [line], pushing opening brackets on a stack; a closer that mismatches
 * the top of the stack makes the line corrupted and is returned as
 * [Autocomplete.illegalChar]. Otherwise the closers required to complete the
 * line are returned innermost-first in [Autocomplete.missedChars].
 */
fun autocomplete(line: CharArray): Autocomplete {
    val openChunks = ArrayDeque<Char>()
    line.forEach { symbol ->
        if (symbol in chunkPairs.keys) {
            openChunks.addLast(symbol)
        } else {
            val opener = openChunks.removeLastOrNull()
            // A closer arriving on an empty stack is silently ignored,
            // mirroring the original behaviour.
            if (opener != null && chunkPairs[opener] != symbol) {
                return Autocomplete(illegalChar = symbol)
            }
        }
    }
    return Autocomplete(missedChars = openChunks.mapNotNull { chunkPairs[it] }.reversed())
}
/**
 * Result of scanning one line: [illegalChar] is set when the line is
 * corrupted; otherwise [missedChars] lists the closers needed to complete it.
 */
data class Autocomplete(val illegalChar: Char? = null, val missedChars: List<Char>? = null) {
    // A scan "failed" when a mismatched closing character was found.
    fun failed() = illegalChar != null
}
|
/*
* (c) Copyright 2016 Hewlett Packard Enterprise Development LP
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cogx.cogmath.algebra.complex
import cogx.cogmath.algebra.real.Tensor3
import cogx.cogmath.geometry.Shape
/** An order-3 complex tensor.
*
* @author Greg Snider
*/
class ComplexTensor3(val real: Tensor3, val imaginary: Tensor3) {
  // The real and imaginary parts must have identical shapes.
  require(real.layers == imaginary.layers)
  require(real.rows == imaginary.rows)
  require(real.columns == imaginary.columns)
  val layers = real.layers
  val rows = real.rows
  val columns = real.columns

  /** Create a complex tensor 3d from a real tensor 3d (zero imaginary part). */
  def this(real: Tensor3) =
    this(real, new Tensor3(real.layers, real.rows, real.columns))

  /** Create a zero-filled complex tensor 3d. */
  def this(layers: Int, rows: Int, columns: Int) =
    this(new Tensor3(layers, rows, columns), new Tensor3(layers, rows, columns))

  /** Deep copy of this tensor. */
  def copy = new ComplexTensor3(real.copy, imaginary.copy)

  /** Apply `f` element-wise, returning a new tensor. */
  def map(f: (Complex) => Complex): ComplexTensor3 = {
    val result = new ComplexTensor3(layers, rows, columns)
    for (layer <- 0 until layers; row <- 0 until rows; col <- 0 until columns) {
      val input = Complex(real(layer, row, col), imaginary(layer, row, col))
      result(layer, row, col) = f(input)
    }
    result
  }

  /** Tensor with both parts flipped (delegates to `Tensor3.flip`). */
  def flip = new ComplexTensor3(real.flip, imaginary.flip)

  /** Read location (`layer`, `row`, `col`). */
  def apply(layer: Int, row: Int, col: Int) =
    Complex(real(layer, row, col), imaginary(layer, row, col))

  /** Write location (`layer`, `row`, `col`) with `value`. */
  def update(layer: Int, row: Int, col: Int, value: Complex) {
    real(layer, row, col) = value.real
    imaginary(layer, row, col) = value.imaginary
  }

  /** Expand the matrix, optionally extending the border into the expanded
    * region.
    *
    * This operation is a key part of the FFT. The new matrix is of
    * shape `bigShape` and element (0, 0) of this is anchored at
    * (0, 0) in the larger matrix. If "borderFill" is true, then the four
    * edges of the matrix are extended evenly in all directions, as though
    * the bigger matrix were actually a torus with opposite edges touching.
    */
  def expand(bigShape: Shape, borderFill: Boolean = false): ComplexTensor3 = {
    // Fix: the error message previously referred to the class's old name
    // "ComplexMatrix3D".
    require(!borderFill, "BorderFill not yet supported for ComplexTensor3.")
    new ComplexTensor3(real.expand(bigShape), imaginary.expand(bigShape))
  }

  /** Shift the values in "this" as though "this" were a torus with wrap around
    * at the edges, top to bottom, left to right.
    *
    * The shifting is negative, so
    * the value in this at location (`deltaRows`, `deltaColumns`) gets shifted
    * to location (0, 0) in the result.
    */
  def shift(deltaX: Int, deltaY: Int, deltaZ: Int): ComplexTensor3 = {
    new ComplexTensor3(real.shift(deltaX, deltaY, deltaZ),
                       imaginary.shift(deltaX, deltaY, deltaZ))
  }

  /** Print both parts to stdout for debugging. */
  def print {
    // Fix: headers previously used the class's old name "ComplexMatrix3D".
    println("ComplexTensor3 real:")
    real.print
    println("ComplexTensor3 imaginary:")
    imaginary.print
  }
}
|
# smart-queue
## Description
When an action enters the queue, check if there's another action inside the queue that targets
the same resource (this will need a unique identifier). If such an action exists, check its
method. The method can be one of the following: **CREATE**, **READ**, **UPDATE**, or **DELETE** (CRUD). Depending
on the method, we'll apply one kind of side-effect or another.
## Side-Effects
> New action enters outbox -> Existing outbox action for same resource ->> outbox side-effect to
apply
1. UPDATE action -> CREATE action ->> Merge UPDATE action into CREATE action.
2. DELETE action -> CREATE action ->> Remove CREATE action from outbox.
3. DELETE action -> UPDATE action ->> Replace UPDATE action with DELETE.
4. READ action -> READ action ->> Squash READ actions into a single action.
## Considerations
1. Behaviour should be effect agnostic, can't depend in default effect method.
2. When creating a new resource (CREATE), the app won't know the id of the resource. Another way
of identifying resourced must be devised.
3. DELETE over CREATE must only delete the exact same resource (which is not straightforward). For
example, if there are two TODOs with the CREATE method, a new DELETE action should be able to
remove only one of those two actions, and it should be the correct one.
|
# Coders' Workshop: July 17, 2019
## Announcements
* Monday, 7/22: [Vue.js Meetup](https://www.meetup.com/Denver-Vue-js-Meetup/events/ltwpwmyzkbdc/) - Lightning Talks!
* Monday, 7/29: [CO.js Meetup](https://www.meetup.com/Bootcampers-Collective/events/ztvncryzkbmc/)
## Appetizer
What would you charge to wash all the windows in Denver?
## Topic: Sorting Algorithms (w/ a little ES6)
[Slides](https://slides.com/bbyunis/coder-s-workshop-2-5-7-9-12-14)
## Challenges
[Group Anagrams](../../../Coding-Challenges/groupAnagrams)
[Bubble Sort](../../../Coding-Challenges/bubbleSort)
[Quick Sort](../../../Coding-Challenges/quickSort)
|
class ShelfFinder::Result
  # Wraps a shelf: its identifier plus the unique segments collected for it.
  class ShelfResult < BaseResult
    attr_reader :identifier, :segments

    # Capture the shelf's identifier and start with no segments.
    def initialize(shelf)
      @identifier = shelf.identifier
      @segments = []
    end

    # Record +segment+, skipping duplicates; insertion order is preserved.
    def add_segment(segment)
      return if @segments.include?(segment)

      @segments << segment
    end
  end
end
|
/*
* %CopyrightBegin%
*
* Copyright Ericsson AB 1996-2016. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* %CopyrightEnd%
*
*/
/*
* Function: Makes it possible to send and receive Erlang
* messages from the (Unix) command line.
* Note: We don't free any memory at all since we only
* live for a short while.
*
*/
#ifdef __WIN32__
#include <winsock2.h>
#include <direct.h>
#include <windows.h>
#include <winbase.h>
#elif VXWORKS
#include <stdio.h>
#include <string.h>
#include <vxWorks.h>
#include <hostLib.h>
#include <selectLib.h>
#include <ifLib.h>
#include <sockLib.h>
#include <taskLib.h>
#include <inetLib.h>
#include <unistd.h>
#include <sys/socket.h>
#include <netinet/in.h>
#include <netinet/tcp.h>
#include <time.h>
#else /* unix */
#include <sys/types.h>
#include <sys/uio.h>
#include <sys/time.h>
#include <unistd.h>
#include <sys/param.h>
#include <netdb.h>
#include <sys/times.h>
#include <sys/socket.h>
#include <netinet/in.h>
#include <arpa/inet.h>
#if TIME_WITH_SYS_TIME
# include <sys/time.h>
# include <time.h>
#else
# if HAVE_SYS_TIME_H
# include <sys/time.h>
# else
# include <time.h>
# endif
#endif
#endif
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <ctype.h>
#include <fcntl.h>
#include <signal.h>
#include "ei.h"
#include "ei_resolve.h"
#include "erl_start.h" /* FIXME remove dependency */
#ifdef __WIN32__
static void initWinSock(void);
#endif
/*
* Some nice global variables
* (I don't think "nice" is the right word actually... -gordon)
*/
/* FIXME problem for threaded ? */
/* Aggregates the command-line switches parsed in main(); initialised to all
 * zeros/NULLs ({0}) so every flag defaults to "off"/unset. */
struct call_flags {
    int startp;          /* -s: start an Erlang system when connecting fails */
    int cookiep;         /* set when -c supplied; see cookie below           */
    int modp;            /* -m: read an Erlang module from stdin             */
    int evalp;           /* -e: evaluate input (mutually exclusive with -m)  */
    int randomp;         /* -r: randomise the hidden C node's name           */
    int use_long_name;   /* indicates if -name was used, else -sname or -n */
    int debugp;          /* -d: debug output                                 */
    int verbosep;        /* -v: verbose output                               */
    int haltp;           /* -q: halt the remote node (rpc to erlang:halt)    */
    char *cookie;        /* -c COOKIE: cookie used when connecting           */
    char *node;          /* target node name (-n / -name / -sname)           */
    char *hidden;        /* -h NAME: name of this hidden C node              */
    char *apply;         /* -a STR: apply spec (see split_apply_string)      */
    char *script;        /* -x ARG: start-script argument — use not shown
                          * in this excerpt; TODO confirm against erl_start  */
};
static void usage_arg(const char *progname, const char *switchname);
static void usage_error(const char *progname, const char *switchname);
static void usage(const char *progname);
static int get_module(char **mbuf, char **mname);
static int do_connect(ei_cnode *ec, char *nodename, struct call_flags *flags);
static int read_stdin(char **buf);
static void split_apply_string(char *str, char **mod,
char **fun, char **args);
static void* ei_chk_malloc(size_t size);
static void* ei_chk_calloc(size_t nmemb, size_t size);
static void* ei_chk_realloc(void *old, size_t size);
static char* ei_chk_strdup(char *s);
/***************************************************************************
*
* XXXXX
*
***************************************************************************/
/* FIXME isn't VxWorks to handle arguments differently? */
#if !defined(VXWORKS)
int main(int argc, char *argv[])
#else
int erl_call(int argc, char **argv)
#endif
{
int i = 1,fd,creation;
struct hostent *hp;
char host_name[EI_MAXHOSTNAMELEN+1];
char nodename[MAXNODELEN+1];
char *p = NULL;
char *ct = NULL; /* temporary used when truncating nodename */
int modsize = 0;
char *host = NULL;
char *module = NULL;
char *modname = NULL;
struct call_flags flags = {0}; /* Default 0 and NULL in all fields */
char* progname = argv[0];
ei_cnode ec;
/* Get the command line options */
while (i < argc) {
if (argv[i][0] != '-') {
usage_error(progname, argv[i]);
}
if (strcmp(argv[i], "-sname") == 0) { /* -sname NAME */
if (i+1 >= argc) {
usage_arg(progname, "-sname ");
}
flags.node = ei_chk_strdup(argv[i+1]);
i++;
flags.use_long_name = 0;
} else if (strcmp(argv[i], "-name") == 0) { /* -name NAME */
if (i+1 >= argc) {
usage_arg(progname, "-name ");
}
flags.node = ei_chk_strdup(argv[i+1]);
i++;
flags.use_long_name = 1;
} else {
if (strlen(argv[i]) != 2) {
usage_error(progname, argv[i]);
}
switch (argv[i][1]) {
case 's':
flags.startp = 1;
break;
case 'q':
flags.haltp = 1;
break;
case 'v':
flags.verbosep = 1;
break;
case 'd':
flags.debugp = 1;
break;
case 'r':
flags.randomp = 1;
break;
case 'e':
flags.evalp = 1;
break;
case 'm':
flags.modp = 1;
break;
case 'c':
if (i+1 >= argc) {
usage_arg(progname, "-c ");
}
flags.cookiep = 1;
flags.cookie = ei_chk_strdup(argv[i+1]);
i++;
break;
case 'n':
if (i+1 >= argc) {
usage_arg(progname, "-n ");
}
flags.node = ei_chk_strdup(argv[i+1]);
flags.use_long_name = 1;
i++;
break;
case 'h':
if (i+1 >= argc) {
usage_arg(progname, "-h ");
}
flags.hidden = ei_chk_strdup(argv[i+1]);
i++;
break;
case 'x':
if (i+1 >= argc) {
usage_arg(progname, "-x ");
}
flags.script = ei_chk_strdup(argv[i+1]);
i++;
break;
case 'a':
if (i+1 >= argc) {
usage_arg(progname, "-a ");
}
flags.apply = ei_chk_strdup(argv[i+1]);
i++;
break;
case '?':
usage(progname);
default:
usage_error(progname, argv[i]);
}
}
i++;
} /* while */
/*
* Can't have them both !
*/
if (flags.modp && flags.evalp) {
usage(progname);
}
/*
* Read an Erlang module from stdin.
*/
if (flags.modp) {
modsize = get_module(&module, &modname);
}
if (flags.verbosep || flags.debugp) {
fprintf(stderr,"erl_call: "
"node = %s\nCookie = %s\n"
"flags = %s %s %s\n"
"module: name = %s , size = %d\n"
"apply = %s\n",
(flags.node ? flags.node : ""),
(flags.cookie ? flags.cookie : ""),
(flags.startp ? "startp" : ""),
(flags.verbosep ? "verbosep" : ""),
(flags.debugp ? "debugp" : ""),
(modname ? modname : ""), modsize,
(flags.apply ? flags.apply : "" ));
}
/*
* What we, at least, requires !
*/
if (flags.node == NULL) {
usage(progname);
}
if (!flags.cookiep) {
flags.cookie = NULL;
}
/* FIXME decide how many bits etc or leave to connect_xinit? */
creation = (time(NULL) % 3) + 1; /* "random" */
if (flags.hidden == NULL) {
/* As default we are c17@gethostname */
i = flags.randomp ? (time(NULL) % 997) : 17;
flags.hidden = (char *) ei_chk_malloc(10 + 2 ); /* c17 or cXYZ */
#if defined(VXWORKS)
sprintf(flags.hidden, "c%d",
i < 0 ? (int) taskIdSelf() : i);
#else
sprintf(flags.hidden, "c%d",
i < 0 ? (int) getpid() : i);
#endif
}
{
/* A name for our hidden node was specified */
char h_hostname[EI_MAXHOSTNAMELEN+1];
char h_nodename[MAXNODELEN+1];
char *h_alivename=flags.hidden;
struct in_addr h_ipadr;
char* ct;
#ifdef __WIN32__
/*
* FIXME Extremly ugly, but needed to get ei_gethostbyname() below
* to work.
*/
initWinSock();
#endif
if (gethostname(h_hostname, EI_MAXHOSTNAMELEN) < 0) {
fprintf(stderr,"erl_call: failed to get host name: %d\n", errno);
exit(1);
}
if ((hp = ei_gethostbyname(h_hostname)) == 0) {
fprintf(stderr,"erl_call: can't resolve hostname %s\n", h_hostname);
exit(1);
}
/* If shortnames, cut off the name at first '.' */
if (flags.use_long_name == 0 && (ct = strchr(hp->h_name, '.')) != NULL) {
*ct = '\0';
}
strncpy(h_hostname, hp->h_name, EI_MAXHOSTNAMELEN);
h_hostname[EI_MAXHOSTNAMELEN] = '\0';
memcpy(&h_ipadr.s_addr, *hp->h_addr_list, sizeof(struct in_addr));
if (strlen(h_alivename) + strlen(h_hostname) + 2 > sizeof(h_nodename)) {
fprintf(stderr,"erl_call: hostname too long: %s\n", h_hostname);
exit(1);
}
sprintf(h_nodename, "%s@%s", h_alivename, h_hostname);
if (ei_connect_xinit(&ec, h_hostname, h_alivename, h_nodename,
(Erl_IpAddr)&h_ipadr, flags.cookie,
(short) creation) < 0) {
fprintf(stderr,"erl_call: can't create C node %s; %d\n",
h_nodename, erl_errno);
exit(1);
}
}
if ((p = strchr((const char *)flags.node, (int) '@')) == 0) {
strcpy(host_name, ei_thishostname(&ec));
host = host_name;
} else {
*p = 0;
host = p+1;
}
/*
* Expand name to a real name (may be ip-address)
*/
/* FIXME better error string */
if ((hp = ei_gethostbyname(host)) == 0) {
fprintf(stderr,"erl_call: can't ei_gethostbyname(%s)\n", host);
exit(1);
}
/* If shortnames, cut off the name at first '.' */
if (flags.use_long_name == 0 && (ct = strchr(hp->h_name, '.')) != NULL) {
*ct = '\0';
}
strncpy(host_name, hp->h_name, EI_MAXHOSTNAMELEN);
host_name[EI_MAXHOSTNAMELEN] = '\0';
if (strlen(flags.node) + strlen(host_name) + 2 > sizeof(nodename)) {
fprintf(stderr,"erl_call: nodename too long: %s\n", flags.node);
exit(1);
}
sprintf(nodename, "%s@%s", flags.node, host_name);
/*
* Try to connect. Start an Erlang system if the
* start option is on and no system is running.
*/
if (flags.startp && !flags.haltp) {
fd = do_connect(&ec, nodename, &flags);
} else if ((fd = ei_connect(&ec, nodename)) < 0) {
/* We failed to connect ourself */
/* FIXME do we really know we failed because of node not up? */
if (flags.haltp) {
exit(0);
} else {
fprintf(stderr,"erl_call: failed to connect to node %s\n",
nodename);
exit(1);
}
}
/* If we are connected and the halt switch is set */
if (fd && flags.haltp) {
int i = 0;
char *p;
ei_x_buff reply;
ei_encode_empty_list(NULL, &i);
p = (char *)ei_chk_malloc(i);
i = 0; /* Reset */
ei_encode_empty_list(p, &i);
ei_x_new_with_version(&reply);
/* FIXME if fails we want to exit != 0 ? */
ei_rpc(&ec, fd, "erlang", "halt", p, i, &reply);
free(p);
ei_x_free(&reply);
exit(0);
}
if (flags.verbosep) {
fprintf(stderr,"erl_call: we are now connected to node \"%s\"\n",
nodename);
}
/*
* Compile the module read from stdin.
*/
if (flags.modp && (modname != NULL)) {
char fname[256];
if (strlen(modname) + 4 + 1 > sizeof(fname)) {
fprintf(stderr,"erl_call: module name too long: %s\n", modname);
exit(1);
}
strcpy(fname, modname);
strcat(fname, ".erl");
/*
* ei_format("[~s,~w]", fname, erl_mk_binary(module, modsize));
*/
{
int i = 0;
char *p;
ei_x_buff reply;
ei_encode_list_header(NULL, &i, 2);
ei_encode_string(NULL, &i, fname);
ei_encode_binary(NULL, &i, module, modsize);
ei_encode_empty_list(NULL, &i);
p = (char *)ei_chk_malloc(i);
i = 0; /* Reset */
ei_encode_list_header(p, &i, 2);
ei_encode_string(p, &i, fname);
ei_encode_binary(p, &i, module, modsize);
ei_encode_empty_list(p, &i);
ei_x_new_with_version(&reply);
if (ei_rpc(&ec, fd, "file", "write_file", p, i, &reply) < 0) {
free(p);
ei_x_free(&reply);
fprintf(stderr,"erl_call: can't write to source file %s\n",
fname);
exit(1);
}
free(p);
ei_x_free(&reply);
}
/* Compile AND load file on other node */
{
int i = 0;
char *p;
ei_x_buff reply;
ei_encode_list_header(NULL, &i, 2);
ei_encode_atom(NULL, &i, fname);
ei_encode_empty_list(NULL, &i);
ei_encode_empty_list(NULL, &i);
p = (char *)ei_chk_malloc(i);
i = 0; /* Reset */
ei_encode_list_header(p, &i, 2);
ei_encode_atom(p, &i, fname);
ei_encode_empty_list(p, &i);
ei_encode_empty_list(p, &i);
ei_x_new_with_version(&reply);
/* erl_format("[~a,[]]", modname) */
if (ei_rpc(&ec, fd, "c", "c", p, i, &reply) < 0) {
free(p);
ei_x_free(&reply);
fprintf(stderr,"erl_call: can't compile file %s\n", fname);
}
free(p);
/* FIXME complete this code
FIXME print out error message as term
if (!erl_match(erl_format("{ok,_}"), reply)) {
fprintf(stderr,"erl_call: compiler errors\n");
}
*/
ei_x_free(&reply);
}
}
/*
* Eval the Erlang functions read from stdin/
*/
if (flags.evalp) {
char *evalbuf;
int len;
len = read_stdin(&evalbuf);
{
int i = 0;
char *p;
ei_x_buff reply;
ei_encode_list_header(NULL, &i, 1);
ei_encode_binary(NULL, &i, evalbuf, len);
ei_encode_empty_list(NULL, &i);
p = (char *)ei_chk_malloc(i);
i = 0; /* Reset */
ei_encode_list_header(p, &i, 1);
ei_encode_binary(p, &i, evalbuf, len);
ei_encode_empty_list(p, &i);
ei_x_new_with_version(&reply);
/* erl_format("[~w]", erl_mk_binary(evalbuf,len))) */
if (ei_rpc(&ec, fd, "lib", "eval_str", p, i, &reply) < 0) {
fprintf(stderr,"erl_call: evaluating input failed: %s\n",
evalbuf);
free(p);
free(evalbuf); /* Allocated in read_stdin() */
ei_x_free(&reply);
exit(1);
}
i = 0;
ei_print_term(stdout,reply.buff,&i);
free(p);
free(evalbuf); /* Allocated in read_stdin() */
ei_x_free(&reply);
}
}
/*
* Any Erlang call to be made ?
*/
if (flags.apply != NULL) {
char *mod,*fun,*args;
ei_x_buff e, reply;
split_apply_string(flags.apply, &mod, &fun, &args);
if (flags.verbosep) {
fprintf(stderr,"erl_call: module = %s, function = %s, args = %s\n",
mod, fun, args);
}
ei_x_new(&e); /* No version to ei_rpc() */
if (ei_x_format_wo_ver(&e, args) < 0) {
/* FIXME no error message and why -1 ? */
exit(-1);
}
ei_x_new_with_version(&reply);
if (ei_rpc(&ec, fd, mod, fun, e.buff, e.index, &reply) < 0) {
/* FIXME no error message and why -1 ? */
ei_x_free(&e);
ei_x_free(&reply);
exit(-1);
} else {
int i = 0;
ei_print_term(stdout,reply.buff,&i);
ei_x_free(&e);
ei_x_free(&reply);
}
}
return(0);
}
/***************************************************************************
*
* XXXXX
*
***************************************************************************/
/*
* This function does only return on success.
*/
static int do_connect(ei_cnode *ec, char *nodename, struct call_flags *flags)
{
int fd;
int start_flags;
int r;
start_flags = ERL_START_ENODE |
(flags->use_long_name? ERL_START_LONG : 0) |
(flags->verbosep? ERL_START_VERBOSE : 0) |
(flags->debugp? ERL_START_DEBUG : 0);
if ((fd = ei_connect(ec, nodename)) >= 0) {
/* success */
if (flags->verbosep) {
fprintf(stderr,"erl_call: now connected to node %s\n", nodename);
}
} else {
char alive[EI_MAXALIVELEN+1];
char *hostname;
struct hostent *h;
char *cookieargs[3];
char **args;
cookieargs[0] = "-setcookie";
cookieargs[1] = flags->cookie;
cookieargs[2] = NULL;
args = (flags->cookie) ? cookieargs : NULL;
if (!(hostname = strrchr(nodename,'@'))) {
return ERL_BADARG;
}
strncpy(alive,nodename,hostname-nodename);
alive[hostname-nodename] = 0x0;
hostname++;
h = ei_gethostbyname(hostname);
if ((r=erl_start_sys(ec,alive,(Erl_IpAddr)(h->h_addr_list[0]),
start_flags,flags->script,args)) < 0) {
fprintf(stderr,"erl_call: unable to start node, error = %d\n", r);
exit(1);
}
if ((fd=ei_connect(ec, nodename)) >= 0) {
/* success */
if (flags->verbosep) {
fprintf(stderr,"erl_call: now connected to node \"%s\"\n",
nodename);
}
} else {
/* (failure) */
switch (fd) {
case ERL_NO_DAEMON:
fprintf(stderr,"erl_call: no epmd running\n");
exit(1);
case ERL_CONNECT_FAIL:
fprintf(stderr,"erl_call: connect failed\n");
exit(1);
case ERL_NO_PORT:
fprintf(stderr,"erl_call: node is not running\n");
exit(1);
case ERL_TIMEOUT:
fprintf(stderr,"erl_call: connect timed out\n");
exit(1);
default:
fprintf(stderr,"erl_call: error during connect\n");
exit(1);
}
}
}
return fd;
} /* do_connect */
/* SKIP_SPACE: advance s past whitespace.
 * EAT: advance s past one non-whitespace token (stops at NUL). */
#define SKIP_SPACE(s) while(isspace((int)*(s))) (s)++
#define EAT(s) while (!isspace((int)*(s)) && (*(s) != '\0')) (s)++

/*
 * Split the -a argument "Mod [Fun [Args]]" into three heap-allocated
 * strings.  A missing function defaults to "start" and a missing
 * argument list to "[]".  Exits with an error if the string is empty.
 * The caller receives ownership of *mod, *fun and *args.
 */
static void split_apply_string(char *str,
                               char **mod,
                               char **fun,
                               char **args)
{
    char *begin=str;
    char *start="start";
    char *empty_list="[]";
    int len;

    SKIP_SPACE(str);
    if (*str == '\0') {
        fprintf(stderr,"erl_call: wrong format of apply string (1)\n");
        exit(1);
    }

    /* BUG FIX: start the module token after the skipped whitespace;
     * the original kept `begin` at the string start, so any leading
     * blanks were copied into the module name. */
    begin = str;
    EAT(str);
    len = str-begin;
    *mod = (char *) ei_chk_calloc(len + 1, sizeof(char));
    memcpy(*mod, begin, len);

    SKIP_SPACE(str);
    if (*str == '\0') {
        /* Only a module was given: default to Mod:start([]). */
        *fun = ei_chk_strdup(start);
        *args = ei_chk_strdup(empty_list);
        return;
    }

    /* Second token: the function name. */
    begin = str;
    EAT(str);
    len = str-begin;
    *fun = (char *) ei_chk_calloc(len + 1, sizeof(char));
    memcpy(*fun, begin, len);

    SKIP_SPACE(str);
    if (*str == '\0') {
        /* No argument list given: default to []. */
        *args = ei_chk_strdup(empty_list);
        return;
    }

    /* Everything remaining is the argument list, taken verbatim. */
    *args = ei_chk_strdup(str);
    return;
} /* split_apply_string */
/*
 * Read from stdin until EOF is reached, growing the buffer as needed.
 * The buffer is allocated here and handed to the caller via *buf; the
 * number of bytes read is returned.  The buffer is NOT NUL-terminated,
 * but always has at least one byte of spare capacity past the data.
 */
static int read_stdin(char **buf)
{
    int capacity = BUFSIZ;   /* current allocation size */
    int total = 0;           /* bytes read so far */
    char *data = (char *) ei_chk_malloc(capacity);
    int n;

    for (;;) {
        n = read(0, data + total, capacity - total);
        if (n < 0) {
            fprintf(stderr,"erl_call: can't read stdin, errno = %d", errno);
            exit(1);
        }
        if (n == 0)
            break;             /* EOF */
        total += n;
        /* Keep at least 50 spare bytes so the next read has room. */
        if (total + 50 > capacity) {
            capacity = total * 2;
            data = (char *) ei_chk_realloc(data, capacity);
        }
    }

    *buf = data;
    return total;
} /* read_stdin */
/*
 * Read an Erlang module from stdin into *mbuf and try to extract the
 * module name from a "-module(Name)" attribute into *mname.
 *
 * Returns the number of bytes read into *mbuf.  Ownership of the buffer
 * passes to the caller, who uses it as the module source afterwards.
 *
 * NOTE(review): when no "-module(" attribute is found, *mname is left
 * untouched -- the caller must have initialized it (e.g. to NULL).
 */
static int get_module(char **mbuf, char **mname)
{
    char *tmp;
    int len,i;

    len = read_stdin(mbuf);
    /* BUG FIX: read_stdin() does not NUL-terminate, but strstr() below
     * requires a terminated string.  read_stdin() always leaves spare
     * capacity past the data, so this store is in bounds. */
    (*mbuf)[len] = '\0';

    /*
     * Now, get the module name.
     */
    if ((tmp = strstr(*mbuf, "-module(")) != NULL) {
        char *start;
        tmp += strlen("-module(");
        while ((*tmp) == ' ') tmp++; /* eat space */
        start = tmp;
        /* Module names consist of alphanumerics and underscores. */
        while (isalnum((int)*tmp) || (*tmp == '_')) {
            tmp++;
        }
        i = tmp - start;
        *mname = (char *) ei_chk_calloc(i+1, sizeof(char));
        memcpy(*mname, start, i);
    }

    /* BUG FIX: the original called free(*mbuf) here, but the caller
     * keeps using the buffer as the module source (ei_encode_binary in
     * main), which was a use-after-free.  The caller now owns it. */
    return len;
} /* get_module */
/***************************************************************************
*
* Different error reporting functions that output usage
*
***************************************************************************/
/* Print the usage banner and option summary on stderr WITHOUT exiting.
 * Shared by usage(), usage_arg() and usage_error(). */
static void usage_noexit(const char *progname) {
    fprintf(stderr,"\nUsage: %s [-[demqrsv]] [-c Cookie] [-h HiddenName] \n", progname);
    fprintf(stderr," [-x ErlScript] [-a [Mod [Fun [Args]]]]\n");
    fprintf(stderr," (-n Node | -sname Node | -name Node)\n\n");
    /* Quoting of the -a example differs per platform shell conventions. */
#ifdef __WIN32__
    fprintf(stderr," where: -a apply(Mod,Fun,Args) (e.g -a \"erlang length [[a,b,c]]\"\n");
#else
    fprintf(stderr," where: -a apply(Mod,Fun,Args) (e.g -a 'erlang length [[a,b,c]]'\n");
#endif
    fprintf(stderr," -c cookie string; by default read from ~/.erlang.cookie\n");
    fprintf(stderr," -d direct Erlang output to ~/.erl_call.out.<Nodename>\n");
    fprintf(stderr," -e evaluate contents of standard input (e.g echo \"X=1,Y=2,{X,Y}.\"|erl_call -e ...)\n");
    fprintf(stderr," -h specify a name for the erl_call client node\n");
    fprintf(stderr," -m read and compile Erlang module from stdin\n");
    fprintf(stderr," -n name of Erlang node, same as -name\n");
    fprintf(stderr," -name name of Erlang node, expanded to a fully qualified\n");
    fprintf(stderr," -sname name of Erlang node, short form will be used\n");
    fprintf(stderr," -q halt the Erlang node (overrides the -s switch)\n");
    fprintf(stderr," -r use a random name for the erl_call client node\n");
    fprintf(stderr," -s start a new Erlang node if necessary\n");
    fprintf(stderr," -v verbose mode, i.e print some information on stderr\n");
    fprintf(stderr," -x use specified erl start script, default is erl\n");
}
/* Report a missing option argument, print the usage text and terminate. */
static void usage_arg(const char *prog, const char *opt) {
    fprintf(stderr, "Missing argument(s) for \'%s\'.\n", opt);
    usage_noexit(prog);
    exit(1);
}
/* Report an illegal option/argument, print the usage text and terminate. */
static void usage_error(const char *prog, const char *opt) {
    fprintf(stderr, "Illegal argument \'%s\'.\n", opt);
    usage_noexit(prog);
    exit(1);
}
/* Print the full usage text and exit successfully (help requested). */
static void usage(const char *prog) {
    usage_noexit(prog);
    exit(0);
}
/***************************************************************************
*
* OS specific functions
*
***************************************************************************/
#ifdef __WIN32__
/*
 * FIXME This should not be here. This is a quick fix to make erl_call
 * work at all on Windows NT.
 */
/* One-shot WinSock 1.1 initialization.  NOTE(review): on WSAStartup
 * failure this only prints a diagnostic and continues -- confirm
 * whether a hard exit would be more appropriate. */
static void initWinSock(void)
{
    WORD wVersionRequested;
    WSADATA wsaData;
    int err;
    static int initialized;   /* guards against repeated initialization */

    wVersionRequested = MAKEWORD(1, 1);
    if (!initialized) {
        initialized = 1;
        err = WSAStartup(wVersionRequested, &wsaData);
        if (err != 0) {
            fprintf(stderr,"erl_call: "
                    "Can't initialize windows sockets: %d\n", err);
        }
        /* Require exactly WinSock version 1.1. */
        if ( LOBYTE( wsaData.wVersion ) != 1 ||
             HIBYTE( wsaData.wVersion ) != 1 ) {
            fprintf(stderr,"erl_call: This version of "
                    "windows sockets not supported\n");
            WSACleanup();
        }
    }
}
#endif
/***************************************************************************
*
* Utility functions
*
***************************************************************************/
/* malloc() wrapper: terminates the program when memory is exhausted. */
static void* ei_chk_malloc(size_t size)
{
    void *mem = malloc(size);
    if (mem != NULL)
        return mem;
    fprintf(stderr,"erl_call: insufficient memory\n");
    exit(1);
}
/* calloc() wrapper: terminates the program when memory is exhausted. */
static void* ei_chk_calloc(size_t nmemb, size_t size)
{
    void *mem = calloc(nmemb, size);
    if (mem != NULL)
        return mem;
    fprintf(stderr,"erl_call: insufficient memory\n");
    exit(1);
}
/* realloc() wrapper: terminates with a diagnostic when reallocation fails. */
static void* ei_chk_realloc(void *old, size_t size)
{
    void *mem = realloc(old, size);
    if (mem != NULL)
        return mem;
    fprintf(stderr, "erl_call: cannot reallocate %u bytes of memory from %p\n",
            (unsigned) size, old);
    exit (1);
}
static char* ei_chk_strdup(char *s)
{
char *p = strdup(s);
if (p == NULL) {
fprintf(stderr,"erl_call: insufficient memory\n");
exit(1);
}
return p;
}
|
# Issues and merge requests
* [ ] Finished the milestone/targeted issues?
* [ ] Relabeled issues with `status:wip` that are not work in progress?
# Code
* [ ] Updated the version in the Gradle build file?
* [ ] Updated the version in the configuration?
* [ ] Got all pipelines to succeed?
* [ ] Performed a real-world test among the maintainers?
# Documentation
* [ ] Updated the `release-notes` file?
* [ ] Updated the `CHANGELOG.md` file?
* [ ] Updated the `README.md` file?
* [ ] Updated the manual?
# Packaging
* [ ] Created an entry in the releases section?
* [ ] Uploaded to CTAN?
|
public interface ExtralifePiece{
/**
* Represents an Extralife piece view object
* @param cellSize - the size of a cell in the view Grid
*/
ExtralifePiece(int cellSize);
/**
* Creates a circle representing the Extralife piece that can be positioned in the grid
* @return Java fx node
*/
@Override
Node makeNode();
}
|
# gaea
Gaea ['dʒi:ə] (盖亚, "Gaia") — a lightweight RPC business framework.
|
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package helperClasses;
import javafx.scene.Group;
import javafx.scene.Scene;
import javafx.scene.text.Text;
/**
*
* @author dholmes
*/
public class TextFit {

    /**
     * Finds the largest font size (starting from {@code starting} and
     * shrinking in steps of 2) at which {@code text} fits within
     * {@code maxSize} pixels of width, using the "Belgium" font family.
     *
     * Fixes over the original: the throwaway {@link Scene} is created
     * once instead of on every iteration (a node only needs to be in a
     * scene for {@code applyCss()} to work), the unused local variable
     * was removed, and the loop is bounded so it cannot spin forever on
     * text that never fits (font size never drops below 2).
     *
     * @param text     the text to measure
     * @param starting the initial (largest) font size to try
     * @param maxSize  the maximum allowed width in pixels
     * @return the computed font size
     */
    public static int getTextFontSize(String text, int starting, double maxSize){
        int fontSize = starting;
        Text testLabel = new Text(text);
        testLabel.setStyle(styleFor(fontSize));
        // The node must belong to a scene for applyCss() to resolve styles.
        new Scene(new Group(testLabel));
        while (true) {
            testLabel.applyCss();
            double width = testLabel.getLayoutBounds().getWidth();
            if (width <= maxSize || fontSize <= 2) {
                break;
            }
            fontSize -= 2;
            testLabel.setStyle(styleFor(fontSize));
        }
        return fontSize;
    }

    /** Builds the inline style string for the given font size. */
    private static String styleFor(int fontSize) {
        return "-fx-font-size: " + fontSize + "px;" + "-fx-alignment: center;"
                + "-fx-font-family: Belgium;";
    }
}
|
#!/usr/bin/env ruby
require 'rsm'
require 'logreader'
require 'stringio'
# Reopen LaserData to expose only the readings flagged as valid,
# already converted to cartesian coordinates (in scan order).
class LaserData
  def valid_cartesian_points
    indices = (0..nrays-1).select { |i| valid[i] }
    indices.map { |i| p[i] }
  end
end
module MathUtils
  # Apply the rigid-body transform +x+ to every point in +v+.
  def transform_vector(v, x)
    v.map{|p| transform(p,x) }
  end

  # Axis-aligned bounding box of a point collection.  nil entries are
  # ignored (via compact), so previously computed corners may be passed
  # back in to grow an existing box.  Returns [bmin, bmax] column vectors.
  # NOTE(review): relies on bare min/max helpers supplied elsewhere (rsm).
  def compute_bbox(points)
    points = points.compact
    bmin = Vector[points[0][0],points[0][1]].col
    bmax = Vector[points[0][0],points[0][1]].col
    points.each {|p|
      bmin[0] = min(bmin[0], p[0])
      bmin[1] = min(bmin[1], p[1])
      bmax[0] = max(bmax[0], p[0])
      bmax[1] = max(bmax[1], p[1])
    }
    return bmin, bmax
  end
end
require 'cairo'
# Thin wrapper around a Cairo PDF surface that maps world coordinates
# (inside the bounding box bmin..bmax) onto a fixed-width page.
class Canvas
  # output:   IO/filename handed to Cairo::PDFSurface
  # bbox_min, bbox_max: world-coordinate corners of the drawable region
  def initialize(output, bbox_min, bbox_max)
    @bmin = bbox_min
    @bmax = bbox_max

    format = Cairo::FORMAT_ARGB32   # NOTE(review): unused for a PDF surface
    @width = 500
    # Preserve the aspect ratio of the bounding box.
    @height = @width *( @bmax[1]-@bmin[1])/( @bmax[0]-@bmin[0])
    @output = StringIO.new          # NOTE(review): unused; surface writes to `output`
    surface = Cairo::PDFSurface.new(output, @width, @height)
    @cr = Cairo::Context.new(surface)

    # fill background with white
    @cr.set_source_rgba(1.0, 1.0, 1.0, 0.8)
    @cr.paint
    @cr.set_line_join(Cairo::LINE_JOIN_MITER)
    @cr.set_line_width 1

    # Palette of reds and greens cycled through by next_color.
    @colors = [
      [1,0,0],[0.8,0,0],[0.7,0,0],[0.6,0,0],[0.5,0,0],
      [0,1,0],[0,0.8,0],[0,0.7,0],[0,0.6,0],[0,0.5,0],
    ]
    @current_color = 0
    next_color
  end

  # Emit the current page and finalize the PDF surface.
  def save
    @cr.show_page
    @cr.target.finish
  end

  # World -> page coordinates.
  def w2b(p)
    x = (p[0] - @bmin[0]) / (@bmax[0]-@bmin[0]) * @width
    y = (p[1] - @bmin[1]) / (@bmax[1]-@bmin[1]) * @height
    [x, y]
  end

  # move_to/line_to take WORLD coordinates and convert internally.
  def move_to(p)
    buf = w2b(p)
    @cr.move_to(buf[0],buf[1])
  end

  def line_to(p)
    buf = w2b(p)
    @cr.line_to(buf[0],buf[1])
  end

  # Advance to the next palette color and make it current.
  def next_color
    @current_color = (@current_color+1) % @colors.size
    rgb = @colors[@current_color]
    @cr.set_source_rgb(rgb[0],rgb[1],rgb[2])
  end

  # Draw each point (world coordinates) as a round dot in the next color.
  def write_points(points)
    return if points.empty?
    @cr.set_line_cap(Cairo::LINE_CAP_ROUND)
    @cr.set_line_width(1)
    next_color
    # BUG FIX: move_to() converts world -> page itself, so the original
    # move_to(w2b(points[0])) applied the transform twice and placed the
    # initial point at the wrong coordinates.
    move_to(points[0])
    points.each do |p|
      b = w2b(p)
      @cr.move_to(b[0],b[1])
      @cr.close_path
      @cr.stroke
    end
  end

  def cr; @cr end
end
# Convert a Carmen-format laser log to a PDF picture: reads scans from
# +input+, keeps every 10th one, then draws the odometry trajectory and
# the valid scan points.
def carmen2pic(input,output_file)
  include MathUtils   # mixes the math helpers into the top-level object

  # Read every `interval`-th laser reading from the log.
  count = 0; interval = 10
  lds = []
  until input.eof?
    ld = LogReader.shift_laser(input)
    break if ld.nil?
    if count%interval == 0
      puts count
      lds.push ld
    end
    count+=1
  end
  puts "Read #{lds.size} laser scans."

  # Project all scans into world coordinates and grow the bounding box.
  bbox_min = nil
  bbox_max = nil
  all_points = []
  lds.each_index do |i|
    ld = lds[i]
    ld.compute_cartesian
    points = ld.valid_cartesian_points
    points = transform_vector(points, ld.estimate)
    # nil corners are tolerated by compute_bbox on the first iteration
    bbox_min, bbox_max = compute_bbox(points+[bbox_min,bbox_max])
    all_points << points
  end
  puts "bbox: #{bbox_min.row} to #{bbox_max}"

  output = File.open(output_file ,"wb")
  ca = Canvas.new(output, bbox_min, bbox_max)

  # Draw the robot trajectory first...
  ca.cr.set_line_width(2)
  ca.move_to(lds[0].estimate)
  lds.each_index do |i|
    ca.line_to(lds[i].estimate)
  end
  ca.cr.stroke

  # ...then the scan points of every kept reading.
  lds.each_index do |i|
    ca.write_points(all_points[i])
  end

  ca.save()
end
# Command-line entry point: carmen2pic <input> <output>
if ARGV.size < 2
  puts "carmen2pic <input> <output> "
  exit 1
end

carmen2pic(File.open(ARGV[0]), ARGV[1])
|
package reflect
import (
"fmt"
"reflect"
)
// Employer is the minimal interface used by the reflection demo below.
type Employer interface {
	get() string
}

// Employ is a concrete Employer carrying just a name.
type Employ struct {
	name string
}

// get returns the employee name.  Pointer receiver: *Employ (not Employ)
// satisfies Employer.
func (e *Employ) get() string {
	return e.name
}
// reflectType obtains the type object of an arbitrary value via
// reflect.TypeOf and prints it.  Note that %T shows the type of the
// reflect.Type value itself, not the type of e.
func reflectType(e interface{}) {
	t := reflect.TypeOf(e)
	fmt.Printf("type:%T, value:%v\n", t, t)
}
// TestReflect demonstrates reflectType on an interface value holding a
// *Employ and on a plain int.
func TestReflect() {
	var worker Employer = &Employ{"jack"}
	reflectType(worker)

	number := 1
	reflectType(number)
}
|
---
id: intro
slug: /
---
# Overview
Here you'll find ways to get started, understand core concepts, and explore
a variety of resources to help you get the most from Tigris data platform. We
also recommend joining our
[Slack Community](https://join.slack.com/t/tigrisdatacommunity/shared_invite/zt-16fn5ogio-OjxJlgttJIV0ZDywcBItJQ)
for help along the way.
## What is Tigris?
[Tigris](https://github.com/tigrisdata/tigris) is a modern, scalable backend
for building real-time websites and apps. The zero-ops approach of Tigris
means developers can focus on their application rather than managing
databases or data pipelines, while its API makes it easy to get started
with any data architecture.
We are fully committed to open source and embrace the open exchange of
information, collaborative development and transparency.
Tigris is licensed under the terms of the
[Apache License v2.0](http://www.apache.org/licenses/LICENSE-2.0)
|
require 'rspec/expectations'
module RSpec
  module Matchers
    # Matchers for testing RSpec matchers. Include them with:
    #
    #   require 'rspec/matchers/fail_matchers'
    #   RSpec.configure do |config|
    #     config.include RSpec::Matchers::FailMatchers
    #   end
    #
    module FailMatchers
      # Matches if an expectation fails
      #
      # @example
      #   expect { some_expectation }.to fail
      def fail(&matcher_block)
        raise_error(RSpec::Expectations::ExpectationNotMetError, &matcher_block)
      end

      # Matches if an expectation fails with the provided message
      #
      # @example
      #   expect { some_expectation }.to fail_with("some failure message")
      #   expect { some_expectation }.to fail_with(/some failure message/)
      def fail_with(expected_message)
        raise_error(RSpec::Expectations::ExpectationNotMetError, expected_message)
      end

      # Matches if an expectation fails whose message includes all of the
      # provided snippets
      #
      # @example
      #   expect { some_expectation }.to fail_including("portion of some failure message")
      def fail_including(*message_snippets)
        expected = a_string_including(*message_snippets)
        raise_error(RSpec::Expectations::ExpectationNotMetError, expected)
      end
    end
  end
end
|
---
name: "\U0001F914 General question"
about: Ask a general question about Milvus
title: ''
labels: ''
assignees: ''
---
**What is your question?**
|
<?php
namespace App\Http\Controllers;
use App\Product;
use App\Http\Requests\ProductsRequest as Request;
use Carbon\Carbon;
use Illuminate\Support\Facades\Cache;
class ProductController extends Controller
{
    /**
     * List all products.
     *
     * BUG FIX: removed a leftover debug call, dump(\Auth::user()), which
     * dumped the authenticated user into every response.  Also removed
     * dead commented-out cache code that referenced an undefined
     * $company_id variable.
     *
     * @return \Illuminate\Database\Eloquent\Collection
     */
    public function index(){
        return Product::all();
    }

    /**
     * Create a product owned by the authenticated user and invalidate
     * the cached product list.
     */
    public function store(Request $request){
        Cache::forget('api::products');
        $data = $request->all();
        $data['user_id'] = $request->user()->id;
        return Product::create($data);
    }

    /**
     * Update the given product with all request input.
     *
     * NOTE(review): unlike destroy(), no authorization check is
     * performed here -- confirm whether that is intended.
     */
    public function update(Request $request, Product $product){
        $product->update($request->all());
        return $product;
    }

    /** Delete the given product after a policy ('delete') check. */
    public function destroy(Product $product){
        $this->authorize('delete', $product);
        $product->delete();
        return $product;
    }

    /** Show a single product (implicit route-model binding). */
    public function show(Product $product){
        return $product;
    }
}
|
package no.nav.foreldrepenger.vtp.server.rest.azuread.navansatt;
import io.swagger.annotations.Api;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Response;
import java.util.HashMap;
import java.util.Map;
@Api(tags = {"AzureAd"})
@Path("/MicrosoftGraphApi")
public class MicrosoftGraphApiMock {

    /**
     * Mocked Microsoft Graph {@code /v1.0/me} endpoint.  Always answers
     * with a fixed on-premises SAM account name and echoes the requested
     * {@code select} fields back in the OData context URL.
     */
    @GET
    @Produces({ "application/json;charset=UTF-8" })
    @Path("/v1.0/me")
    public Response me(@QueryParam("select") String select) {
        // TODO: read Authorization header, maybe we can guess onPremisesSamAccountName from values there
        final Map<String, String> body = new HashMap<>();
        body.put("onPremisesSamAccountName", "Z123456");
        body.put("@odata.context",
                "https://graph.microsoft.com/v1.0/$metadata#users(" + select + ")/$entity");
        return Response.ok(body).build();
    }
}
|
# gostack-primeiro-projeto-react
Primeiro projeto React para aprender os fundamentos de ReactJS
...
...
...
...
...
...
|
# frozen_string_literal: true
require 'webmock'
require 'webmock/rspec'
# This prevents Selenium/WebMock from spawning thousands of connections
# while waiting for an element to appear via Capybara's find:
# https://github.com/teamcapybara/capybara/issues/2322#issuecomment-619321520
#
# Same as webmock_enable!, but lets Net::HTTP open the TCP connection
# eagerly on start.
def webmock_enable_with_http_connect_on_start!
  webmock_enable!(net_http_connect_on_start: true)
end
# Block outgoing HTTP except to localhost, merging in any extra WebMock
# options (or host allowances) supplied by the caller.
def webmock_enable!(options = {})
  defaults = {
    allow_localhost: true,
    allow: []
  }
  WebMock.disable_net_connect!(defaults.merge(options))
end

webmock_enable!
|
#!/usr/bin/env bash

# Create an empty .thy.yml. If there already exists a working config, move it out temporarily and bring back later.
config_exists=false
if [ -f "$HOME/.thy.yml" ]; then
    mv "$HOME/.thy.yml" "$HOME/.thy2.yml"
    config_exists=true
fi

# Default the CLI name when the variable is unset.  Same semantics as the
# original `[[ -v ]]` branch (an empty-but-set value is preserved), minus
# the no-op self-assignment.
CONSTANTS_CLINAME="${CONSTANTS_CLINAME-dsv}"
echo "CLI NAME: ${CONSTANTS_CLINAME}"

touch "$HOME/.thy.yml"

if [[ ! -v BINARY_PATH ]]; then
    # No prebuilt binary supplied: build from the repository root.
    cd ..
    if [[ "$IS_SYSTEM_TEST" == "true" ]]; then
        make build-test
        mv "$CONSTANTS_CLINAME.test" "inittests/$CONSTANTS_CLINAME"
    else
        make
        mv "$CONSTANTS_CLINAME" "inittests/$CONSTANTS_CLINAME"
    fi
    cd inittests
    source .defaultvars
else
    # Use the prebuilt linux binary from the artifact directory
    # (the directory between BINARY_PATH and the binary is a glob).
    cp "$BINARY_PATH"/*/"$CONSTANTS_CLINAME-linux-x64" "./$CONSTANTS_CLINAME"
    chmod +x "$CONSTANTS_CLINAME"
fi

source .env
python3 tests.py
deactivate
rm "$CONSTANTS_CLINAME"

# Return the original config, if it had existed.
if [ "$config_exists" == true ]; then
    mv "$HOME/.thy2.yml" "$HOME/.thy.yml"
fi
|
# Atom feed of search results (builder template, German locale).
atom_feed(
  language: "de-DE",
  schema_date: "2015",
  url: searches_url(search_request: @search_request, scope: current_scope, format: :atom),
  root_url: searches_url(search_request: @search_request, scope: current_scope)
) do |feed|
  # NOTE(review): join() without a separator concatenates multiple query
  # strings directly -- confirm a space separator is not wanted here.
  feed.title "Suchergebnisse für #{@search_request.queries.map{|q| q.query}.join()}"
  feed.author do
    feed.name "Universitätsbibliothek Paderborn"
  end

  # One entry per search hit, linking to the record detail page.
  if @search_result.present? && @search_result.hits.present?
    @search_result.hits.each do |hit|
      feed.entry(hit, url: record_url(hit.record.id, scope: current_scope)) do |entry|
        entry.title(title(hit.record))

        content = ""
        #if (is_part_of = is_part_of(hit.record, scope: current_scope)).present?
        #  content << "<div>#{is_part_of.html_safe}</div>"
        #end
        if (info = additional_record_info(hit.record)).present?
          content << "<div>#{info}</div>"
        end
        #if journal_stock = journal_stock(hit.record)
        #  content << "<div>Bestand UB: #{journal_stock}</div>"
        #end

        entry.content(content, type: "html")
      end
    end
  end
end
|
from django.contrib.syndication.views import Feed
from property.models import Property
from dashboard.models import Post
from django.utils.feedgenerator import Atom1Feed
from itertools import chain
class RssSiteNewsFeed(Feed):
    """RSS feed of the 15 most recently published properties."""

    title = "ZRealty Corp site news"
    link = "/feeds/"
    description = "Latest Feeds"

    def get_context_data(self, **kwargs):
        # No extra context is added; kept as an extension point.
        context = super(RssSiteNewsFeed, self).get_context_data(**kwargs)
        return context

    def item_link(self, item):
        """
        Takes an item, as returned by items(), and returns the item's URL.
        """
        # NOTE(review): domain and path are hard-coded; consider
        # reverse()/get_absolute_url() instead.
        return 'http://zrealtycorp.com/property/' + str(item.property_id)

    def items(self):
        # Newest first, capped at 15 entries.
        properties = Property.objects.order_by('-published')[:15]
        return properties
class RssPostsFeed(Feed):
    """RSS feed of the 15 most recently published dashboard posts."""

    title = "ZRealty Corp site posts"
    link = "/feeds/"
    description = "Latest Feeds"

    def get_context_data(self, **kwargs):
        # No extra context is added; kept as an extension point.
        context = super(RssPostsFeed, self).get_context_data(**kwargs)
        return context

    def item_link(self, item):
        """
        Takes an item, as returned by items(), and returns the item's URL.
        """
        # NOTE(review): posts are linked under /property/<id>, which looks
        # like a copy-paste from RssSiteNewsFeed -- confirm the intended
        # URL path for Post items.
        return 'http://zrealtycorp.com/property/' + str(item.id)

    def items(self):
        # Newest first, capped at 15 entries.
        posts = Post.objects.order_by('-published')[:15]
        return posts
class AtomSiteNewsFeed(RssSiteNewsFeed):
    """Atom flavour of RssSiteNewsFeed: same items, Atom 1.0 serialization."""
    feed_type = Atom1Feed
    subtitle = RssSiteNewsFeed.description
|
using FlashcardsCourseProject.Models;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using Xamarin.Forms;
namespace FlashcardsCourseProject.Services
{
public class FileImageDataStore : IDataStore<FileImage>
{
private ApplicationContext _db => DependencyService.Get<ApplicationContext>();
public FileImageDataStore()
{
_db.Database.EnsureCreated();
}
public async Task<bool> AddItemAsync(FileImage item)
{
_db.FileImage.Add(item);
_db.SaveChanges();
return await Task.FromResult(true);
}
public async Task<bool> UpdateItemAsync(FileImage item)
{
var oldItem = _db.FileImage.Where(a => a.Id == item.Id).FirstOrDefault();
oldItem.Name = item.Name;
oldItem.Path = item.Path;
_db.FileImage.Update(oldItem);
_db.SaveChanges();
return await Task.FromResult(true);
}
public async Task<bool> DeleteItemAsync(int id)
{
var oldItem = _db.FileImage.Where(a => a.Id == id).FirstOrDefault();
_db.FileImage.Remove(oldItem);
_db.SaveChanges();
return await Task.FromResult(true);
}
public async Task<FileImage> GetItemAsync(int id)
{
return await Task.FromResult(_db.FileImage.FirstOrDefault(a => a.Id == id));
}
public async Task<IEnumerable<FileImage>> GetItemsAsync(int? cardId = null)
{
return await Task.FromResult(_db.FileImage.ToList());
}
}
}
|
! RUN: %S/test_errors.sh %s %t %f18 -fopenmp
! XFAIL: *

! OpenMP Version 4.5
! 2.7.3 single Construct
! Copyprivate variable is not thread private or private in outer context
! Negative test: the semantic check must reject COPYPRIVATE(i) because i
! is shared in the enclosing parallel region.  The !ERROR line must stay
! directly above the directive it annotates for the test harness.

program omp_single
  integer i
  i = 10
  !$omp parallel
  !$omp single
  print *, "omp single", i
  !ERROR: copyprivate variable ‘i’ is not threadprivate or private
  !$omp end single copyprivate(i)
  !$omp end parallel
end program omp_single
|
#!/bin/bash

# *****************************************************************************
# FILE: env_nwm_r2.sh
# AUTHOR: Matt Masarik (MM)
# VERSION: 0 2019-01-19 MM Base version
#
# PURPOSE: Provides evironment variables and modules required for the
# National Water Model (NWM) version of WRF-Hydro, on R2.
#
# USAGE: source env_nwm_r2.sh
# *****************************************************************************

# unload any auto-loaded modules (start from a clean module environment)
module purge

# now load modules: Intel toolchain + matching MPI/MKL and the
# HDF5/netCDF/udunits I/O stack built against it
module load shared
module load git/64/2.12.2
module load slurm/17.11.8
module load intel/compiler/64/2018/18.0.3
module load intel/mpi/64/2018/3.222
module load intel/mkl/64/2018/3.222
module load hdf5_18/intel/1.8.18-mpi
module load netcdf/intel/64/4.4.1
module load udunits/intel/64/2.2.24

# export netCDF env variable (consumed by the WRF-Hydro build system)
export NETCDF=/cm/shared/apps/netcdf/intel/64/4.4.1
|
; NOTE: Assertions have been autogenerated by utils/update_test_checks.py
; RUN: opt %s -instcombine -S | FileCheck %s

; Tests the fold (base - offset) u< base  <->  base u>= offset, which is
; valid when offset is known non-zero; here assume(offset s< 0) implies
; offset != 0.  Do not hand-edit the CHECK lines (autogenerated).

declare void @llvm.assume(i1)
declare i8 @gen8()
declare void @use8(i8)

; Basic case: (base - offset) u< base  ->  base u>= offset.
define i1 @t0(i8 %base, i8 %offset) {
; CHECK-LABEL: @t0(
; CHECK-NEXT: [[CMP:%.*]] = icmp slt i8 [[OFFSET:%.*]], 0
; CHECK-NEXT: call void @llvm.assume(i1 [[CMP]])
; CHECK-NEXT: [[ADJUSTED:%.*]] = sub i8 [[BASE:%.*]], [[OFFSET]]
; CHECK-NEXT: call void @use8(i8 [[ADJUSTED]])
; CHECK-NEXT: [[RES:%.*]] = icmp uge i8 [[BASE]], [[OFFSET]]
; CHECK-NEXT: ret i1 [[RES]]
;
  %cmp = icmp slt i8 %offset, 0
  call void @llvm.assume(i1 %cmp)
  %adjusted = sub i8 %base, %offset
  call void @use8(i8 %adjusted)
  %res = icmp ult i8 %adjusted, %base
  ret i1 %res
}

; Inverted predicate: (base - offset) u>= base  ->  base u< offset.
define i1 @t1(i8 %base, i8 %offset) {
; CHECK-LABEL: @t1(
; CHECK-NEXT: [[CMP:%.*]] = icmp slt i8 [[OFFSET:%.*]], 0
; CHECK-NEXT: call void @llvm.assume(i1 [[CMP]])
; CHECK-NEXT: [[ADJUSTED:%.*]] = sub i8 [[BASE:%.*]], [[OFFSET]]
; CHECK-NEXT: call void @use8(i8 [[ADJUSTED]])
; CHECK-NEXT: [[RES:%.*]] = icmp ult i8 [[BASE]], [[OFFSET]]
; CHECK-NEXT: ret i1 [[RES]]
;
  %cmp = icmp slt i8 %offset, 0
  call void @llvm.assume(i1 %cmp)
  %adjusted = sub i8 %base, %offset
  call void @use8(i8 %adjusted)
  %res = icmp uge i8 %adjusted, %base
  ret i1 %res
}

; Commuted operands: base u> (base - offset)  ->  base u>= offset.
define i1 @t2(i8 %offset) {
; CHECK-LABEL: @t2(
; CHECK-NEXT: [[CMP:%.*]] = icmp slt i8 [[OFFSET:%.*]], 0
; CHECK-NEXT: call void @llvm.assume(i1 [[CMP]])
; CHECK-NEXT: [[BASE:%.*]] = call i8 @gen8()
; CHECK-NEXT: [[ADJUSTED:%.*]] = sub i8 [[BASE]], [[OFFSET]]
; CHECK-NEXT: call void @use8(i8 [[ADJUSTED]])
; CHECK-NEXT: [[RES:%.*]] = icmp uge i8 [[BASE]], [[OFFSET]]
; CHECK-NEXT: ret i1 [[RES]]
;
  %cmp = icmp slt i8 %offset, 0
  call void @llvm.assume(i1 %cmp)
  %base = call i8 @gen8()
  %adjusted = sub i8 %base, %offset
  call void @use8(i8 %adjusted)
  %res = icmp ugt i8 %base, %adjusted
  ret i1 %res
}

; Commuted operands, inverted predicate: base u<= (base - offset).
define i1 @t3(i8 %offset) {
; CHECK-LABEL: @t3(
; CHECK-NEXT: [[CMP:%.*]] = icmp slt i8 [[OFFSET:%.*]], 0
; CHECK-NEXT: call void @llvm.assume(i1 [[CMP]])
; CHECK-NEXT: [[BASE:%.*]] = call i8 @gen8()
; CHECK-NEXT: [[ADJUSTED:%.*]] = sub i8 [[BASE]], [[OFFSET]]
; CHECK-NEXT: call void @use8(i8 [[ADJUSTED]])
; CHECK-NEXT: [[RES:%.*]] = icmp ult i8 [[BASE]], [[OFFSET]]
; CHECK-NEXT: ret i1 [[RES]]
;
  %cmp = icmp slt i8 %offset, 0
  call void @llvm.assume(i1 %cmp)
  %base = call i8 @gen8()
  %adjusted = sub i8 %base, %offset
  call void @use8(i8 %adjusted)
  %res = icmp ule i8 %base, %adjusted
  ret i1 %res
}

; Here we don't know that offset is non-zero. Can't fold.
define i1 @n4_maybezero(i8 %base, i8 %offset) {
; CHECK-LABEL: @n4_maybezero(
; CHECK-NEXT: [[ADJUSTED:%.*]] = sub i8 [[BASE:%.*]], [[OFFSET:%.*]]
; CHECK-NEXT: call void @use8(i8 [[ADJUSTED]])
; CHECK-NEXT: [[RES:%.*]] = icmp ult i8 [[ADJUSTED]], [[BASE]]
; CHECK-NEXT: ret i1 [[RES]]
;
  %adjusted = sub i8 %base, %offset
  call void @use8(i8 %adjusted)
  %res = icmp ult i8 %adjusted, %base
  ret i1 %res
}

; We need to know that about %offset, %base won't do. Can't fold.
define i1 @n5_wrongnonzero(i8 %base, i8 %offset) {
; CHECK-LABEL: @n5_wrongnonzero(
; CHECK-NEXT: [[CMP:%.*]] = icmp sgt i8 [[BASE:%.*]], 0
; CHECK-NEXT: call void @llvm.assume(i1 [[CMP]])
; CHECK-NEXT: [[ADJUSTED:%.*]] = sub i8 [[BASE]], [[OFFSET:%.*]]
; CHECK-NEXT: call void @use8(i8 [[ADJUSTED]])
; CHECK-NEXT: [[RES:%.*]] = icmp ult i8 [[ADJUSTED]], [[BASE]]
; CHECK-NEXT: ret i1 [[RES]]
;
  %cmp = icmp sgt i8 %base, 0
  call void @llvm.assume(i1 %cmp)
  %adjusted = sub i8 %base, %offset
  call void @use8(i8 %adjusted)
  %res = icmp ult i8 %adjusted, %base
  ret i1 %res
}
|
# Tracks alcoholic beverages consumed by an adult and reports sobriety.
class Adult
  def initialize
    # BUG FIX: the original assigned @adult = Adult.new here, recursing
    # infinitely on construction.  Track the drink count instead.
    @count = 0
  end

  # Record one drink; returns the updated count.
  def consume_an_alcoholic_beverage
    # BUG FIX: the original incremented an undefined local `count`,
    # which raised NoMethodError on nil.
    @count += 1
  end

  # An adult is considered sober after fewer than three drinks.
  # BUG FIX: the original chained .count off the increment method and
  # used >= 3, which both crashed and inverted the predicate.
  def sober?
    @count < 3
  end
end
|
using System.Runtime.Serialization;
namespace Ghostware.GPSDLib.Models
{
[DataContract]
public class GpsdVersion
{
[DataMember(Name = "release")]
public string Release { get; set; }
[DataMember(Name = "rev")]
public string Rev { get; set; }
[DataMember(Name = "proto_major")]
public int ProtoMajor { get; set; }
[DataMember(Name = "proto_minor")]
public int ProtoMinor { get; set; }
public override string ToString()
{
return $"Release: {Release} - Revision: {Rev} - ProtoMajor: {ProtoMajor} - ProtoMinor: {ProtoMinor}";
}
}
}
|
from .reservation import OpenReservationsManager
from .thesis import ThesisManager, ThesisApiManager
from .thesis_import import ThesisImportManager
__all__ = [
'OpenReservationsManager',
'ThesisManager',
'ThesisApiManager',
'ThesisImportManager',
]
|
<?php
declare(strict_types = 1);
namespace App\ExternalApi\Isite\Mapper;
use App\Controller\Helpers\IsiteKeyHelper;
use App\ExternalApi\Exception\ParseException;
use App\ExternalApi\Isite\Domain\IsiteImage;
use BBC\ProgrammesPagesService\Domain\ValueObject\Pid;
use DateTimeImmutable;
use InvalidArgumentException;
use Psr\Log\LoggerInterface;
use SimpleXMLElement;
/**
 * Base mapper for iSite documents.
 *
 * Provides shared helpers for extracting forms, metadata, project spaces,
 * strings, dates and images from the raw SimpleXML payloads returned by
 * the iSite API. Concrete mappers implement getDomainModel().
 */
abstract class Mapper
{
    /** @var IsiteKeyHelper */
    protected $isiteKeyHelper;
    /** @var MapperFactory */
    protected $mapperFactory;
    /** @var LoggerInterface */
    protected $logger;
    public function __construct(MapperFactory $mapperFactory, IsiteKeyHelper $isiteKeyHelper, LoggerInterface $logger)
    {
        $this->isiteKeyHelper = $isiteKeyHelper;
        $this->mapperFactory = $mapperFactory;
        $this->logger = $logger;
    }
    /**
     * Map a raw iSite XML payload onto this mapper's domain model.
     * @param SimpleXMLElement $isiteObject
     */
    abstract public function getDomainModel(SimpleXMLElement $isiteObject);
    /**
     * Returns the <form> element of the document.
     * @throws ParseException when the document contains no form
     */
    protected function getForm(SimpleXMLElement $isiteObject): SimpleXMLElement
    {
        if (empty($isiteObject->document->form)) {
            throw new ParseException('Invalid iSite XML document');
        }
        return $isiteObject->document->form;
    }
    /**
     * Gets the metadata directly from the header
     * @param SimpleXMLElement $isiteObject
     * @return SimpleXMLElement
     */
    protected function getMetaData(SimpleXMLElement $isiteObject): SimpleXMLElement
    {
        return $isiteObject->metadata;
    }
    /**
     * Method to get the metadata form isite document
     * @param SimpleXMLElement $isiteObject
     * @return SimpleXMLElement|null
     */
    protected function getFormMetaData(SimpleXMLElement $isiteObject): ?SimpleXMLElement
    {
        return $this->getForm($isiteObject)->metadata;
    }
    /**
     * Extracts the iSite project-space slug from the form's XML namespace URI
     * (e.g. ".../isite2/project/<slug>/").
     * @throws ParseException when the namespace does not match the expected pattern
     */
    protected function getProjectSpace(SimpleXMLElement $form): string
    {
        $namespaces = $form->getNamespaces();
        $namespace = reset($namespaces);
        $matches = [];
        preg_match('{https://production(?:\.int|\.test|\.stage|\.live)?\.bbc\.co\.uk/isite2/project/([^/]+)/}', $namespace, $matches);
        if (empty($matches[1])) {
            throw new ParseException('iSite XML does not specify project space and is therefore invalid');
        }
        return $matches[1];
    }
    /**
     * Trimmed string value of an element, or null when the element is absent.
     * NOTE(review): empty() also treats an element whose value is "0" as
     * empty, so such elements return null — confirm this is intended.
     */
    protected function getString(?SimpleXMLElement $val): ?string
    {
        if (empty($val)) {
            return null;
        }
        $val = (string) $val;
        return trim($val);
    }
    /**
     * True when the result metadata carries a guid, i.e. the document has
     * a published version.
     */
    protected function isPublished(SimpleXMLElement $context): bool
    {
        return isset($context->result->metadata->guid);
    }
    /**
     * Builds an IsiteImage from a pid-bearing element; returns null when the
     * element is empty or the pid is not a valid Pid.
     */
    protected function getIsiteImage(?SimpleXMLElement $formMetaDataImage): ?IsiteImage
    {
        try {
            $imagePidString = $this->getString($formMetaDataImage);
            if (!empty($imagePidString)) {
                $imagePid = new Pid($imagePidString);
                $image = new IsiteImage($imagePid);
            } else {
                $image = null;
            }
        } catch (InvalidArgumentException $e) {
            $image = null;
        }
        return $image;
    }
    /**
     * Parses the element's string value into a DateTimeImmutable.
     */
    protected function getDateTime(SimpleXMLElement $val): DateTimeImmutable
    {
        return new DateTimeImmutable($this->getString($val));
    }
}
|
-- Execute the statements below (demo: inspect the current session id).
USE AdventureWorks;
GO
-- @@SPID returns the session id of the current connection.
SELECT @@SPID as select_session_id;
GO
SELECT [Name] FROM [Production].[Product];
|
from .package import package
from .stringify import gen_data_attributes
from .script_data import stringify_script_data
prevent_reinclusion = package(
gen_data_attributes("Prevent reinclusion", "v0.1"),
"js",
"",
"""
var ankiAms = document.querySelectorAll('#anki-am')
if (ankiAms.length > 1) {
for (const am of Array.from(ankiAms).slice(0, -1)) {
am.outerHTML = ''
}
}""".strip(),
[],
)
def get_prevent_reinclusion(indent_size):
    """Return the prevent-reinclusion package rendered as script text,
    indented by `indent_size` via the shared stringifier."""
    return stringify_script_data(prevent_reinclusion, indent_size, True)
|
-- Enforce uniqueness of SHIPMENT_ID and speed up primary-key lookups.
CREATE UNIQUE INDEX "PK_SHIPMENT_ID" ON "SHIPMENT" ("SHIPMENT_ID")
|
# shellcheck shell=bash
# Logs one traversal-loop iteration (loop index, reply count, current key,
# and the full contents of the current object) when the
# TRACE_BASH_OBJECT_TRAVERSE environment variable is set; no-op otherwise.
# NOTE(review): reads i, REPLIES, key, current_object_name and the
# current_object associative array from the caller's scope.
bash_object.trace_loop() {
	if [ -n "${TRACE_BASH_OBJECT_TRAVERSE+x}" ]; then
		stdtrace.log 0 "-- START LOOP ITERATION"
		stdtrace.log 0 "i+1: '$((i+1))'"
		stdtrace.log 0 "\${#REPLIES[@]}: ${#REPLIES[@]}"
		stdtrace.log 0 "key: '$key'"
		stdtrace.log 0 "current_object_name: '$current_object_name'"
		stdtrace.log 0 "current_object=("
		for debug_key in "${!current_object[@]}"; do
			stdtrace.log 0 " [$debug_key]='${current_object[$debug_key]}'"
		done
		stdtrace.log 0 ")"
	fi
}
# Logs the current key and object contents when TRACE_BASH_OBJECT_TRAVERSE
# is set; a lighter variant of bash_object.trace_loop without loop counters.
# NOTE(review): reads key, current_object_name and the current_object
# associative array from the caller's scope.
bash_object.trace_current_object() {
	if [ -n "${TRACE_BASH_OBJECT_TRAVERSE+x}" ]; then
		stdtrace.log 0 "key: '$key'"
		stdtrace.log 0 "current_object_name: '$current_object_name'"
		stdtrace.log 0 "current_object=("
		for debug_key in "${!current_object[@]}"; do
			stdtrace.log 0 " [$debug_key]='${current_object[$debug_key]}'"
		done
		stdtrace.log 0 ")"
	fi
}
|
//
// Copyright (c) 2008-2011, Kenneth Bell
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the "Software"),
// to deal in the Software without restriction, including without limitation
// the rights to use, copy, modify, merge, publish, distribute, sublicense,
// and/or sell copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
// DEALINGS IN THE SOFTWARE.
//
namespace DiscUtils.Ntfs
{
    using System;

    /// <summary>
    /// On-disk record from the NTFS $Secure stream: a 0x14-byte header
    /// (hash, id, file offset, entry size) followed by the raw security
    /// descriptor bytes.
    /// </summary>
    internal sealed class SecurityDescriptorRecord : IByteArraySerializable
    {
        public uint Hash;
        public uint Id;
        public long OffsetInFile;
        public uint EntrySize;
        public byte[] SecurityDescriptor;

        /// <summary>Serialized size: descriptor bytes plus the 0x14-byte header.</summary>
        public int Size
        {
            get { return SecurityDescriptor.Length + 0x14; }
        }

        /// <summary>
        /// Parses a record from <paramref name="buffer"/> at <paramref name="offset"/>.
        /// Returns false (and leaves SecurityDescriptor unset) when the stored
        /// entry size is zero.
        /// </summary>
        public bool Read(byte[] buffer, int offset)
        {
            Hash = Utilities.ToUInt32LittleEndian(buffer, offset + 0x00);
            Id = Utilities.ToUInt32LittleEndian(buffer, offset + 0x04);
            OffsetInFile = Utilities.ToInt64LittleEndian(buffer, offset + 0x08);
            EntrySize = Utilities.ToUInt32LittleEndian(buffer, offset + 0x10);
            if (EntrySize > 0)
            {
                SecurityDescriptor = new byte[EntrySize - 0x14];
                Array.Copy(buffer, offset + 0x14, SecurityDescriptor, 0, SecurityDescriptor.Length);
                return true;
            }
            else
            {
                return false;
            }
        }

        /// <summary>
        /// IByteArraySerializable read; returns the number of bytes consumed.
        /// NOTE(review): ignores Read's failure result — if EntrySize was zero,
        /// SecurityDescriptor is null here and this throws NullReferenceException.
        /// Confirm callers only invoke this on well-formed records.
        /// </summary>
        public int ReadFrom(byte[] buffer, int offset)
        {
            Read(buffer, offset);
            return SecurityDescriptor.Length + 0x14;
        }

        /// <summary>Writes the header and descriptor bytes into <paramref name="buffer"/>.</summary>
        public void WriteTo(byte[] buffer, int offset)
        {
            EntrySize = (uint)Size;
            Utilities.WriteBytesLittleEndian(Hash, buffer, offset + 0x00);
            Utilities.WriteBytesLittleEndian(Id, buffer, offset + 0x04);
            Utilities.WriteBytesLittleEndian(OffsetInFile, buffer, offset + 0x08);
            Utilities.WriteBytesLittleEndian(EntrySize, buffer, offset + 0x10);
            Array.Copy(SecurityDescriptor, 0, buffer, offset + 0x14, SecurityDescriptor.Length);
        }
    }
}
|
exports.seed = function(knex) {
return knex('Users').insert([
{username: 'TestUser1' , password: 'lol123', email: 'test1@gmail.com', name:'John Doe'},
{username: 'TestUser2', password: 'lol123', email: 'test2@gmail.com', name:'Jane Doe'},
{username: 'TestUser3', password: 'lol123', email: 'test3@gmail.com', name:'John Doe'},
])
.then(() => console.log("\n== Seed data for users table added. ==\n"));
};
|
/*
This is a development SQL script to patch Skyline from v2.1.0-patch-dev-4014-dev-only
*/
USE skyline;
/*
# @added 20210414 - Feature #4014: Ionosphere - inference
# Branch #3590: inference
# Added motif related columns to the ionosphere table
*/
-- NOTE(review): the ALTERs below are not idempotent; re-running this script
-- against an already-patched schema will fail on duplicate columns.
ALTER TABLE `ionosphere` ADD COLUMN `motif_matched_count` INT(11) DEFAULT 0 COMMENT 'the number of times a motif from this feature profile has been matched' AFTER `generation`;
COMMIT;
ALTER TABLE `ionosphere` ADD COLUMN `motif_last_matched` INT(10) DEFAULT 0 COMMENT 'the unix timestamp of the last time a motif from this feature profile was matched' AFTER `motif_matched_count`;
COMMIT;
ALTER TABLE `ionosphere` ADD COLUMN `motif_last_checked` INT(10) DEFAULT 0 COMMENT 'the unix timestamp of the last time a motif from this feature profile was checked' AFTER `motif_last_matched`;
COMMIT;
ALTER TABLE `ionosphere` ADD COLUMN `motif_checked_count` INT(10) DEFAULT 0 COMMENT 'the number of times a motifs from this feature profile have been checked' AFTER `motif_last_checked`;
COMMIT;
/*
# @added 20210414 - Feature #4014: Ionosphere - inference
# Branch #3590: inference
# Store the not anomalous motifs
*/
CREATE TABLE IF NOT EXISTS `not_anomalous_motifs` (
`id` INT(11) NOT NULL AUTO_INCREMENT COMMENT 'motif element unique id',
`motif_id` INT(11) NOT NULL COMMENT 'motif id',
`timestamp` INT(10) DEFAULT 0 COMMENT 'motif element unix timestamp',
`value` DECIMAL(65,6) NOT NULL COMMENT 'motif element value',
PRIMARY KEY (id),
INDEX `inference_not_anomalous_motifs` (`id`,`motif_id`))
ENGINE=InnoDB;
-- Record that this patch has been applied.
INSERT INTO `sql_versions` (version) VALUES ('2.1.0-patch-dev-4014-dev-only');
|
/**
 * Flushes the Promises currently running in the PromiseJobs queue by
 * resolving on the next `setImmediate` tick (i.e. after queued microtasks).
 * Inspired from https://github.com/facebook/jest/issues/2157#issuecomment-279171856
 */
export const flushPromises = (): Promise<void> => {
  return new Promise<void>((done) => {
    setImmediate(done);
  });
};
|
package com.packt.chapter6
import akka.actor.{ActorSystem, PoisonPill, Props}
// Demonstrates two ways of stopping persistent actors:
//  - actor1 is stopped with PoisonPill,
//  - actor2 with a custom ShutdownPersistentActor message that the actor
//    handles itself.
// NOTE(review): PoisonPill is delivered through the normal mailbox, so it
// can stop a persistent actor before pending persist callbacks complete —
// presumably the custom message is the "safe" pattern being demonstrated.
object SafePersistenceActorShutdownApp extends App {
  val system = ActorSystem("safe-shutdown")
  val persistentActor1 = system.actorOf(Props[SamplePersistenceActor])
  val persistentActor2 = system.actorOf(Props[SamplePersistenceActor])
  persistentActor1 ! UserUpdate("foo", Add)
  persistentActor1 ! UserUpdate("foo", Add)
  persistentActor1 ! PoisonPill
  persistentActor2 ! UserUpdate("foo", Add)
  persistentActor2 ! UserUpdate("foo", Add)
  persistentActor2 ! ShutdownPersistentActor
}
|
#!/bin/bash -e
# Locate the run-clang-tidy helper script matching the installed clang-tidy.
# Prints the first executable candidate's path and returns 0; returns 1 when
# no candidate exists in any of the known install locations.
find_run_clang_tidy() {
	local version major minor suffix candidate_dir candidate_name
	version=$(clang-tidy --version | awk '/LLVM version/ {print $3}')
	major=$(echo "${version}" | cut -d. -f1)
	minor=$(echo "${version}" | cut -d. -f2)
	suffix="${major}.${minor}"
	for candidate_dir in /usr/bin /usr/local/clang*/bin \
			/usr/lib/llvm-${suffix}/share/clang \
			/usr/lib/llvm-${suffix}/bin \
			/usr/lib/llvm-${major}/share/clang \
			/usr/lib/llvm-${major}/bin; do
		for candidate_name in run-clang-tidy.py run-clang-tidy-${suffix}.py \
				run-clang-tidy-${major}.py; do
			if [ -x "${candidate_dir}/${candidate_name}" ]; then
				echo "${candidate_dir}/${candidate_name}"
				return 0
			fi
		done
	done
	return 1
}
# --------------------------------------------------------------
# Report tool versions (fails fast if anything is missing)
clang-tidy --version
find_run_clang_tidy
clang-format --version

# --------------------------------------------------------------
# Initialise build directory
# This exports the compilation commands for clang-tidy
mkdir build
pushd build
cmake -DCMAKE_CXX_COMPILER=${CXX} -DCMAKE_C_COMPILER=${CC} ..
popd

# Run clang-tidy against the exported compilation database.
# (The original comment said "disabled for now", but the command below
# does run.)
./scripts/run-clang-tidy.py -run-clang-tidy "$(find_run_clang_tidy)" \
	-j ${CORES:-1} -check-output

# --------------------------------------------------------------
# Run clang-format on all files of the repo, then fail the build if it
# modified any tracked file.
./scripts/run-clang-format.py
if [ "$(git status --untracked-files=no --porcelain=v2 | wc -l)" -gt 0 ]; then
	echo "Formatting has changed the following files:"
	git status -s --untracked-files=no
	echo
	echo --------------------------------------------------------
	echo
	git diff
	exit 1
fi
exit 0
|
import 'memory.dart';
const CURRENT_VERSION = '1';

/// This session can be saved and loaded as a string. According to the
/// initial design, it contains only the data necessary for successful
/// connection and authentication, so takeout ID is not stored. It is meant
/// for cases where you don't want on-disk files but still want to save and
/// load sessions by other means.
class StringSession extends MemorySession {
  /// Creates a session, optionally restoring from a serialized [session]
  /// string whose first character must match [CURRENT_VERSION].
  ///
  /// BUG FIX: the original used the removed Dart 1 colon-default syntax
  /// `{session: null}`; named parameters default to null, so `{session}`
  /// is equivalent and valid in current Dart.
  StringSession({session}) {
    if (session != null) {
      if (session[0] != CURRENT_VERSION) {
        // Kept as a string throw for interface compatibility with existing
        // callers; consider FormatException in a follow-up.
        throw ('Not a valid string');
      }
    }
  }

  /// Returns the in-memory auth key; [dcId] is accepted for interface
  /// compatibility but not used here.
  getAuthKey(dcId) {
    return this.authKey;
  }

  /// Stores [authKey]; [dcId] is accepted for interface compatibility.
  setAuthKey(authKey, dcId) {
    this.authKey = authKey;
  }

  /// Nothing to load: all state lives in memory / the session string.
  load() async {}
}
|
def test_testing():
    """Trivial smoke test proving the test harness itself runs."""
    assert True
def test_passwd_file(host):
    """/etc/passwd must mention root, be root-owned, and have 0644 mode."""
    passwd_file = host.file("/etc/passwd")
    assert passwd_file.contains("root")
    assert passwd_file.user == "root"
    assert passwd_file.group == "root"
    assert passwd_file.mode == 0o644
def test_ssh_is_installed(host):
    """The openssh-server package must be installed."""
    package = host.package("openssh-server")
    assert package.is_installed
    # Version pin intentionally left disabled:
    # assert package.version.startswith("1.2")
def test_sshd_running_and_enabled(host):
    """The sshd service must be active now and enabled at boot."""
    service = host.service("sshd")
    assert service.is_running
    assert service.is_enabled
|
package com.fabernovel.statefullayout.transitions
import com.fabernovel.statefullayout.State
/**
 * A transition played when a [State] is shown or hidden.
 *
 * (The original KDoc used `@property start`, but `start` is a function,
 * not a property.)
 */
interface StateTransition : TransitionListenerHandler {
    /**
     * Start the transition
     * - For enter transition, state visibility is set to visible on
     * [StateTransitionListener.onTransitionStart].
     * - For exit transition, state visibility is set to gone on
     * [StateTransitionListener.onTransitionEnd].
     * @param state [State] to play transition with.
     * @param listener [StateTransitionListener] used to update [State] visibility.
     */
    fun start(state: State, listener: StateTransitionListener)
    /**
     * Cancel the state transition, if one is currently running.
     */
    fun cancel()
}
|
# Dashboard controller for querying and managing a pool of miners.
class ManagerController < ApplicationController
  include MinerHelper
  before_filter :retrieve_miner_data, :only => [:index]

  # Renders the dashboard; XHR requests get the page without a layout.
  def index
    if request.xhr?
      # BUG FIX: the original `render layout: false && return` parsed as
      # `render(layout: (false && return))`, so `return` never executed.
      # Render without a layout and return explicitly.
      render layout: false
      return
    end
  end

  # Runs an arbitrary miner-pool query built from request params and renders
  # the result partial. Errors are captured into a human-readable message.
  def run
    @results = begin
      if params[:args].present?
        @miner_pool.query(params[:command].to_sym, *params[:args].split(','))
      else
        @miner_pool.query(params[:command].to_sym)
      end
    rescue StandardError => e
      "invalid command and/or parameters; #{e.message}"
    end
    render partial: 'shared/run', layout: false
  end

  # Updates pool configuration on every miner concurrently, one thread per
  # miner, and renders the aggregated results.
  def manage_pools
    threads = @miner_pool.miners.collect do |miner|
      Thread.new do
        update_pools_for(miner, params)
      end
    end
    # Thread#value implicitly joins, so no explicit join pass is needed.
    @updated = threads.collect(&:value).flatten
    @miner = @miner_pool.miners.first
    render partial: 'shared/manage_pools', layout: false
  end

  private

  # Collects the latest monitoring documents (summary/devs/pools/stats) for
  # each miner, indexed by the miner's position in the pool.
  def retrieve_miner_data
    @miner_data ||= []
    [:summary, :devs, :pools, :stats].each do |type|
      last_entry = "CgminerMonitor::Document::#{type.to_s.capitalize}".constantize.last_entry
      @miner_pool.miners.each_with_index do |miner, index|
        @miner_data[index] ||= {}
        if last_entry && last_entry[:results]
          @miner_data[index][type] = [{type => last_entry[:results][index]}]
        end
      end
    end
  end
end
|
package org.highmed.dsf.fhir.webservice.impl;
import org.highmed.dsf.fhir.dao.GroupDao;
import org.highmed.dsf.fhir.event.EventGenerator;
import org.highmed.dsf.fhir.event.EventManager;
import org.highmed.dsf.fhir.help.ExceptionHandler;
import org.highmed.dsf.fhir.help.ParameterConverter;
import org.highmed.dsf.fhir.help.ResponseGenerator;
import org.highmed.dsf.fhir.service.ReferenceExtractor;
import org.highmed.dsf.fhir.service.ReferenceResolver;
import org.highmed.dsf.fhir.service.ResourceValidator;
import org.highmed.dsf.fhir.webservice.specification.GroupService;
import org.hl7.fhir.r4.model.Group;
/**
 * Web service implementation for FHIR {@link Group} resources. All CRUD and
 * search behavior is inherited from {@link AbstractServiceImpl}; this class
 * only supplies the Group-specific type parameters and dependencies.
 */
public class GroupServiceImpl extends AbstractServiceImpl<GroupDao, Group> implements GroupService
{
	public GroupServiceImpl(String resourceTypeName, String serverBase, String path, int defaultPageCount, GroupDao dao,
			ResourceValidator validator, EventManager eventManager, ExceptionHandler exceptionHandler,
			EventGenerator eventGenerator, ResponseGenerator responseGenerator, ParameterConverter parameterConverter,
			ReferenceExtractor referenceExtractor, ReferenceResolver referenceResolver)
	{
		super(Group.class, resourceTypeName, serverBase, path, defaultPageCount, dao, validator, eventManager,
				exceptionHandler, eventGenerator, responseGenerator, parameterConverter, referenceExtractor,
				referenceResolver);
	}
}
|
/**
* Wechaty - https://github.com/chatie/wechaty
*
* @copyright 2016-2018 Huan LI <zixia@zixia.net>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
// tslint:disable:arrow-parens
// tslint:disable:max-line-length
// tslint:disable:member-ordering
// tslint:disable:unified-signatures
import http from 'http'
import {
log,
VERSION,
} from '../config'
import {
IoAuth,
} from './io-auth'
import {
IoManager,
} from './io-manager'
import {
IoSocket,
} from './io-socket'
/** Options for constructing an IoServer. */
export interface IoServerOptions {
  httpServer : http.Server, // HTTP server the io socket attaches to
  httpPath?  : string,      // optional mount path for the socket endpoint
}
/**
 * Wechaty Io server: attaches an IoSocket to the provided HTTP server,
 * authenticates incoming connections via IoAuth, and registers them with
 * the IoManager.
 */
export class IoServer {
  public static readonly VERSION = VERSION
  public ioManager: IoManager
  protected ioAuth: IoAuth
  protected ioSocket: IoSocket
  /**
   * Constructor
   */
  constructor (
    public options: IoServerOptions,
  ) {
    log.verbose('IoServer', 'constructor()')
    this.ioManager = new IoManager()
    this.ioAuth = new IoAuth()
    this.ioSocket = new IoSocket({
      auth: this.ioAuth.auth.bind(this.ioAuth),
      // this will hook unRegister as well
      connect : this.ioManager.register.bind(this.ioManager),
      httpPath : options.httpPath,
      httpServer : options.httpServer,
    })
  }
  public version (): string {
    return VERSION
  }
  public async start () {
    log.verbose('IoServer', 'start()')
    await this.ioSocket.start()
  }
  // NOTE(review): currently only logs — the socket stop call is commented
  // out, so stop() does not actually shut anything down.
  public async stop () {
    log.verbose('IoServer', 'stop()')
    // await this.ioSocket.stop()
  }
}
|
-- Peter's Pet Store: schema creation and seed data.

-- Drop existing tables first (children before parents) so the script can be
-- re-run after changing table definitions.
DROP TABLE AnimalSale CASCADE CONSTRAINTS PURGE;
DROP TABLE Animal CASCADE CONSTRAINTS PURGE;
DROP TABLE Species CASCADE CONSTRAINTS PURGE;
DROP TABLE Customer CASCADE CONSTRAINTS PURGE;

-- Foreign keys are named <parent>_<child>_FK. Tables are created parents
-- first so every FK target exists before it is referenced.

-- Customers of Peter's Pets.
CREATE TABLE Customer (
    custID    NUMBER(6)    NOT NULL,
    custName  VARCHAR2(30) NULL,
    custPhone VARCHAR2(20) NULL,
    custEmail VARCHAR2(20) NULL,
    CONSTRAINT Customer_PK PRIMARY KEY (custID)
);

-- Species stocked by the store. speciesPrice is NUMBER(5,2): five digits in
-- total, two of them after the decimal point.
CREATE TABLE Species (
    speciesCode  NUMBER(6)    NOT NULL,
    speciesName  VARCHAR2(30) NULL,
    speciesPrice NUMBER(5,2)  NULL,
    CONSTRAINT Species_PK PRIMARY KEY (speciesCode)
);

-- All animals (past and present); each belongs to at most one species.
CREATE TABLE Animal (
    animalID          NUMBER(6)    NOT NULL,
    animalName        VARCHAR2(30) NULL,
    dateShopPurchased DATE         NULL,
    speciesCode       NUMBER(6)    NULL,
    CONSTRAINT Animal_PK PRIMARY KEY (animalID),
    CONSTRAINT Species_Animal_FK FOREIGN KEY (speciesCode) REFERENCES Species (speciesCode) ON DELETE SET NULL
);

-- Sales: each animal may be sold at most once; a customer can buy many.
CREATE TABLE AnimalSale (
    custID   NUMBER(6) NOT NULL,
    animalID NUMBER(6) NOT NULL,
    saleDate DATE      NULL,
    CONSTRAINT AnimalSale_PK PRIMARY KEY (custID, animalID),
    CONSTRAINT Customer_AnimalSale_FK FOREIGN KEY (custID) REFERENCES Customer (custID),
    CONSTRAINT Animal_AnimalSale_FK FOREIGN KEY (animalID) REFERENCES Animal (animalID)
);

-- Seed data: parents before children.
-- NOTE(review): the date literals rely on the session NLS date settings
-- ('DD MON YYYY', English month names) — confirm for the target environment.
INSERT INTO Species (speciesCode, speciesName, speciesPrice) VALUES (1, 'Dog', 9.99);
INSERT INTO Species (speciesCode, speciesName, speciesPrice) VALUES (2, 'Cat', 10.20);
INSERT INTO Species (speciesCode, speciesName, speciesPrice) VALUES (3, 'Mouse', 5.00);
INSERT INTO Species (speciesCode, speciesName, speciesPrice) VALUES (4, 'Snake', 20.00);

INSERT INTO Customer (custID, custName, custPhone, custEmail) VALUES (1, 'D. Smith', '01 237230', 'dsmith@yahoo.co.uk ');
INSERT INTO Customer (custID, custName, custPhone, custEmail) VALUES (2, 'B. Byrne', '071 237820', 'bb@gmail.com');
INSERT INTO Customer (custID, custName, custPhone, custEmail) VALUES (3, 'X. Dobbs', '093 2020202', 'xdobbs@mail.com');

INSERT INTO Animal (animalID, animalName, dateShopPurchased, speciesCode) VALUES (1, 'Tiny', '01 JUN 2010', 1);
INSERT INTO Animal (animalID, animalName, dateShopPurchased, speciesCode) VALUES (2, 'Prince', '01 JUN 2010', 1);
INSERT INTO Animal (animalID, animalName, dateShopPurchased, speciesCode) VALUES (3, 'Bonnie', '01 MAR 2014', 1);
INSERT INTO Animal (animalID, animalName, dateShopPurchased, speciesCode) VALUES (4, 'CJ', '01 MAR 2014', 2);
INSERT INTO Animal (animalID, animalName, dateShopPurchased, speciesCode) VALUES (5, 'Sid', '01 JUN 2014', 2);
INSERT INTO Animal (animalID, animalName, dateShopPurchased, speciesCode) VALUES (6, 'Danger', '04 SEP 2014', 3);
INSERT INTO Animal (animalID, animalName, dateShopPurchased, speciesCode) VALUES (7, 'Sid', '01 MAR 2014', 4);

INSERT INTO AnimalSale (custID, animalID, saleDate) VALUES (2, 2, '11 Jun 2012');
INSERT INTO AnimalSale (custID, animalID, saleDate) VALUES (2, 4, '12 MAR 2014');
INSERT INTO AnimalSale (custID, animalID, saleDate) VALUES (1, 1, '01 Jun 2012');
INSERT INTO AnimalSale (custID, animalID, saleDate) VALUES (3, 5, '04 SEP 2014');
INSERT INTO AnimalSale (custID, animalID, saleDate) VALUES (2, 6, '12 MAR 2014');

-- Persist the seed data.
COMMIT;
|
-- Function-based index supporting case-insensitive lookups on CAT_NUM_SUFFIX.
CREATE INDEX "IX_FLAT_CAT_NUM_SUFFIX_UP" ON "FLAT" (UPPER("CAT_NUM_SUFFIX"))
|
# frozen_string_literal: true
module WavesRubyClient
  # Value object describing a tradable asset: its display name, blockchain
  # id, and the id used when building API URLs.
  class Asset
    include ActiveModel::Model
    attr_accessor :name, :id, :url_id
    # The native WAVES asset: it has an empty blockchain id but still uses
    # WAVES_ASSET_ID in URLs.
    def self.waves
      new(name: 'WAVES', id: '', url_id: WavesRubyClient::WAVES_ASSET_ID)
    end
    # The BTC asset, identified by the configured BTC_ASSET_ID.
    def self.btc
      new(name: 'BTC', id: WavesRubyClient::BTC_ASSET_ID,
          url_id: WavesRubyClient::BTC_ASSET_ID)
    end
    # Binary encoding for transaction signing: [0] for WAVES, otherwise
    # [1] followed by the base58-decoded asset id.
    # NOTE(review): this compares `name` against WAVES_ASSET_ID — it only
    # matches the asset built by `self.waves` if that constant is the
    # literal string 'WAVES'; confirm.
    def to_bytes
      if name == WavesRubyClient::WAVES_ASSET_ID
        [0]
      else
        [1].concat(WavesRubyClient::Conversion.base58_to_bytes(id))
      end
    end
  end
end
|
package com.daimler.mbcarkit.utils
import com.daimler.mbcarkit.network.model.ApiAllowedServiceActions
import com.daimler.mbcarkit.network.model.ApiMissingServiceData
import com.daimler.mbcarkit.network.model.ApiPrerequisiteCheck
import com.daimler.mbcarkit.network.model.ApiService
import com.daimler.mbcarkit.network.model.ApiServiceRight
import com.daimler.mbcarkit.network.model.ApiServiceStatus
/**
 * Test helper producing [ApiService] instances with every field defaulted,
 * so call sites only need to specify the values they care about.
 */
internal object ApiServiceFactory {
    /**
     * Creates an [ApiService].
     *
     * Note: [actualServiceStatus] and [virtualServiceStatus] map onto the
     * API model's `actualActivationServiceStatus` /
     * `virtualActivationServiceStatus` fields.
     */
    fun createService(
        id: Int = 0,
        name: String? = null,
        description: String? = null,
        shortDescription: String? = null,
        shortName: String? = null,
        categoryName: String? = null,
        allowedActions: List<ApiAllowedServiceActions?> = emptyList(),
        activationStatus: ApiServiceStatus? = null,
        desiredServiceStatus: ApiServiceStatus? = null,
        actualServiceStatus: ApiServiceStatus? = null,
        virtualServiceStatus: ApiServiceStatus? = null,
        prerequisiteCheck: List<ApiPrerequisiteCheck?> = emptyList(),
        rights: List<ApiServiceRight?> = emptyList(),
        missingData: ApiMissingServiceData? = null
    ) = ApiService(
        id = id,
        name = name,
        description = description,
        shortDescription = shortDescription,
        shortName = shortName,
        categoryName = categoryName,
        allowedActions = allowedActions,
        activationStatus = activationStatus,
        desiredServiceStatus = desiredServiceStatus,
        actualActivationServiceStatus = actualServiceStatus,
        virtualActivationServiceStatus = virtualServiceStatus,
        prerequisiteCheck = prerequisiteCheck,
        rights = rights,
        missingData = missingData
    )
}
|
using FlatRedBall;
namespace Parme.Frb.Example.Entities
{
    public partial class Bullet
    {
        // Seconds elapsed since this bullet started updating.
        private float _timeAlive;
        /// <summary>
        /// Initialization logic which is executed only one time for this Entity (unless the Entity is pooled).
        /// This method is called when the Entity is added to managers. Entities which are instantiated but not
        /// added to managers will not have this method called.
        /// </summary>
        private void CustomInitialize()
        {
        }
        private void CustomActivity()
        {
            _timeAlive += TimeManager.SecondDifference;
            // Expire the bullet once it has outlived its configured lifetime.
            // LifetimeInSeconds is presumably declared in the generated
            // partial — TODO confirm.
            if (_timeAlive > LifetimeInSeconds)
            {
                Destroy();
            }
        }
        private void CustomDestroy()
        {
            // Trigger the death effect when the bullet is destroyed.
            DeathEmitter.IsEmitting = true;
        }
        private static void CustomLoadStaticContent(string contentManagerName)
        {
        }
    }
}
|
import { AppBar, Box, Container, Grid, Paper, Toolbar, Typography } from '@mui/material';
import * as React from 'react';
import { useEffect, useState } from 'react';
import { useMsGraphClient } from '../features/msgraph';
// Top page: loads the signed-in user's profile from Microsoft Graph (/me)
// and renders the display name and mail address in a definition list.
const TopPage: React.FC = () => {
  const client = useMsGraphClient();
  // Holds the /me Graph response; `any` because no Graph models are imported.
  const [me, setMe] = useState<any>(null);
  useEffect(() => {
    // Fetch only once: skip when the profile has already been loaded.
    if (me) {
      return;
    }
    client.api("/me").get().then((me) => {
      setMe(me);
    });
  }, [client, me]);
  return (
    <Box>
      <AppBar position="static">
        <Toolbar>
          <Typography variant="h6" sx={{ml: 2}}>
            Microsoft Graph React App
          </Typography>
        </Toolbar>
      </AppBar>
      <Container sx={{mt: 4}}>
        <Paper sx={{p:1}}>
          <Grid container component="dl" spacing={1}>
            <Grid item xs={3} component="dt">
              <Typography variant="subtitle1">Display Name</Typography>
            </Grid>
            <Grid item xs={9} component="dd">
              <Typography>{me?.displayName}</Typography>
            </Grid>
            <Grid item xs={3} component="dt">
              <Typography variant="subtitle1">Mail</Typography>
            </Grid>
            <Grid item xs={9} component="dd">
              <Typography>{me?.mail}</Typography>
            </Grid>
          </Grid>
        </Paper>
      </Container>
    </Box>
  )
}
export default TopPage;
|
import { getTypedAncestorTracker } from '../utils/helper'
// Mixin exposing the computed properties produced by the typed-ancestor
// tracker for the nearest `select` ancestor component.
export default {
  computed: getTypedAncestorTracker('select').computed
}
|
#!/bin/sh
# Stop the Hadoop cluster on 192.168.1.111: YARN first, then HDFS.
HADOOP_HOME=/home/hbcs/hadoop/hadoop-2.5.1
# NOTE(review): the SSH password is embedded in plain text via sshpass;
# prefer key-based authentication.
sshpass -p "hbcs2000" ssh hbcs@192.168.1.111 "$HADOOP_HOME/sbin/stop-yarn.sh"
sshpass -p "hbcs2000" ssh hbcs@192.168.1.111 "$HADOOP_HOME/sbin/stop-dfs.sh"
|
"""
CRAM file utilities.
"""
import argparse
from typing import Sequence
from xsamtools import cram
def view(args: argparse.Namespace, extra_args: Sequence[str]):
    """
    A limited wrapper around "samtools view", but with functions to operate on google cloud bucket keys.
    """
    # `extra_args` is accepted for CLI-dispatcher compatibility but is not
    # forwarded to cram.view.
    cram.view(cram=args.cram, crai=args.crai, regions=args.regions, output=args.output, cram_format=args.C)
|
#pragma once
#include "robot.h"
#include <chrono>
#include <list>
// 2D sandbox window: a Robot moves around a set of map points and can shoot
// bullets; input, timing and drawing arrive through WindowClass overrides.
class MapWalker :public WindowClass
{
	// A projectile with position `p` and velocity `v` (see ctor args pos/vel).
	struct Bullet
	{
		mth::float2 p;
		mth::float2 v;
		Bullet(mth::float2 pos, mth::float2 vel);
	};
	decltype(std::chrono::steady_clock::now()) m_prevTime;	// timestamp of the previous update
	std::vector<mth::float2> m_points;	// map points (filled by RandomPoints/PlacedPoints)
	float m_pointRadius;	// radius used when drawing points
	std::list<Bullet> m_bullets;	// live projectiles
	Robot m_robo;	// the controllable robot
private:
	void RandomPoints(int count, int w, int h);	// presumably fills m_points with `count` random points in a w*h area — confirm
	void PlacedPoints();	// fills m_points with a fixed layout
	void DrawPoints(HDC hdc);
	void Update(float delta);	// advance simulation by `delta` time units
	void UpdateBullets(float delta);
	void DrawBullets(HDC hdc);
public:
	MapWalker(HWND hwnd);
	virtual ~MapWalker() override;
	virtual void Paint(HDC hdc, int w, int h) override;
	virtual void Timer(WPARAM wparam) override;
	virtual void KeyDown(WPARAM wparam) override;
	virtual void KeyUp(WPARAM wparam) override;
	virtual void MouseMove(int x, int y, int dx, int dy) override;
	virtual void LBtnDown(int x, int y) override;
};
|
require_relative '../../../spec_helper'
describe Kontena::Rpc::DockerImageApi do
  let(:image) { double(:image, as_json: {})}
  describe '#create' do
    it 'calls Docker::Image.create' do
      # NOTE(review): this sets the expectation on the object under test
      # itself, so the real #create body is never executed — the spec only
      # verifies the stub was called. Consider expecting Docker::Image
      # instead.
      expect(subject).to receive(:create).and_return(image)
      subject.create({})
    end
  end
  describe '#show' do
    it 'gets image from docker' do
      # NOTE(review): same self-stubbing pattern as above.
      expect(subject).to receive(:show).and_return(image)
      subject.show({})
    end
  end
end
|
from openslides_backend.models.models import AgendaItem
from tests.system.action.base import BaseActionTestCase
class AgendaItemSystemTest(BaseActionTestCase):
    def test_create(self) -> None:
        """Plain create: defaults are applied and back-references are set."""
        self.create_model("meeting/2", {"name": "test"})
        self.create_model("topic/1", {"meeting_id": 2})
        response = self.client.post(
            "/",
            json=[
                {
                    "action": "agenda_item.create",
                    "data": [{"content_object_id": "topic/1"}],
                }
            ],
        )
        self.assert_status_code(response, 200)
        model = self.get_model("agenda_item/1")
        self.assertFalse(model.get("meta_deleted"))
        self.assertEqual(model.get("meeting_id"), 2)
        self.assertEqual(model.get("content_object_id"), "topic/1")
        self.assertEqual(model.get("type"), AgendaItem.AGENDA_ITEM)
        self.assertEqual(model.get("weight"), 10000)
        self.assertEqual(model.get("level"), 0)
        model = self.get_model("meeting/2")
        self.assertEqual(model.get("agenda_item_ids"), [1])
        model = self.get_model("topic/1")
        self.assertEqual(model.get("agenda_item_id"), 1)
    def test_create_more_fields(self) -> None:
        """Create with optional fields; child of a parent item gets level 1."""
        self.create_model("meeting/1", {"name": "test"})
        self.create_model("topic/1", {"meeting_id": 1})
        self.create_model("agenda_item/42", {"comment": "test", "meeting_id": 1})
        response = self.client.post(
            "/",
            json=[
                {
                    "action": "agenda_item.create",
                    "data": [
                        {
                            "content_object_id": "topic/1",
                            "comment": "test_comment_oiuoitesfd",
                            "type": 2,
                            "parent_id": 42,
                            "duration": 360,
                        }
                    ],
                }
            ],
        )
        self.assert_status_code(response, 200)
        agenda_item = self.get_model("agenda_item/43")
        self.assertEqual(agenda_item["comment"], "test_comment_oiuoitesfd")
        self.assertEqual(agenda_item["type"], 2)
        self.assertEqual(agenda_item["parent_id"], 42)
        self.assertEqual(agenda_item["duration"], 360)
        self.assertEqual(agenda_item["weight"], 10000)
        self.assertFalse(agenda_item.get("closed"))
        assert agenda_item.get("level") == 1
    def test_create_parent_weight(self) -> None:
        """A child's weight is derived from its parent's weight (10 -> 11)."""
        self.create_model("meeting/1", {"name": "test"})
        self.create_model("topic/1", {"meeting_id": 1})
        self.create_model(
            "agenda_item/42", {"comment": "test", "meeting_id": 1, "weight": 10}
        )
        response = self.client.post(
            "/",
            json=[
                {
                    "action": "agenda_item.create",
                    "data": [
                        {
                            "content_object_id": "topic/1",
                            "comment": "test_comment_oiuoitesfd",
                            "type": 2,
                            "parent_id": 42,
                            "duration": 360,
                        }
                    ],
                }
            ],
        )
        self.assert_status_code(response, 200)
        agenda_item = self.get_model("agenda_item/43")
        self.assertEqual(agenda_item["comment"], "test_comment_oiuoitesfd")
        self.assertEqual(agenda_item["type"], 2)
        self.assertEqual(agenda_item["parent_id"], 42)
        self.assertEqual(agenda_item["duration"], 360)
        self.assertEqual(agenda_item["weight"], 11)
        self.assertFalse(agenda_item.get("closed"))
        assert agenda_item.get("level") == 1
def test_create_content_object_does_not_exist(self) -> None:
response = self.client.post(
"/",
json=[
{
"action": "agenda_item.create",
"data": [{"content_object_id": "topic/1"}],
}
],
)
self.assert_status_code(response, 400)
self.assert_model_not_exists("agenda_item/1")
    def test_create_differing_meeting_ids(self) -> None:
        """Parent item and content object in different meetings is rejected."""
        self.create_model("meeting/1", {})
        self.create_model("meeting/2", {})
        self.create_model("topic/1", {"meeting_id": 1})
        self.create_model("agenda_item/1", {"meeting_id": 2})
        response = self.client.post(
            "/",
            json=[
                {
                    "action": "agenda_item.create",
                    "data": [{"content_object_id": "topic/1", "parent_id": 1}],
                }
            ],
        )
        self.assert_status_code(response, 400)
        self.assertIn(
            "The field meeting_id must be equal",
            str(response.data),
        )
        self.assert_model_not_exists("agenda_item/2")
def test_create_meeting_does_not_exist(self) -> None:
self.create_model("topic/1", {"meeting_id": 2})
response = self.client.post(
"/",
json=[
{
"action": "agenda_item.create",
"data": [{"content_object_id": "topic/1"}],
}
],
)
self.assert_status_code(response, 400)
self.assert_model_not_exists("agenda_item/1")
def test_create_no_meeting_id(self) -> None:
self.create_model("topic/1", {})
response = self.client.post(
"/",
json=[
{
"action": "agenda_item.create",
"data": [{"content_object_id": "topic/1"}],
}
],
)
self.assert_status_code(response, 400)
self.assert_model_not_exists("agenda_item/1")
    def test_create_calc_fields_no_parent_agenda_type(self) -> None:
        """Root agenda item: neither internal nor hidden, level 0."""
        self.create_model("meeting/2", {"name": "test"})
        self.create_model("topic/1", {"meeting_id": 2})
        response = self.client.post(
            "/",
            json=[
                {
                    "action": "agenda_item.create",
                    "data": [
                        {"content_object_id": "topic/1", "type": AgendaItem.AGENDA_ITEM}
                    ],
                }
            ],
        )
        self.assert_status_code(response, 200)
        model = self.get_model("agenda_item/1")
        assert model.get("is_internal") is False
        assert model.get("is_hidden") is False
        assert model.get("level") == 0
    def test_create_calc_fields_no_parent_hidden_type(self) -> None:
        """Root hidden item: is_hidden True, not internal, level 0."""
        self.create_model("meeting/2", {"name": "test"})
        self.create_model("topic/1", {"meeting_id": 2})
        response = self.client.post(
            "/",
            json=[
                {
                    "action": "agenda_item.create",
                    "data": [
                        {"content_object_id": "topic/1", "type": AgendaItem.HIDDEN_ITEM}
                    ],
                }
            ],
        )
        self.assert_status_code(response, 200)
        model = self.get_model("agenda_item/1")
        assert model.get("is_internal") is False
        assert model.get("is_hidden") is True
        assert model.get("level") == 0
def test_create_calc_fields_no_parent_internal_type(self) -> None:
self.create_model("meeting/2", {"name": "test"})
self.create_model("topic/1", {"meeting_id": 2})
self.create_model("topic/2", {"meeting_id": 2})
response = self.client.post(
"/",
json=[
{
"action": "agenda_item.create",
"data": [
{
"content_object_id": "topic/1",
"type": AgendaItem.INTERNAL_ITEM,
}
],
}
],
)
self.assert_status_code(response, 200)
model = self.get_model("agenda_item/1")
assert model.get("is_internal") is True
assert model.get("is_hidden") is False
assert model.get("level") == 0
def test_create_calc_fields_parent_agenda_internal(self) -> None:
self.create_model("meeting/2", {"name": "test"})
self.create_model("topic/1", {"meeting_id": 2})
self.create_model(
"agenda_item/3",
{
"content_object_id": "topic/2",
"type": AgendaItem.AGENDA_ITEM,
"meeting_id": 2,
"is_internal": False,
"is_hidden": False,
"level": 0,
},
)
response = self.client.post(
"/",
json=[
{
"action": "agenda_item.create",
"data": [
{
"content_object_id": "topic/1",
"type": AgendaItem.INTERNAL_ITEM,
"parent_id": 3,
}
],
}
],
)
self.assert_status_code(response, 200)
model = self.get_model("agenda_item/4")
assert model.get("is_internal") is True
assert model.get("is_hidden") is False
assert model.get("level") == 1
def test_create_calc_fields_parent_internal_internal(self) -> None:
self.create_model("meeting/2", {"name": "test"})
self.create_model("topic/1", {"meeting_id": 2})
self.create_model(
"agenda_item/3",
{
"content_object_id": "topic/2",
"type": AgendaItem.INTERNAL_ITEM,
"meeting_id": 2,
"is_internal": True,
"is_hidden": False,
},
)
response = self.client.post(
"/",
json=[
{
"action": "agenda_item.create",
"data": [
{
"content_object_id": "topic/1",
"type": AgendaItem.INTERNAL_ITEM,
"parent_id": 3,
}
],
}
],
)
self.assert_status_code(response, 200)
model = self.get_model("agenda_item/4")
assert model.get("is_internal") is True
assert model.get("is_hidden") is False
assert model.get("level") == 1
def test_create_calc_fields_parent_internal_hidden(self) -> None:
self.create_model("meeting/2", {"name": "test"})
self.create_model("topic/1", {"meeting_id": 2})
self.create_model(
"agenda_item/3",
{
"content_object_id": "topic/2",
"type": AgendaItem.INTERNAL_ITEM,
"meeting_id": 2,
"is_internal": True,
"is_hidden": False,
"level": 12,
},
)
response = self.client.post(
"/",
json=[
{
"action": "agenda_item.create",
"data": [
{
"content_object_id": "topic/1",
"type": AgendaItem.HIDDEN_ITEM,
"parent_id": 3,
}
],
}
],
)
self.assert_status_code(response, 200)
model = self.get_model("agenda_item/4")
assert model.get("is_internal") is True
assert model.get("is_hidden") is True
assert model.get("level") == 13
|
unit ZoneClass;
interface
uses
Classes, SysUtils, Windows,
D3DClass,
InputClass,
ShaderManagerClass,
TextureManagerClass,
TimerClass,
LightClass,
UserInterfaceClass,
CameraClass,
skydomeclass,
PositionClass,
TerrainClass;
type

    { TZoneClass }

    // Owns and orchestrates everything needed to run one rendered "zone":
    // camera, light, position tracker, sky dome, terrain and the 2D user
    // interface. Drives the per-frame input -> update -> render cycle.
    TZoneClass = class(TObject)
    private
        m_UserInterface: TUserInterfaceClass; // 2D overlay (fps, position readout)
        m_Camera: TCameraClass;               // view/base-view matrix source
        m_Position: TPositionClass;           // smoothed view-point position/rotation
        m_Light: TLightClass;                 // directional light for the terrain
        m_Terrain: TTerrainClass;             // height-mapped terrain mesh
        m_displayUI: boolean;                 // True -> render the UI overlay (F1 toggles)
        m_wireFrame: boolean;                 // True -> render terrain in wireframe (F2 toggles)
        m_SkyDome: TSkyDomeClass;             // sky dome centered on the camera

    public
        constructor Create;
        destructor Destroy; override;

        // Build all owned objects; returns S_OK or the failing sub-object's error.
        function Initialize(Direct3D: TD3DClass; hwnd: hwnd; screenWidth, screenHeight: integer; screenDepth: single): HResult;
        procedure Shutdown();
        // Per-frame entry point: input handling, UI update, then Render.
        function Frame(Direct3D: TD3DClass; Input: TInputClass; ShaderManager: TShaderManagerClass;
            TextureManager: TTextureManagerClass; frameTime: single; fps: integer): HResult;
    private
        procedure HandleMovementInput(Input: TInputClass; frameTime: single);
        function Render(Direct3D: TD3DClass; ShaderManager: TShaderManagerClass; TextureManager: TTextureManagerClass): HResult;
    end;
implementation
uses
DirectX.Math;
// Initialize every owned object reference to nil so Shutdown can safely
// nil-test each field even if Initialize failed part-way through.
// Fix: the original skipped m_Light and m_SkyDome (inconsistent with the
// other fields and with Shutdown's checks) and did not call the ancestor
// constructor.
constructor TZoneClass.Create;
begin
    inherited Create;
    m_UserInterface := nil;
    m_Camera := nil;
    m_Position := nil;
    m_Light := nil;
    m_Terrain := nil;
    m_SkyDome := nil;
end;
// Destructor.
// Fix: the original empty body neither released owned objects nor called
// inherited Destroy. Shutdown is idempotent (it nils every field it frees),
// so invoking it here is safe even when the caller already called Shutdown,
// and it prevents leaks when the caller only calls Free.
destructor TZoneClass.Destroy;
begin
    Shutdown();
    inherited Destroy;
end;
// Create and initialize every object the zone owns (UI, camera, light,
// position, sky dome, terrain). Returns S_OK on success; on any failure a
// message box is shown and the failing sub-object's error code is returned.
// NOTE(review): partially-created objects are not freed here on failure —
// the caller is expected to invoke Shutdown afterwards.
function TZoneClass.Initialize(Direct3D: TD3DClass; hwnd: hwnd; screenWidth, screenHeight: integer; screenDepth: single): HResult;
begin
    // Create the user interface object.
    m_UserInterface := TUserInterfaceClass.Create;

    // Initialize the user interface object.
    // NOTE(review): screenHeight is passed before screenWidth — this appears
    // to match TUserInterfaceClass.Initialize's declared order; confirm there.
    Result := m_UserInterface.Initialize(Direct3D, screenHeight, screenWidth);
    if (Result <> S_OK) then
    begin
        MessageBoxW(hwnd, 'Could not initialize the user interface object.', 'Error', MB_OK);
        Exit;
    end;

    // Create the camera object.
    m_Camera := TCameraClass.Create;

    // Set the initial position of the camera and build the matrices needed for rendering.
    m_Camera.SetPosition(0.0, 0.0, -10.0);
    m_Camera.Render();
    // Cache the base view matrix used later for 2D/UI rendering.
    m_Camera.RenderBaseViewMatrix();

    // Create the light object.
    m_Light := TLightClass.Create;

    // Initialize the light: white diffuse, shining down at an angle.
    m_Light.SetDiffuseColor(1.0, 1.0, 1.0, 1.0);
    m_Light.SetDirection(-0.5, -1.0, -0.5);

    // Create the position object.
    m_Position := TPositionClass.Create;

    // Set the initial view position and rotation.
    m_Position.SetPosition(512.0, 30.0, -10.0);
    m_Position.SetRotation(0.0, 0.0, 0.0);

    // Create the sky dome object.
    m_SkyDome := TSkyDomeClass.Create;

    // Initialize the sky dome object.
    Result := m_SkyDome.Initialize(Direct3D.GetDevice());
    if (Result <> S_OK) then
    begin
        MessageBoxW(hwnd, 'Could not initialize the sky dome object.', 'Error', MB_OK);
        Exit;
    end;

    // Create the terrain object.
    m_Terrain := TTerrainClass.Create;

    // Initialize the terrain from its setup file.
    Result := m_Terrain.Initialize(Direct3D.GetDevice(), '.\data\setup.txt');
    if (Result <> S_OK) then
    begin
        MessageBoxW(hwnd, 'Could not initialize the terrain object.', 'Error', MB_OK);
        Exit;
    end;

    // Set the UI to display by default.
    m_displayUI := True;

    // Wire frame rendering starts disabled (F2 toggles it at runtime).
    m_wireFrame := False;

    Result := S_OK;
end;
// Release every owned object in reverse order of creation. Each field is
// nil-tested and nilled afterwards, so calling Shutdown twice is harmless.
procedure TZoneClass.Shutdown();
begin
    // Terrain.
    if Assigned(m_Terrain) then
    begin
        m_Terrain.Shutdown();
        FreeAndNil(m_Terrain);
    end;

    // Sky dome.
    if Assigned(m_SkyDome) then
    begin
        m_SkyDome.Shutdown();
        FreeAndNil(m_SkyDome);
    end;

    // Position tracker.
    if Assigned(m_Position) then
        FreeAndNil(m_Position);

    // Light.
    if Assigned(m_Light) then
        FreeAndNil(m_Light);

    // Camera.
    if Assigned(m_Camera) then
        FreeAndNil(m_Camera);

    // User interface.
    if Assigned(m_UserInterface) then
    begin
        m_UserInterface.Shutdown();
        FreeAndNil(m_UserInterface);
    end;
end;
// Per-frame processing: handle input, refresh the UI text with the current
// fps and view point, then render the scene. Returns the first failing
// sub-step's error code, or the result of Render.
function TZoneClass.Frame(Direct3D: TD3DClass; Input: TInputClass; ShaderManager: TShaderManagerClass;
    TextureManager: TTextureManagerClass; frameTime: single; fps: integer): HResult;
var
    px, py, pz: single;
    rx, ry, rz: single;
begin
    // Process user input first so the camera state is current for this frame.
    HandleMovementInput(Input, frameTime);

    // Query the current view point for the UI readout.
    m_Position.GetPosition(px, py, pz);
    m_Position.GetRotation(rx, ry, rz);

    // Update the user interface (fps and position/rotation text).
    Result := m_UserInterface.Frame(Direct3D.GetDeviceContext(), fps, px, py, pz, rx, ry, rz);
    if (Result = S_OK) then
        // Render the graphics.
        Result := Render(Direct3D, ShaderManager, TextureManager);
end;
// Translate the current key states into movement commands, then mirror the
// resulting position/rotation into the camera. Also handles the F1 (UI) and
// F2 (wireframe) toggles.
procedure TZoneClass.HandleMovementInput(Input: TInputClass; frameTime: single);
var
    px, py, pz, rx, ry, rz: single;
begin
    // Let the position object scale movement speed by the frame time.
    m_Position.SetFrameTime(frameTime);

    // Feed each key state straight into its movement command.
    m_Position.TurnLeft(Input.IsLeftPressed());
    m_Position.TurnRight(Input.IsRightPressed());
    m_Position.MoveForward(Input.IsUpPressed());
    m_Position.MoveBackward(Input.IsDownPressed());
    m_Position.MoveUpward(Input.IsAPressed());
    m_Position.MoveDownward(Input.IsZPressed());
    m_Position.LookUpward(Input.IsPgUpPressed());
    m_Position.LookDownward(Input.IsPgDownPressed());

    // Copy the updated view point into the camera.
    m_Position.GetPosition(px, py, pz);
    m_Position.GetRotation(rx, ry, rz);
    m_Camera.SetPosition(px, py, pz);
    m_Camera.SetRotation(rx, ry, rz);

    // F1 toggles the user interface overlay.
    if (Input.IsF1Toggled()) then
        m_displayUI := not m_displayUI;

    // F2 toggles wireframe rendering of the terrain.
    if (Input.IsF2Toggled()) then
        m_wireFrame := not m_wireFrame;
end;
// Render one frame: sky dome first (culling and Z-buffer disabled, dome
// translated to the camera), then the terrain (optionally wireframe), then
// the 2D user interface. The render-state toggles are strictly ordered —
// do not reorder these calls.
function TZoneClass.Render(Direct3D: TD3DClass; ShaderManager: TShaderManagerClass; TextureManager: TTextureManagerClass): HResult;
var
    worldMatrix, viewMatrix, projectionMatrix, baseViewMatrix, orthoMatrix: TXMMATRIX;
    cameraPosition :TXMFLOAT3;
begin
    // Generate the view matrix based on the camera's position.
    m_Camera.Render();

    // Get the world, view, and projection matrices from the camera and d3d objects.
    Direct3D.GetWorldMatrix(worldMatrix);
    m_Camera.GetViewMatrix(viewMatrix);
    Direct3D.GetProjectionMatrix(projectionMatrix);
    m_Camera.GetBaseViewMatrix(baseViewMatrix);
    Direct3D.GetOrthoMatrix(orthoMatrix);

    // Get the position of the camera (the sky dome is centered on it below).
    cameraPosition := m_Camera.GetPosition();

    // Clear the buffers to begin the scene.
    Direct3D.BeginScene(0.0, 0.0, 0.0, 1.0);

    // The dome is rendered from inside and behind everything else, so turn
    // off back face culling and the Z buffer before drawing it.
    Direct3D.TurnOffCulling();
    Direct3D.TurnZBufferOff();

    // Translate the sky dome to be centered around the camera position.
    worldMatrix := XMMatrixTranslation(cameraPosition.x, cameraPosition.y, cameraPosition.z);

    // Render the sky dome using the sky dome shader.
    m_SkyDome.Render(Direct3D.GetDeviceContext());
    result := ShaderManager.RenderSkyDomeShader(Direct3D.GetDeviceContext(), m_SkyDome.GetIndexCount(), worldMatrix, viewMatrix,
        projectionMatrix, m_SkyDome.GetApexColor(), m_SkyDome.GetCenterColor());
    if (Result <> S_OK) then
        Exit;

    // Reset the world matrix (it was overwritten by the dome translation).
    Direct3D.GetWorldMatrix(worldMatrix);

    // Restore the Z buffer and back face culling for normal 3D rendering.
    Direct3D.TurnZBufferOn();
    Direct3D.TurnOnCulling();

    // Turn on wire frame rendering of the terrain if needed.
    if (m_wireFrame) then
        Direct3D.EnableWireframe();

    // Render the terrain grid using the terrain shader.
    m_Terrain.Render(Direct3D.GetDeviceContext());
    Result := ShaderManager.RenderTerrainShader(Direct3D.GetDeviceContext(), m_Terrain.GetIndexCount(), worldMatrix,
        viewMatrix, projectionMatrix, TextureManager.GetTexture(0), TextureManager.GetTexture(1), m_Light.GetDirection(), m_Light.GetDiffuseColor());
    if (Result <> S_OK) then
        Exit;

    // Turn off wire frame rendering of the terrain if it was on.
    if (m_wireFrame) then
        Direct3D.DisableWireframe();

    // Render the 2D user interface on top, using the base view + ortho matrices.
    if (m_displayUI) then
    begin
        Result := m_UserInterface.Render(Direct3D, ShaderManager, worldMatrix, baseViewMatrix, orthoMatrix);
        if (Result <> S_OK) then
            Exit;
    end;

    // Present the rendered scene to the screen.
    Direct3D.EndScene();
end;
end.
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.