repo_name
stringlengths
6
101
path
stringlengths
4
300
text
stringlengths
7
1.31M
microsoft/fugue
include/transaction/transaction-task.h
<filename>include/transaction/transaction-task.h #ifndef TXSERVICE_TRANSACTION_TRANSACTION_TASK_H_ #define TXSERVICE_TRANSACTION_TRANSACTION_TASK_H_ #include "transaction/transaction-execution.h" #include "transaction/transaction-request.h" namespace txservice::transaction { class TransactionTask { public: TransactionTask() : transaction_execution_(), transaction_request_(), in_use_(false), session_id_(0) { } TransactionTask(TransactionExecution transaction_execution, TransactionRequest transaction_request) : transaction_execution_(transaction_execution), transaction_request_(transaction_request), in_use_(false), session_id_(0) { } TransactionExecution *GetTransactionExecution() { return &transaction_execution_; } TransactionRequest *GetTransactionRequest() { return &transaction_request_; } void Reset(int64_t session_id) { transaction_execution_.Reset(); transaction_request_.Reset(); in_use_ = true; session_id_ = session_id; } inline void Release() { in_use_ = false; } inline bool InUse() { return in_use_; } inline int64_t GetSessionID() { return session_id_; } private: TransactionExecution transaction_execution_; TransactionRequest transaction_request_; bool in_use_; int64_t session_id_; }; } // namespace txservice::transaction #endif // TXSERVICE_TRANSACTION_TRANSACTION_TASK_H_
rayvburn/incare_people_sim
hubero_interfaces/include/hubero_interfaces/navigation_base.h
#pragma once #include <hubero_common/defines.h> #include <hubero_common/logger.h> #include <hubero_common/typedefs.h> #include <string> #include <tuple> namespace hubero { /** * @brief Creates interface class for algorithms that may provide navigation skills to the HuBeRo actors */ class NavigationBase { public: const double GOAL_REACHED_TOLERANCE_DEFAULT = 0.1; /** * @brief Default constructor */ NavigationBase(): initialized_(false), feedback_(TaskFeedbackType::TASK_FEEDBACK_UNDEFINED) {} /** * @brief Initialization */ inline virtual bool initialize(const std::string& actor_name, const std::string& world_frame_name) { actor_name_ = actor_name; frame_world_ = world_frame_name; initialized_ = true; } inline virtual bool initialize( const std::string& actor_name, const std::string& world_frame_name, const std::string& global_ref_frame_name ) { frame_global_ref_ = global_ref_frame_name; initialize(actor_name, world_frame_name); } /** * @brief Evaluates whether given @ref goal is achievable starting from @ref start * * @note could be const, but ROS version uses non-const serviceclient::call inside */ inline virtual bool isPoseAchievable(const Pose3& start, const Pose3& goal, const std::string& frame) { return false; } /** * @brief Related to localisation * * @note @ref pose must be expressed in the same frame as goal pose in @setGoal * @note Basic implementation mainly for unit testing purposes */ virtual void update(const Pose3& pose, const Vector3& vel_lin = Vector3(), const Vector3& vel_ang = Vector3()) { current_pose_ = pose; Vector3 goal_xy(goal_pose_.Pos().X(), goal_pose_.Pos().Y(), 0.0); Vector3 curr_xy(current_pose_.Pos().X(), current_pose_.Pos().Y(), 0.0); double dist_to_goal = (curr_xy - goal_xy).Length(); if (dist_to_goal <= NavigationBase::GOAL_REACHED_TOLERANCE_DEFAULT) { feedback_ = TaskFeedbackType::TASK_FEEDBACK_SUCCEEDED; } else if (getFeedback() == TaskFeedbackType::TASK_FEEDBACK_PENDING) { feedback_ = TaskFeedbackType::TASK_FEEDBACK_ACTIVE; } } /** 
* @brief Related to simplest navigation task (moving to goal pose) */ virtual bool setGoal(const Pose3& pose, const std::string& frame) { goal_pose_ = pose; goal_frame_ = frame; feedback_ = TaskFeedbackType::TASK_FEEDBACK_PENDING; return true; } /** * @brief Tries to abort the movement goal, returns true if successful */ virtual bool cancelGoal() { feedback_ = TaskFeedbackType::TASK_FEEDBACK_ABORTED; return false; } /** * @brief Sets feedback to TERMINATED * * @details It is wise to call it once last goal was reached, otherwise it will keep succeeded state */ virtual void finish() { feedback_ = TaskFeedbackType::TASK_FEEDBACK_TERMINATED; } /** * @brief Computes reachable pose that is closest to the given pose, starting from current pose from update call */ virtual std::tuple<bool, Pose3> computeClosestAchievablePose(const Pose3& pose, const std::string& frame) { return std::make_tuple(false, pose); } /** * @brief Randomly chooses a reachable goal * @details Goal is expressed in global reference frame, see @ref getGlobalReferenceFrame * @return Tuple: bool is true if goal is valid, Pose3 is reachable pose */ virtual std::tuple<bool, Pose3> findRandomReachableGoal() { return std::make_tuple(false, Pose3()); } /** * @brief Returns TaskFeedbackType */ inline TaskFeedbackType getFeedback() const { return feedback_; } /** * @brief Returns true if class was initialized successfully */ inline bool isInitialized() const { return initialized_; } /** * @brief Returns newest velocity command */ inline virtual Vector3 getVelocityCmd() const { return Vector3(); } /** * @brief Retrieves newest goal pose */ inline virtual Pose3 getGoalPose() const { return goal_pose_; } /** * @brief Retrieves newest goal's frame ID */ inline virtual std::string getGoalFrame() const { return goal_frame_; } /** * @brief Retrieves name of the world (simulator) frame */ inline virtual std::string getWorldFrame() const { return frame_world_; } /** * @brief Retrieves name of the global reference (map) 
frame */ inline virtual std::string getGlobalReferenceFrame() const { return frame_global_ref_; } /** * @brief Retrieves how far from the goal the actor can be located to accept the global plan */ inline virtual double getGoalTolerance() const { return 1e-03; } /** * @brief Transforms local velocity (typically received as velocity command) to a global coordinate system */ static Vector3 convertCommandToGlobalCs(const double& yaw_actor, const Vector3& cmd_vel_local) { // slide 38 at https://www.cs.princeton.edu/courses/archive/fall11/cos495/COS495-Lecture3-RobotMotion.pdf ignition::math::Matrix3d r( cos(yaw_actor), 0.0, 0.0, sin(yaw_actor), 0.0, 0.0, 0.0, 0.0, 1.0 ); return r * cmd_vel_local; } protected: /// @brief Stores initialization indicator flag bool initialized_; /// @brief Name of the actor std::string actor_name_; /// @brief Navigation task feedback TaskFeedbackType feedback_; /// @brief Name of the frame that incoming ( @ref update ) poses are referenced in std::string frame_world_; /// @brief Name of the frame that goals chosen with @ref findRandomReachableGoal are referenced in std::string frame_global_ref_; /// @brief Stores most recent pose from localisation Pose3 current_pose_; /// @brief Stores newest goal pose Pose3 goal_pose_; /// @brief Stores newest goal pose frame std::string goal_frame_; }; // class NavigationBase } // namespace hubero
theseushu/funong-web
app/modules/common/form/validations.js
import _isEmpty from 'lodash/isEmpty'; const distanceRegex = /^[1-9][0-9]{0,1}$/; // 1 - 99 const quantityRegex = /^[1-9][0-9]{0,4}$/; // 1 - 99999 const priceRegex = /^[0-9]{1,7}(\.[0-9]{1,2})?$/; // 0.01 - 9999999.99 const IDCardRegex = /(^\d{15}$)|(^\d{17}([0-9]|X|x)$)/; export const required = (value) => { // lodash isEmpty(number) = true. so I added number check before isEmpty let empty = false; if (typeof value === 'number') { empty = false; } else if (typeof value === 'string') { empty = value.trim() === ''; } else { empty = _isEmpty(value); } return empty ? '必填' : undefined; }; export const maxLength = (max) => (value) => value && value.length > max ? `至多${max}个字` : undefined; export const maxLength10 = maxLength(10); export const maxLength30 = maxLength(30); export const number = (value) => value && isNaN(Number(value)) ? '必须使用数字' : undefined; export const minValue = (min) => (value) => value && value <= min ? `使用大于${min}的数字` : undefined; export const minValue0 = minValue(0); export const IDCard = (value) => IDCardRegex.test(value) ? undefined : '请输入正确的身份证号码'; export const distance = (value) => distanceRegex.test(value) ? undefined : '请使用1-99的整数'; export const quantity = (value) => quantityRegex.test(value) ? undefined : '请使用正整数,最大10万'; export const price = (value) => priceRegex.test(value) ? undefined : '请使用正数,小数位两位。如:100, 7.13, 0.99'; // logistics product export const capacity = quantity; export const count = quantity;
google-org/services-eligibility-calculator
client/testing/updateMockData.js
/** * Copyright 2020 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ const fs = require('fs').promises; const globalFetch = require('node-fetch'); const prettier = require('prettier'); const jestDiff = require('jest-diff'); const chalk = require('chalk'); import { LocalStorage } from 'node-localstorage'; import { fetch } from '../common/dataAccess'; import { login } from '../common/auth'; global.fetch = globalFetch; global.Headers = globalFetch.Headers; global.localStorage = new LocalStorage('./scratch'); let deterministicIdMap = {}; let deterministicIdCounter = {}; let licenseHeader = `/** * Copyright 2020 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ `; async function getAllData(type, queryParams, loginUser) { let fetchUrl = `${type}/?pagination=false&order[name]=asc&order[description]=asc${queryParams}`; await login(loginUser.user, loginUser.password); let response = await fetch(fetchUrl); return await response.json(); } function getNextDeterministicId(type) { if (deterministicIdCounter[type] === undefined) { deterministicIdCounter[type] = 0; } return (deterministicIdCounter[type]++).toString(); } function getDeterministicId(fullId) { let { type, id, subtype } = fullId.match( /(?<type>\/([^\/]*\/){2})(?<id>[^\/]*)(\/(?<subtype>).*)?/ ).groups; if (deterministicIdMap[`${type}${id}`] === undefined) { let newId = type === '/api/settings/' || type === '/api/user_activities/' ? id : getNextDeterministicId(type); deterministicIdMap[`${type}${id}`] = { '@id': `${type}${newId}`, id: newId }; } return deterministicIdMap[`${type}${id}`]; } function makeChildIdsDeterministic(entry) { let childrenToOverwrite = Object.keys(entry).reduce((prev, childKey) => { let child = entry[childKey]; if (child && child['@id'] !== undefined) { prev[childKey] = makeEntryDeterministic(child); } if ( childKey == 'history' || childKey == 'interests' || childKey == 'eligibilityProfiles' ) { prev[childKey] = child.map(entry => makeEntryDeterministic(entry)); } if (childKey == 'serviceId') { prev[childKey] = getDeterministicId('/api/services/' + child).id; } return prev; }, {}); return { ...entry, ...childrenToOverwrite }; } function makeEntryDeterministic(entry) { let newId = getDeterministicId(entry['@id']); if (entry['draft'] && typeof entry['draft'] === 'string') { entry['draft'] = getDeterministicId(entry['draft'])['@id']; } if (entry['service'] && typeof entry['service'] === 'string') { entry['service'] = getDeterministicId(entry['service'])['@id']; } if (entry['lastModifiedAgo']) { // Hard-code so doesn't change as time progresses entry['lastModifiedAgo'] = '20 days ago'; } if (entry['lastLogin']) { // Hard-code so date never changes 
entry['lastLogin'] = '2020-02-27T20:02:04+00:00'; } let newIdCopy = { ...newId }; if (entry.id === undefined) delete newIdCopy.id; let processedEntry = makeChildIdsDeterministic(entry); return { ...processedEntry, ...newIdCopy }; } function makeIdsDeterministic(data) { data['hydra:member'] = data['hydra:member'].map(makeEntryDeterministic); return data; } async function buildMockFileContents(path, type, queryParams, loginUser) { let data = await getAllData(type, queryParams, loginUser); data = makeIdsDeterministic(data); let prettierOptions = await prettier.resolveConfig(path); prettierOptions.filepath = path; return ( licenseHeader + prettier.format( `export const ${type} = ${JSON.stringify(data)}; \n`, prettierOptions ) ); } async function createMockFile( path, type, queryParams = '', loginUser = { user: 'user', password: '<PASSWORD>' } ) { let content = await buildMockFileContents(path, type, queryParams, loginUser); await fs.writeFile(path, content, 'utf8'); } async function checkMockFile( path, type, queryParams = '', loginUser = { user: 'user', password: '<PASSWORD>' } ) { let received = await buildMockFileContents( path, type, queryParams, loginUser ); let expected = await fs.readFile(path, 'utf8'); if (expected !== received) { console.error(`\n${chalk.white.bgRed.bold(' FAIL ')} ${path}`); console.error(jestDiff(expected, received, { expand: false })); return chalk.red(`Mock file ${path} does not match.`); } else { console.log(`\n${chalk.white.bgGreen.bold(' PASS ')} ${path}`); return null; } } async function updateSnapshots() { console.log('Updating mock data.'); await createMockFile(`${__dirname}/mockRawInterestData.js`, 'interests'); await createMockFile(`${__dirname}/mockRawDepartmentData.js`, 'departments'); await createMockFile( `${__dirname}/mockRawServiceData.js`, 'services', '&exists[archivedDateTime]=false' ); await createMockFile( `${__dirname}/mockRawArchivedServiceData.js`, 'services', '&exists[archivedDateTime]=true' ); await createMockFile( 
`${__dirname}/mockRawServiceDraftData.js`, 'service_drafts' ); await createMockFile( `${__dirname}/mockRawServiceHistoryData.js`, 'service_histories' ); await createMockFile( `${__dirname}/mockRawServiceActivityData.js`, 'service_activities', '', { user: 'admin', password: '<PASSWORD>' } ); await createMockFile( `${__dirname}/mockRawUserActivityData.js`, 'user_activities', '', { user: 'admin', password: '<PASSWORD>' } ); await createMockFile(`${__dirname}/mockRawSettingsData.js`, 'settings'); console.log(chalk.green('\nThe mock data has been updated!\n')); } async function checkSnapshots() { console.log('Checking mock data.'); let results = []; results.push( await checkMockFile(`${__dirname}/mockRawInterestData.js`, 'interests') ); results.push( await checkMockFile(`${__dirname}/mockRawDepartmentData.js`, 'departments') ); results.push( await checkMockFile( `${__dirname}/mockRawServiceData.js`, 'services', '&exists[archivedDateTime]=false' ) ); results.push( await checkMockFile( `${__dirname}/mockRawArchivedServiceData.js`, 'services', '&exists[archivedDateTime]=true' ) ); results.push( await checkMockFile( `${__dirname}/mockRawServiceDraftData.js`, 'service_drafts' ) ); results.push( await checkMockFile( `${__dirname}/mockRawServiceHistoryData.js`, 'service_histories' ) ); results.push( await checkMockFile( `${__dirname}/mockRawServiceActivityData.js`, 'service_activities', '', { user: 'admin', password: '<PASSWORD>' } ) ); results.push( await checkMockFile( `${__dirname}/mockRawUserActivityData.js`, 'user_activities', '', { user: 'admin', password: '<PASSWORD>' } ) ); results.push( await checkMockFile(`${__dirname}/mockRawSettingsData.js`, 'settings') ); let errors = results.filter(s => s !== null); if (errors.length > 0) { throw errors.join('\n'); } console.log(chalk.green('\nThe mock data matches!\n')); } (async function() { try { process.argv.includes('--check') ? 
await checkSnapshots() : await updateSnapshots(); process.exit(); } catch (error) { console.error(chalk.red(`\n${error.message}\n${error.stack}\n`)); process.exit(1); } })();
ClausKlein/taox11
tao/x11/typecodefactory/typecodefactory_adapter_impl.h
/**
 * @file typecodefactory_adapter_impl.h
 * @author <NAME>
 *
 * @brief CORBA C++11 typecodefactory library
 *
 * @copyright Copyright (c) Remedy IT Expertise BV
 */
#ifndef TAOX11_TYPECODEFACTORY_ADAPTER_IMPL_H
#define TAOX11_TYPECODEFACTORY_ADAPTER_IMPL_H

#pragma once

#include "tao/x11/typecodefactory/taox11_typecodefactory_export.h"
#include "ace/Service_Config.h"
#include "tao/x11/typecode_factory_adapter.h"

namespace TAOX11_NAMESPACE
{
  /**
   * Concrete TypecodeFactoryAdapter: one create_*_tc factory method per
   * CORBA TypeCode kind. Each returns a reference-counted TypeCode built
   * from a repository @c id, a @c name, and kind-specific arguments.
   * Loaded as an ACE service (see macros below).
   */
  class TAOX11_TYPECODEFACTORY_Export TypeCodeFactory_Adapter_Impl
    : public TypecodeFactoryAdapter
  {
  public:
    TypeCodeFactory_Adapter_Impl ();
    virtual ~TypeCodeFactory_Adapter_Impl ();

    // --- Aggregate types: struct / union / enum / exception ---
    CORBA::object_reference<CORBA::TypeCode> create_struct_tc (
      const std::string &id,
      const std::string &name,
      const CORBA::StructMemberSeq &members) override;

    CORBA::object_reference<CORBA::TypeCode> create_union_tc (
      const std::string &id,
      const std::string &name,
      CORBA::object_reference<CORBA::TypeCode> discriminator_type,
      const CORBA::UnionMemberSeq &members) override;

    CORBA::object_reference<CORBA::TypeCode> create_enum_tc (
      const std::string &id,
      const std::string &name,
      const CORBA::EnumMemberSeq &members) override;

    CORBA::object_reference<CORBA::TypeCode> create_alias_tc (
      const std::string &id,
      const std::string &name,
      CORBA::object_reference<CORBA::TypeCode> original_type) override;

    CORBA::object_reference<CORBA::TypeCode> create_exception_tc (
      const std::string &id,
      const std::string &name,
      const CORBA::StructMemberSeq &members) override;

    // --- Interface and basic bounded types ---
    CORBA::object_reference<CORBA::TypeCode> create_interface_tc (
      const std::string &id,
      const std::string &name) override;

    CORBA::object_reference<CORBA::TypeCode> create_string_tc (
      uint32_t bound) override;

    CORBA::object_reference<CORBA::TypeCode> create_wstring_tc (
      uint32_t bound) override;

    CORBA::object_reference<CORBA::TypeCode> create_fixed_tc (
      uint16_t digits,
      uint16_t scale) override;

    CORBA::object_reference<CORBA::TypeCode> create_sequence_tc (
      uint32_t bound,
      CORBA::object_reference<CORBA::TypeCode> element_type) override;

    CORBA::object_reference<CORBA::TypeCode> create_array_tc (
      uint32_t length,
      CORBA::object_reference<CORBA::TypeCode> element_type) override;

    // --- Valuetypes and related kinds ---
    CORBA::object_reference<CORBA::TypeCode> create_value_tc (
      const std::string &id,
      const std::string &name,
      CORBA::ValueModifier type_modifier,
      CORBA::object_reference<CORBA::TypeCode> concrete_base,
      const CORBA::ValueMemberSeq &members) override;

    CORBA::object_reference<CORBA::TypeCode> create_value_box_tc (
      const std::string &id,
      const std::string &name,
      CORBA::object_reference<CORBA::TypeCode> boxed_type) override;

    CORBA::object_reference<CORBA::TypeCode> create_native_tc (
      const std::string &id,
      const std::string &name) override;

    CORBA::object_reference<CORBA::TypeCode> create_recursive_tc (
      const std::string &id) override;

    CORBA::object_reference<CORBA::TypeCode> create_abstract_interface_tc (
      const std::string &id,
      const std::string &name) override;

    CORBA::object_reference<CORBA::TypeCode> create_local_interface_tc (
      const std::string &id,
      const std::string &name) override;

    // --- CCM component / home / event types ---
    CORBA::object_reference<CORBA::TypeCode> create_component_tc (
      const std::string &id,
      const std::string &name) override;

    CORBA::object_reference<CORBA::TypeCode> create_home_tc (
      const std::string &id,
      const std::string &name) override;

    CORBA::object_reference<CORBA::TypeCode> create_event_tc (
      const std::string &id,
      const std::string &name,
      CORBA::ValueModifier type_modifier,
      CORBA::object_reference<CORBA::TypeCode> concrete_base,
      const CORBA::ValueMemberSeq &members) override;

    /// Service registration hook used by the static initializer below.
    static int Initializer ();
  };

  // Runs Initializer() once per translation unit including this header —
  // the conventional ACE static-service registration pattern (intentional,
  // despite the per-TU copy a header-scope `static` implies).
  static int TAOX11_TypeCodeFactory_Adapter_Impl_Initializer =
    TypeCodeFactory_Adapter_Impl::Initializer ();

  // ACE service/factory declarations so the adapter can be loaded
  // via the ACE Service Configurator.
  ACE_STATIC_SVC_DECLARE (TypeCodeFactory_Adapter_Impl)
  ACE_FACTORY_DECLARE (TAOX11_TYPECODEFACTORY, TypeCodeFactory_Adapter_Impl)
} // namespace TAOX11_NAMESPACE

#endif /* TAOX11_TYPECODEFACTORY_ADAPTER_IMPL_H */
joshRpowell/cri
samples/sample_simple.rb
# frozen_string_literal: true $LOAD_PATH.unshift(File.dirname(__FILE__) + '/../lib') require 'cri' command = Cri::Command.define do name 'moo' usage 'usage: moo [options]' summary 'does stuff' description <<~DESC This command does a lot of stuff. I really mean a lot. Well actually I am lying. It doesn’t do that much. In fact, it barely does anything. It’s merely a sample command to show off Cri! DESC option :a, :aaa, 'opt a', argument: :optional required :b, :bbb, 'opt b' optional :c, :ccc, 'opt c' flag :d, :ddd, 'opt d' forbidden :e, :eee, 'opt e' flag :f, :fff, 'opt f', hidden: true flag :g, :ggg, 'this is an option with a very long description that should reflow nicely' flag :s, nil, 'option with only a short form' flag nil, 'long', 'option with only a long form' optional :i, :iii, 'opt i', default: 'donkey' run do |opts, args| puts 'Executing!' p(opts: opts, args: args) end end puts command.help command.run(ARGV)
mineLdiver/expressions
src/main/java/uk/co/benjiweber/expressions/function/SeptConsumer.java
<gh_stars>0 package uk.co.benjiweber.expressions.function; public interface SeptConsumer<T, U, V, W, X, Y, Z> { void accept(T t, U u, V v, W w, X x, Y y, Z z); }
lechium/tvOS135Headers
System/Library/PrivateFrameworks/TVSystemMenuUI.framework/_TVSMHighContrastFocusView.h
/*
* This header is generated by classdump-dyld 1.0
* on Sunday, June 7, 2020 at 11:44:46 AM Mountain Standard Time
* Operating System: Version 13.4.5 (Build 17L562)
* Image Source: /System/Library/PrivateFrameworks/TVSystemMenuUI.framework/TVSystemMenuUI
* classdump-dyld is licensed under GPLv3, Copyright © 2013-2016 by <NAME>.
*/

#import <TVSystemMenuUI/TVSystemMenuUI-Structs.h>
#import <UIKitCore/UIView.h>

@class UIView;

// Reverse-engineered private tvOS view. Names suggest a focus-ring view used
// by the system menu when high-contrast accessibility is enabled — semantics
// inferred from symbol names only; confirm against runtime behavior.
@interface _TVSMHighContrastFocusView : UIView {

	UIView* _innerFocusRing;  // backing ivar for the innerFocusRing property

}
@property (nonatomic,readonly) UIView * innerFocusRing;              //@synthesize innerFocusRing=_innerFocusRing - In the implementation block
-(id)initWithFrame:(CGRect)arg1 ;
-(void)layoutSubviews;
-(void)_setCornerRadius:(double)arg1 ;
-(void)_setContinuousCornerRadius:(double)arg1 ;  // continuous (squircle) corner variant, per UIKit naming
-(void)_updateForAccessibilityChange;
-(UIView *)innerFocusRing;
@end
tangosource/drone
Godeps/_workspace/src/github.com/drone/go-github/github/contents.go
<reponame>tangosource/drone package github import ( "encoding/base64" "fmt" ) // These API methods let you retrieve the contents of files within a repository // as Base64 encoded content. type ContentResource struct { client *Client } type Content struct { Type string `json:"type"` Name string `json:"name"` Path string `json:"path"` Encoding string `json:"encoding"` Content string `json:"content"` Size int64 `json:"size"` Sha string `json:"sha"` } func (c *Content) DecodeContent() ([]byte, error) { return base64.StdEncoding.DecodeString(c.Content) } // This method returns the contents of a file or directory in a repository. func (r *ContentResource) Find(owner, repo, path string) (*Content, error) { content := Content{} url_path := fmt.Sprintf("/repos/%s/%s/contents/%s", owner, repo, path) if err := r.client.do("GET", url_path, nil, &content); err != nil { return nil, err } return &content, nil } // This method returns the contents of a file or directory in a repository. func (r *ContentResource) FindRef(owner, repo, path, ref string) (*Content, error) { content := Content{} url_path := fmt.Sprintf("/repos/%s/%s/contents/%s?ref=%s", owner, repo, path, ref) if err := r.client.do("GET", url_path, nil, &content); err != nil { return nil, err } return &content, nil } // This method returns the preferred README for a repository. func (r *ContentResource) ReadMe(owner, repo string) (*Content, error) { content := Content{} path := fmt.Sprintf("/repos/%s/%s/readme", owner, repo) if err := r.client.do("GET", path, nil, &content); err != nil { return nil, err } return &content, nil }
sscit/rel
rel-lib/test/unittest/LexerTest.cpp
#include "gtest/gtest.h"
#include "rel-lib/src/Lexer.h"

// Fixture inherits Lexer so the protected classifier helpers (IsOperator,
// IsIdentifier, CheckStringandAddToken, ...) can be called directly, and also
// owns a second Lexer (lexer_test) for whole-input Read() tests.
class LexerTestFixture : public ::testing::Test, public Lexer {
protected:
    // NOTE(review): the Lexer base is initialized with `logger` before the
    // `logger` member itself is constructed (bases initialize before
    // members) — safe only if Lexer just stores the reference; confirm.
    LexerTestFixture() : Lexer(logger), lexer_test(logger) {}

    void SetUp() override {
        // token_list is presumably inherited from Lexer and is the sink for
        // CheckStringandAddToken below — TODO confirm in Lexer.h.
        token_list = new std::list<Token>();
        // logger.SetLogLevel(LogLevel::DBUG);
    }

    void TearDown() override {
        delete token_list;
    }

    Logger logger;
    std::string testdata;
    Lexer lexer_test;
};

// ---- Tokenizing quoted strings, including \" and \\ escapes ----

TEST_F(LexerTestFixture, AllTokensInString) {
    std::string testdata = "\"this string has 7 tokens\"";
    FileReader r(testdata);
    FileTokenData d(DataType::RequirementsData, r);
    lexer_test.Read(d);
    EXPECT_EQ(d.token_list.size(), 7);
    EXPECT_EQ(d.token_list.front().GetTokenType(), TokenType::QUOTATION_MARK);
}

TEST_F(LexerTestFixture, AllTokensInString2) {
    // Masked quotes (\") inside a string become QUOTATION_MARK_MASKED tokens.
    std::string testdata = "\"to \\\"emphasize\\\"\"";
    logger.LOG(LogLevel::DBUG, testdata);
    FileReader r(testdata);
    FileTokenData d(DataType::RequirementsData, r);
    lexer_test.Read(d);
    EXPECT_EQ(d.token_list.size(), 6);
    EXPECT_EQ(d.token_list.front().GetTokenType(), TokenType::QUOTATION_MARK);
    EXPECT_EQ(std::next(d.token_list.begin(), 2)->GetTokenType(), TokenType::QUOTATION_MARK_MASKED);
}

TEST_F(LexerTestFixture, AllTokensInString3) {
    // Same as above but without whitespace before the masked quote.
    std::string testdata = "\"to\\\"emphasize\\\"\"";
    logger.LOG(LogLevel::DBUG, testdata);
    FileReader r(testdata);
    FileTokenData d(DataType::RequirementsData, r);
    lexer_test.Read(d);
    EXPECT_EQ(d.token_list.size(), 6);
    EXPECT_EQ(d.token_list.front().GetTokenType(), TokenType::QUOTATION_MARK);
    EXPECT_EQ(std::next(d.token_list.begin(), 2)->GetTokenType(), TokenType::QUOTATION_MARK_MASKED);
}

TEST_F(LexerTestFixture, AllTokensInString4) {
    // Doubled backslashes collapse into the STRING_VALUE payload ("\\\\x", 5 chars).
    std::string testdata = "\"\\\\\\\\x\"";
    FileReader r(testdata);
    FileTokenData d(DataType::RequirementsData, r);
    lexer_test.Read(d);
    EXPECT_EQ(d.token_list.size(), 3);
    EXPECT_EQ(d.token_list.front().GetTokenType(), TokenType::QUOTATION_MARK);
    EXPECT_EQ(std::next(d.token_list.begin(), 1)->GetTokenType(), TokenType::STRING_VALUE);
    EXPECT_EQ(std::next(d.token_list.begin(), 1)->GetTokenValue()->size(), 5);
    EXPECT_EQ(std::next(d.token_list.begin(), 2)->GetTokenType(), TokenType::QUOTATION_MARK);
}

TEST_F(LexerTestFixture, AllTokensInString5) {
    // Lone backslashes surrounded by spaces count as their own tokens.
    std::string testdata = "\"a string \\ with \\ 9 tokens \"";
    FileReader r(testdata);
    FileTokenData d(DataType::RequirementsData, r);
    lexer_test.Read(d);
    EXPECT_EQ(d.token_list.size(), 9);
    EXPECT_EQ(d.token_list.front().GetTokenType(), TokenType::QUOTATION_MARK);
    EXPECT_EQ(std::next(d.token_list.begin(), 8)->GetTokenType(), TokenType::QUOTATION_MARK);
}

TEST_F(LexerTestFixture, AllTokensInString6) {
    // Backslash glued to a word does not split into an extra token.
    std::string testdata = "\"a string \\with \\7 tokens \"";
    FileReader r(testdata);
    FileTokenData d(DataType::RequirementsData, r);
    lexer_test.Read(d);
    EXPECT_EQ(d.token_list.size(), 7);
}

// ---- Position/line tracking for specification constructs ----

TEST_F(LexerTestFixture, TypeDefinition) {
    testdata = "type XXX { attribute : id, status : New, }";
    FileReader r(testdata);
    FileTokenData d(DataType::RequirementsSpecification, r);
    lexer_test.Read(d);
    EXPECT_EQ(d.token_list.size(), 12);
    EXPECT_EQ(d.token_list.front().GetPositionInLineOfToken(), 0);
    EXPECT_EQ(d.token_list.front().GetLineNumberOfToken(), 0);
    EXPECT_EQ(d.token_list.back().GetLineNumberOfToken(), 0);
    EXPECT_EQ(d.token_list.back().GetPositionInLineOfToken(), 41);
    d.token_list.pop_front();
    EXPECT_EQ(d.token_list.front().GetPositionInLineOfToken(), 5);
    EXPECT_EQ(d.token_list.front().GetLineNumberOfToken(), 0);
}

TEST_F(LexerTestFixture, EnumDefinition) {
    testdata = "enum XXX { MyAttr }";
    FileReader r(testdata);
    FileTokenData d(DataType::RequirementsSpecification, r);
    lexer_test.Read(d);
    EXPECT_EQ(d.token_list.size(), 5);
    EXPECT_EQ(d.token_list.front().GetPositionInLineOfToken(), 0);
    EXPECT_EQ(d.token_list.front().GetLineNumberOfToken(), 0);
    EXPECT_EQ(d.token_list.back().GetPositionInLineOfToken(), 18);
    EXPECT_EQ(d.token_list.back().GetLineNumberOfToken(), 0);
}

TEST_F(LexerTestFixture, EnumDefinition2) {
    // No spaces around braces — same token count, shifted positions.
    testdata = "enum XXX {MyAttr}";
    FileReader r(testdata);
    FileTokenData d(DataType::RequirementsSpecification, r);
    lexer_test.Read(d);
    EXPECT_EQ(d.token_list.size(), 5);
    EXPECT_EQ(d.token_list.front().GetPositionInLineOfToken(), 0);
    EXPECT_EQ(d.token_list.front().GetLineNumberOfToken(), 0);
    EXPECT_EQ(d.token_list.back().GetPositionInLineOfToken(), 16);
    EXPECT_EQ(d.token_list.back().GetLineNumberOfToken(), 0);
}

TEST_F(LexerTestFixture, EnumDefinitionWithLineBreaks) {
    // Line breaks are tokens themselves; each subsequent token reports the
    // line it starts on and its column within that line.
    testdata = "enum XXX \n { \n MyAttr \n }";
    FileReader r(testdata);
    FileTokenData d(DataType::RequirementsSpecification, r);
    lexer_test.Read(d);
    EXPECT_EQ(d.token_list.size(), 8);
    // enum
    EXPECT_EQ(d.token_list.front().GetPositionInLineOfToken(), 0);
    EXPECT_EQ(d.token_list.front().GetLineNumberOfToken(), 0);
    // XXX
    d.token_list.pop_front();
    EXPECT_EQ(d.token_list.front().GetLineNumberOfToken(), 0);
    EXPECT_EQ(d.token_list.front().GetPositionInLineOfToken(), 5);
    // \n
    d.token_list.pop_front();
    EXPECT_EQ(d.token_list.front().GetLineNumberOfToken(), 0);
    EXPECT_EQ(d.token_list.front().GetPositionInLineOfToken(), 9);
    // {
    d.token_list.pop_front();
    EXPECT_EQ(d.token_list.front().GetLineNumberOfToken(), 1);
    EXPECT_EQ(d.token_list.front().GetPositionInLineOfToken(), 6);
    // \n
    d.token_list.pop_front();
    EXPECT_EQ(d.token_list.front().GetLineNumberOfToken(), 1);
    EXPECT_EQ(d.token_list.front().GetPositionInLineOfToken(), 8);
    // MyAttr
    d.token_list.pop_front();
    EXPECT_EQ(d.token_list.front().GetLineNumberOfToken(), 2);
    EXPECT_EQ(d.token_list.front().GetPositionInLineOfToken(), 1);
    // \n
    d.token_list.pop_front();
    EXPECT_EQ(d.token_list.front().GetLineNumberOfToken(), 2);
    EXPECT_EQ(d.token_list.front().GetPositionInLineOfToken(), 8);
    // }
    EXPECT_EQ(d.token_list.back().GetPositionInLineOfToken(), 3);
    EXPECT_EQ(d.token_list.back().GetLineNumberOfToken(), 3);
}

// ---- Comments ----

TEST_F(LexerTestFixture, EnumDefinitionWithComment) {
    // Inline block comment contributes COMMENT_* tokens (8 total incl. body).
    testdata = "enum XXX/*anInlineComment*/{ MyAttr }";
    FileReader r(testdata);
    FileTokenData d(DataType::RequirementsSpecification, r);
    lexer_test.Read(d);
    EXPECT_EQ(d.token_list.size(), 8);
}

TEST_F(LexerTestFixture, LineComment) {
    testdata = "// test line comment\n";
    FileReader r(testdata);
    FileTokenData d(DataType::RequirementsSpecification, r);
    lexer_test.Read(d);
    EXPECT_EQ(d.token_list.size(), 5);
}

// ---- Protected classifier helpers, called directly via inheritance ----

TEST_F(LexerTestFixture, IsOperatorOrKeyword) {
    SlidingWindow v;
    v.push_back({'/', 0});
    v.push_back({'*', 0});
    EXPECT_TRUE(IsOperator(v));
    v.clear();
    v.push_back({'*', 0});
    v.push_back({'/', 0});
    EXPECT_TRUE(IsOperator(v));
    v.clear();
    v.push_back({'\\', 0});
    v.push_back({'\"', 0});
    EXPECT_TRUE(IsOperator(v));
    EXPECT_TRUE(IsOperatorOrKeyword("//"));
    EXPECT_TRUE(IsOperatorOrKeyword("/*"));
    EXPECT_TRUE(IsOperatorOrKeyword("\""));
    EXPECT_TRUE(IsOperatorOrKeyword("\\\""));
    EXPECT_TRUE(IsOperatorOrKeyword("string"));
    EXPECT_TRUE(IsOperatorOrKeyword("int"));
    EXPECT_TRUE(IsOperatorOrKeyword("enum"));
    EXPECT_FALSE(IsOperatorOrKeyword("xx"));
    EXPECT_FALSE(IsOperatorOrKeyword("1hd"));
    EXPECT_FALSE(IsOperatorOrKeyword("int2"));
    EXPECT_FALSE(IsOperatorOrKeyword("dahsdjhaskjdhkas"));
}

TEST_F(LexerTestFixture, IsWhitespaceOrDelimiter) {
    EXPECT_TRUE(IsWhitespace(' '));
    EXPECT_TRUE(IsWhitespace('\t'));
    EXPECT_FALSE(IsWhitespace('b'));
    EXPECT_FALSE(IsWhitespace('x'));
    EXPECT_FALSE(IsWhitespace('0'));
    EXPECT_TRUE(IsDelimiter(' '));
    EXPECT_TRUE(IsDelimiter('\r'));
    EXPECT_TRUE(IsDelimiter('\n'));
    EXPECT_FALSE(IsDelimiter('b'));
    EXPECT_FALSE(IsDelimiter('x'));
    EXPECT_FALSE(IsDelimiter('0'));
}

TEST_F(LexerTestFixture, IsIdentifier) {
    // Identifiers: letters, digits (not leading), underscore, and dots.
    EXPECT_TRUE(IsIdentifier("asbd"));
    EXPECT_TRUE(IsIdentifier("Xhdsa"));
    EXPECT_TRUE(IsIdentifier("JDASU"));
    EXPECT_TRUE(IsIdentifier("ash_dhuzd"));
    EXPECT_TRUE(IsIdentifier("XB_dhs_ASdzz"));
    EXPECT_TRUE(IsIdentifier("a20_30"));
    EXPECT_TRUE(IsIdentifier("_20_30"));
    EXPECT_TRUE(IsIdentifier("_ident"));
    EXPECT_TRUE(IsIdentifier("a"));
    EXPECT_TRUE(IsIdentifier("X"));
    EXPECT_TRUE(IsIdentifier("_"));
    EXPECT_TRUE(IsIdentifier("ab.cd"));
    EXPECT_TRUE(IsIdentifier("XHDHD."));
    EXPECT_TRUE(IsIdentifier("XH.DHD"));
    EXPECT_TRUE(IsIdentifier("a.b.c.d."));
    EXPECT_TRUE(IsIdentifier("a.b.c.d.erere"));
    EXPECT_TRUE(IsIdentifier(".b.c.d.erere"));
    EXPECT_TRUE(IsIdentifier("."));
    EXPECT_TRUE(IsIdentifier(".fsa"));
    EXPECT_TRUE(IsIdentifier("......___...."));
    EXPECT_TRUE(IsIdentifier("....."));
    EXPECT_FALSE(IsIdentifier("20_30"));
    EXPECT_FALSE(IsIdentifier("1"));
    EXPECT_FALSE(IsIdentifier("1abs"));
    EXPECT_FALSE(IsIdentifier("/§&dasdas"));
    EXPECT_FALSE(IsIdentifier("asdhdja_&dhas"));
    EXPECT_FALSE(IsIdentifier("-1273t67"));
    EXPECT_FALSE(IsIdentifier("1237"));
    EXPECT_FALSE(IsIdentifier(" "));
    EXPECT_FALSE(IsIdentifier("-----"));
}

TEST_F(LexerTestFixture, IsString) {
    // Strings must not contain whitespace or line breaks.
    EXPECT_TRUE(IsString("asbd"));
    EXPECT_TRUE(IsString("A"));
    EXPECT_TRUE(IsString("Xh---dsa"));
    EXPECT_FALSE(IsString("Xh-\r-dsa"));
    EXPECT_FALSE(IsString("Xh-\n-dsa"));
    EXPECT_FALSE(IsString("Xh-\r\n-dsa"));
    EXPECT_FALSE(IsString("\r"));
    EXPECT_FALSE(IsString("\n"));
    EXPECT_FALSE(IsString(" "));
    EXPECT_FALSE(IsString(" xx "));
}

TEST_F(LexerTestFixture, IsInteger) {
    EXPECT_TRUE(IsInteger("-1"));
    EXPECT_TRUE(IsInteger("0"));
    EXPECT_TRUE(IsInteger("1"));
    EXPECT_TRUE(IsInteger("17262"));
    EXPECT_TRUE(IsInteger("-14726"));
    EXPECT_FALSE(IsInteger("-1x4726"));
    EXPECT_FALSE(IsInteger("1x4726"));
    EXPECT_FALSE(IsInteger("$1x4726"));
    EXPECT_FALSE(IsInteger("a-1x4726"));
    EXPECT_FALSE(IsInteger("X"));
    EXPECT_FALSE(IsInteger("123sa"));
}

TEST_F(LexerTestFixture, IsDelimiter) {
    EXPECT_TRUE(IsDelimiter('\n'));
    EXPECT_TRUE(IsDelimiter('\r'));
    EXPECT_TRUE(IsDelimiter(' '));
    EXPECT_FALSE(IsDelimiter('a'));
    EXPECT_FALSE(IsDelimiter('x'));
    EXPECT_FALSE(IsDelimiter('_'));
    EXPECT_FALSE(IsDelimiter(0));
}

TEST_F(LexerTestFixture, IsLinebreak) {
    EXPECT_TRUE(IsLinebreak("\r\n"));
    EXPECT_TRUE(IsLinebreak("\n"));
    EXPECT_FALSE(IsLinebreak("ads"));
    EXPECT_FALSE(IsLinebreak(""));
    EXPECT_FALSE(IsLinebreak("_"));
}

TEST_F(LexerTestFixture, IsWhitespace) {
    EXPECT_TRUE(IsWhitespace(' '));
    EXPECT_TRUE(IsWhitespace('\t'));
    EXPECT_FALSE(IsWhitespace('x'));
    EXPECT_FALSE(IsWhitespace(0));
    EXPECT_FALSE(IsWhitespace('_'));
}

// ---- CheckStringandAddToken: classify one lexeme into the fixture's token_list ----

TEST_F(LexerTestFixture, IdentifyTokenInString1) {
    std::string xx = "string";  // keyword -> STRING token
    CheckStringandAddToken(xx, ',');
    EXPECT_EQ(token_list->size(), 1);
    EXPECT_EQ(token_list->front().GetTokenType(), TokenType::STRING);
}

TEST_F(LexerTestFixture, IdentifyTokenInString2) {
    std::string xx = "stringXX";  // keyword prefix alone is not a keyword
    CheckStringandAddToken(xx);
    EXPECT_EQ(token_list->size(), 1);
    EXPECT_EQ(token_list->front().GetTokenType(), TokenType::IDENTIFIER);
}

TEST_F(LexerTestFixture, IdentifyTokenInString3) {
    std::string xx = "hello";
    CheckStringandAddToken(xx, 0);
    EXPECT_EQ(token_list->size(), 1);
    EXPECT_EQ(token_list->front().GetTokenType(), TokenType::IDENTIFIER);
}

TEST_F(LexerTestFixture, IdentifyTokenInString4) {
    std::string xx = "43847";
    CheckStringandAddToken(xx, 0);
    EXPECT_EQ(token_list->size(), 1);
    EXPECT_EQ(token_list->front().GetTokenType(), TokenType::INTEGER_VALUE);
}

TEST_F(LexerTestFixture, IdentifyTokenInString5) {
    std::string xx = "438_47";  // digit-leading but not an integer -> plain string value
    CheckStringandAddToken(xx, 0);
    EXPECT_EQ(token_list->size(), 1);
    EXPECT_EQ(token_list->front().GetTokenType(), TokenType::STRING_VALUE);
}

TEST_F(LexerTestFixture, IdentifyTokenInString6) {
    std::string xx = ":";
    CheckStringandAddToken(xx, 0);
    EXPECT_EQ(token_list->size(), 1);
    EXPECT_EQ(token_list->front().GetTokenType(), TokenType::COLON);
}

TEST_F(LexerTestFixture, IdentifyTokenInString7) {
    std::string xx = "//";
    CheckStringandAddToken(xx, 0);
    EXPECT_EQ(token_list->size(), 1);
    EXPECT_EQ(token_list->front().GetTokenType(), TokenType::LINE_COMMENT);
}

TEST_F(LexerTestFixture, IdentifyTokenInString8) {
    std::string xx = "/*";
    CheckStringandAddToken(xx, 0);
    EXPECT_EQ(token_list->size(), 1);
    EXPECT_EQ(token_list->front().GetTokenType(), TokenType::COMMENT_BLOCK_START);
}

TEST_F(LexerTestFixture, IdentifyTokenInString9) {
    std::string xx = "*/";
    CheckStringandAddToken(xx, 0);
    EXPECT_EQ(token_list->size(), 1);
    EXPECT_EQ(token_list->front().GetTokenType(), TokenType::COMMENT_BLOCK_END);
}
TEST_F(LexerTestFixture, IdentifyTokenInString10) { std::string xx = ","; CheckStringandAddToken(xx, 0); EXPECT_EQ(token_list->size(), 1); EXPECT_EQ(token_list->front().GetTokenType(), TokenType::COMMA); } TEST_F(LexerTestFixture, IdentifyTokenInString11) { std::string xx = "{"; CheckStringandAddToken(xx, 0); EXPECT_EQ(token_list->size(), 1); EXPECT_EQ(token_list->front().GetTokenType(), TokenType::BRACKET_OPEN); } TEST_F(LexerTestFixture, IdentifyTokenInString12) { std::string xx = "link"; CheckStringandAddToken(xx, ','); EXPECT_EQ(token_list->size(), 1); EXPECT_EQ(token_list->front().GetTokenType(), TokenType::LINK); } TEST_F(LexerTestFixture, IdentifyTokenInString13) { std::string xx = "linkXX"; CheckStringandAddToken(xx); EXPECT_EQ(token_list->size(), 1); EXPECT_EQ(token_list->front().GetTokenType(), TokenType::IDENTIFIER); } TEST_F(LexerTestFixture, IdentifyTokenInString14) { std::string xx = "}"; CheckStringandAddToken(xx); EXPECT_EQ(token_list->size(), 1); EXPECT_EQ(token_list->front().GetTokenType(), TokenType::BRACKET_CLOSE); } TEST_F(LexerTestFixture, IdentifyTokenInString15) { std::string xx = "id"; CheckStringandAddToken(xx, ','); EXPECT_EQ(token_list->size(), 1); EXPECT_EQ(token_list->front().GetTokenType(), TokenType::ID); } TEST_F(LexerTestFixture, IdentifyTokenInString16) { std::string xx = "idXX"; CheckStringandAddToken(xx); EXPECT_EQ(token_list->size(), 1); EXPECT_EQ(token_list->front().GetTokenType(), TokenType::IDENTIFIER); } TEST_F(LexerTestFixture, IdentifyTokenInString17) { std::string xx = "type"; CheckStringandAddToken(xx, '{'); EXPECT_EQ(token_list->size(), 1); EXPECT_EQ(token_list->front().GetTokenType(), TokenType::TYPE); } TEST_F(LexerTestFixture, IdentifyTokenInString18) { std::string xx = "typeXX"; CheckStringandAddToken(xx); EXPECT_EQ(token_list->size(), 1); EXPECT_EQ(token_list->front().GetTokenType(), TokenType::IDENTIFIER); } TEST_F(LexerTestFixture, IdentifyTokenInString19) { std::string xx = "enum"; CheckStringandAddToken(xx, 
'{'); EXPECT_EQ(token_list->size(), 1); EXPECT_EQ(token_list->front().GetTokenType(), TokenType::ENUM); } TEST_F(LexerTestFixture, IdentifyTokenInString20) { std::string xx = "enumXX"; CheckStringandAddToken(xx); EXPECT_EQ(token_list->size(), 1); EXPECT_EQ(token_list->front().GetTokenType(), TokenType::IDENTIFIER); }
Polas/omim
routing/transit_info.hpp
<reponame>Polas/omim<gh_stars>1-10 #pragma once #include "transit/transit_types.hpp" #include "base/assert.hpp" #include <memory> #include <utility> #include <vector> namespace routing { class TransitInfo final { public: enum class Type { Gate, Edge, Transfer }; struct Edge { Edge() = default; explicit Edge(transit::Edge const & edge) : m_lineId(edge.GetLineId()) , m_stop1Id(edge.GetStop1Id()) , m_stop2Id(edge.GetStop2Id()) , m_shapeIds(edge.GetShapeIds()) { ASSERT(!edge.GetTransfer(), ()); } transit::LineId m_lineId = transit::kInvalidLineId; transit::StopId m_stop1Id = transit::kInvalidStopId; transit::StopId m_stop2Id = transit::kInvalidStopId; std::vector<transit::ShapeId> m_shapeIds; }; struct Gate { Gate() = default; explicit Gate(transit::Gate const & gate) : m_featureId(gate.GetFeatureId()) {} transit::FeatureId m_featureId = transit::kInvalidFeatureId; }; struct Transfer { Transfer() = default; explicit Transfer(transit::Edge const & edge) : m_stop1Id(edge.GetStop1Id()), m_stop2Id(edge.GetStop2Id()) { ASSERT(edge.GetTransfer(), ()); } transit::StopId m_stop1Id = transit::kInvalidStopId; transit::StopId m_stop2Id = transit::kInvalidStopId; }; explicit TransitInfo(transit::Gate const & gate) : m_type(Type::Gate), m_edge(), m_gate(gate), m_transfer() { } explicit TransitInfo(transit::Edge const & edge) : m_type(edge.GetTransfer() ? Type::Transfer : Type::Edge) , m_edge(edge.GetTransfer() ? Edge() : Edge(edge)) , m_gate() , m_transfer(edge.GetTransfer() ? Transfer(edge) : Transfer()) { } Type GetType() const { return m_type; } Edge const & GetEdge() const { ASSERT_EQUAL(m_type, Type::Edge, ()); return m_edge; } Gate const & GetGate() const { ASSERT_EQUAL(m_type, Type::Gate, ()); return m_gate; } Transfer const & GetTransfer() const { ASSERT_EQUAL(m_type, Type::Transfer, ()); return m_transfer; } private: Type const m_type; // Valid for m_type == Type::Edge only. Edge const m_edge; // Valid for m_type == Type::Gate only. 
Gate const m_gate; // Valid for m_type == Type::Transfer only. Transfer const m_transfer; }; class TransitInfoWrapper final { public: explicit TransitInfoWrapper(std::unique_ptr<TransitInfo> ptr) : m_ptr(move(ptr)) {} explicit TransitInfoWrapper(TransitInfoWrapper && rhs) { swap(m_ptr, rhs.m_ptr); } explicit TransitInfoWrapper(TransitInfoWrapper const & rhs) { if (rhs.m_ptr) m_ptr = std::make_unique<TransitInfo>(*rhs.m_ptr); } TransitInfoWrapper & operator=(TransitInfoWrapper && rhs) { swap(m_ptr, rhs.m_ptr); return *this; } TransitInfoWrapper & operator=(TransitInfoWrapper const & rhs) { m_ptr.reset(); if (rhs.m_ptr) m_ptr = std::make_unique<TransitInfo>(*rhs.m_ptr); return *this; } bool HasTransitInfo() const { return m_ptr != nullptr; } TransitInfo const & Get() const { ASSERT(HasTransitInfo(), ()); return *m_ptr; } void Set(std::unique_ptr<TransitInfo> ptr) { m_ptr = move(ptr); } private: std::unique_ptr<TransitInfo> m_ptr; }; inline std::string DebugPrint(TransitInfo::Type type) { switch (type) { case TransitInfo::Type::Gate: return "Gate"; case TransitInfo::Type::Edge: return "Edge"; case TransitInfo::Type::Transfer: return "Transfer"; } UNREACHABLE(); } } // namespace routing
grishkam/QuickFigures
QuickFigures/appMenus/popupMenusForComplexObjects/FigureOrganizingSuplierForPopup.java
<reponame>grishkam/QuickFigures /******************************************************************************* * Copyright (c) 2021 <NAME> * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. *******************************************************************************/ /** * Author: <NAME> * Date Modified: April 18, 2021 * Version: 2021.1 */ package popupMenusForComplexObjects; import java.awt.Container; import java.awt.Dimension; import java.awt.Rectangle; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.util.ArrayList; import javax.swing.JMenu; import javax.swing.JMenuItem; import javax.swing.JPopupMenu; import javax.swing.undo.UndoableEdit; import channelMerging.CSFLocation; import channelMerging.ChannelEntry; import channelMerging.MultiChannelImage; import channelMerging.ImageDisplayLayer; import channelMerging.PreProcessInformation; import fLexibleUIKit.MenuItemExecuter; import fLexibleUIKit.MenuItemMethod; import figureEditDialogs.ScaleLevelInputDialog; import figureEditDialogs.WindowLevelDialog; import figureFormat.TemplateUserMenuAction; import figureOrganizer.FigureOrganizingLayerPane; import figureOrganizer.LabelCreationOptions; import figureOrganizer.MultichannelDisplayLayer; import graphicActionToolbar.CurrentFigureSet; import graphicalObjects.ZoomableGraphic; import graphicalObjects_LayoutObjects.DefaultLayoutGraphic; import graphicalObjects_SpecialObjects.ComplexTextGraphic; import graphicalObjects_SpecialObjects.ImagePanelGraphic; 
import graphicalObjects_SpecialObjects.TextGraphic;
import iconGraphicalObjects.ChannelUseIcon;
import iconGraphicalObjects.CropIconGraphic;
import iconGraphicalObjects.IconUtil;
import icons.SourceImageTreeIcon;
import icons.ToolIconWithText;
import imageDisplayApp.CanvasOptions;
import imageScaling.Interpolation;
import imageScaling.ScaleInformation;
import layout.basicFigure.BasicLayout;
import layout.basicFigure.LayoutSpaces;
import logging.IssueLog;
import menuUtil.SmartJMenu;
import menuUtil.SmartPopupJMenu;
import menuUtil.BasicSmartMenuItem;
import menuUtil.PopupMenuSupplier;
import multiChannelFigureUI.ChannelPanelEditingMenu;
import objectDialogs.CroppingDialog;
import objectDialogs.CroppingDialog.CropDialogContext;
import standardDialog.StandardDialog;
import storedValueDialog.StoredValueDilaog;
import undo.AbstractUndoableEdit2;
import undo.CanvasResizeUndo;
import undo.ChannelDisplayUndo;
import undo.CombinedEdit;
import undo.PreprocessChangeUndo;
import undo.UndoLayoutEdit;
import undo.UndoScalingAndRotation;

/**A menu for a figure organizing layer.
 * Builds the right-click popup for a {@link FigureOrganizingLayerPane} and
 * dispatches its menu items: adding source images, generating row/column/panel
 * labels, re-cropping/re-scaling every image in the figure and editing the
 * display range of all channels. Most actions return a {@link CombinedEdit}
 * that is registered with the figure's undo manager. */
public class FigureOrganizingSuplierForPopup implements PopupMenuSupplier, LayoutSpaces, ActionListener {

	// The figure layer this popup acts on.
	FigureOrganizingLayerPane figureOrganizingLayerPane;
	// Menu items; each is created in addMenus and handled in actionPerformed.
	JMenuItem addImageFromFileButton;
	private JMenuItem addOpenImageFromList;
	private JMenuItem rowLabelButton;
	private JMenuItem columnLabelButton;
	private JMenuItem recreatePanelsButton;
	private JMenuItem minMaxButton5;
	private JMenuItem windowLevelButton;
	private JMenuItem channelUseOptionsButton;
	private JMenuItem panelLabelButton;
	private JMenuItem recropPanelsButton;
	private JMenuItem reScalePanelsButton;
	private JMenuItem rePanelSizePanelsButton;
	private JMenuItem rePPIPanelsButton;

	/**Creates a popup supplier for the given figure organizing layer.*/
	public FigureOrganizingSuplierForPopup(FigureOrganizingLayerPane figureOrganizingLayerPane) {
		this.figureOrganizingLayerPane=figureOrganizingLayerPane;
	}

	/**Builds and returns a fresh popup menu populated by {@link #addMenus(Container)}.*/
	@Override
	public JPopupMenu getJPopup() {
		SmartPopupJMenu jj = new SmartPopupJMenu();
		addMenus(jj);
		return jj;
	}

	/**Adds the menu items from this popup to an arbitrary container.
	 * Creates the "Add Image", "Add Labels", "Images" and "All Channels"
	 * sub-menus and wires every item to this listener.*/
	protected void addMenus(Container jj) {
		JMenu imagesMenu = new SmartJMenu("Images", new SourceImageTreeIcon());
		JMenu addImage=new SmartJMenu("Add Image",new SourceImageTreeIcon());
		jj.add(addImage);
		addImageFromFileButton = new BasicSmartMenuItem("Image From File");
		addImage.add(addImageFromFileButton);
		addImageFromFileButton.addActionListener(this);
		addOpenImageFromList = new BasicSmartMenuItem("Currently Open Image");
		addImage.add(addOpenImageFromList);
		addOpenImageFromList.addActionListener(this);
		JMenu labelMenu = new SmartJMenu("Add Labels", ComplexTextGraphic.createImageIcon());
		rowLabelButton = new BasicSmartMenuItem("Generate Row Labels", new ToolIconWithText(0, ROW_OF_PANELS).getMenuVersion());
		labelMenu.add(rowLabelButton);
		rowLabelButton.addActionListener(this);
		columnLabelButton = new BasicSmartMenuItem("Generate Column Labels", new ToolIconWithText(0, COLUMN_OF_PANELS).getMenuVersion());
		labelMenu.add(columnLabelButton);
		columnLabelButton.addActionListener(this);
		panelLabelButton = new BasicSmartMenuItem("Generate Panel Labels", new ToolIconWithText(0, PANELS).getMenuVersion());
		labelMenu.add(panelLabelButton);
		panelLabelButton.addActionListener(this);
		// Adds items for the @MenuItemMethod-annotated methods of this class
		// (currently changeLabelProperties) to the label menu.
		new MenuItemExecuter(this).addToJMenu(labelMenu);
		jj.add(labelMenu);
		recropPanelsButton= new BasicSmartMenuItem("Re-Crop All Images");
		recropPanelsButton.addActionListener(this);
		recropPanelsButton.setIcon( CropIconGraphic.createsCropIcon());
		imagesMenu.add(recropPanelsButton);
		reScalePanelsButton= new BasicSmartMenuItem("Re-Set Scale for All Images");
		reScalePanelsButton.addActionListener(this);
		imagesMenu.add(reScalePanelsButton);
		rePanelSizePanelsButton= new BasicSmartMenuItem("Re-size panels without scale Re-Set");
		rePanelSizePanelsButton.addActionListener(this);
		imagesMenu.add(rePanelSizePanelsButton);
		recreatePanelsButton = new BasicSmartMenuItem("Recreate All Panels");
		jj.add(recreatePanelsButton);
		recreatePanelsButton.addActionListener(this);
		jj.add(imagesMenu);
		rePPIPanelsButton=new BasicSmartMenuItem("Re-Set Pixel Density for All Images");
		rePPIPanelsButton.addActionListener(this);
		imagesMenu.add(rePPIPanelsButton);
		// The figure-wide rescale options only make sense if a layout exists.
		if (figureOrganizingLayerPane.getMontageLayoutGraphic()!=null) {
			FigureScalerMenu figureScaler = new FigureScalerMenu(figureOrganizingLayerPane.getMontageLayoutGraphic());
			imagesMenu.add(figureScaler.createRescaleMenuItem());
			jj.add(figureScaler);
		}
		JMenu chanMen=new SmartJMenu("All Channels");
		channelUseOptionsButton = new BasicSmartMenuItem("Channel Use", new ChannelUseIcon());
		chanMen.add(channelUseOptionsButton);
		channelUseOptionsButton.addActionListener(this);
		minMaxButton5 = new BasicSmartMenuItem("Min/Max");
		minMaxButton5.setIcon(IconUtil.createBrightnessIcon());
		chanMen.add(minMaxButton5);
		minMaxButton5.addActionListener(this);
		windowLevelButton = new BasicSmartMenuItem("Window/Level");
		chanMen.add(windowLevelButton);
		windowLevelButton.setIcon(IconUtil.createBrightnessIcon());
		windowLevelButton.addActionListener(this);
		// Building the recolor menu reads channel data; a failure there must
		// not prevent the rest of the popup from appearing.
		try {addRecolorMenu(chanMen);} catch (Throwable t) {IssueLog.logT(t);};
		jj.add(chanMen);
		SmartJMenu excluders = this.getMenuContext().createChannelMergeMenu(ChannelPanelEditingMenu.EXCLUDED_CHANNEL_AND_DONT_MERGE);
		excluders.setIcon(new ChannelUseIcon());
		jj.add(excluders);
		jj.add(TemplateUserMenuAction.createFormatMenu(figureOrganizingLayerPane));
	}

	/**Adds the recolor channels menu.
	 * Uses the first channel of the principal multichannel image as the
	 * context for the per-channel color sub-menus.*/
	public void addRecolorMenu(JMenu j) {
		MultiChannelImage mw = getPrimaryMultichannelWrapper();
		ArrayList<ChannelEntry> iFin = mw.getChannelEntriesInOrder();
		ChannelPanelEditingMenu bit = new ChannelPanelEditingMenu(figureOrganizingLayerPane, iFin.get(0).getOriginalChannelIndex());
		bit.addChenEntryColorMenus( j, iFin);
	}

	/**Returns the multichannel image of the figure's principal display layer.*/
	public MultiChannelImage getPrimaryMultichannelWrapper() {
		return figureOrganizingLayerPane.getPrincipalMultiChannel().getMultiChannelImage();
	}

	/**Dispatches all menu item clicks. Each branch may produce a CombinedEdit;
	 * whatever ends up in {@code undo} is added to the figure's undo manager
	 * at the end (the manager is handed the value even when it is null —
	 * presumably addEdit tolerates null; TODO confirm).*/
	@Override
	public void actionPerformed(ActionEvent arg0) {
		Object source = arg0.getSource();
		CombinedEdit undo=null ;
		if (source==addImageFromFileButton) {
			IssueLog.log("about to open image ");
			undo=figureOrganizingLayerPane.nextMultiChannel(true);
		}
		if (source==addOpenImageFromList) {
			undo=figureOrganizingLayerPane.nextMultiChannel(false);
		}
		if (source==rowLabelButton||source==columnLabelButton||source==panelLabelButton) {
			int type=BasicLayout.ROWS;
			if(source==columnLabelButton) type=BasicLayout.COLS;
			if(source==panelLabelButton) type=BasicLayout.PANELS;
			CombinedEdit many = figureOrganizingLayerPane.addRowOrColLabel(type);
			/**Adds to the undo manager*/
			figureOrganizingLayerPane.getUndoManager().addEdit(many);
		}
		if (source==recreatePanelsButton) {
			figureOrganizingLayerPane.recreateFigurePanels();
		}
		if (source ==recropPanelsButton) {
			undo= recropAll();
		}
		if (source ==reScalePanelsButton) {
			undo=showReScaleAll();
		}
		if (source ==rePanelSizePanelsButton) {
			undo=showReDoPanelSizeAll();
		}
		if (source ==rePPIPanelsButton) {
			undo=showRePPIAll();
		}
		ChannelPanelEditingMenu bit = getMenuContext();
		if (source==minMaxButton5) {
			// Snapshot channel display state of every source image before the
			// dialog so the whole adjustment is undoable as one edit.
			CombinedEdit undoMinMax = ChannelDisplayUndo.createMany(figureOrganizingLayerPane.getAllSourceImages(), bit);
			undo=undoMinMax;
			WindowLevelDialog.showWLDialogs(getPrimaryMultichannelWrapper().getChannelEntriesInOrder(), getPrimaryMultichannelWrapper(), bit, WindowLevelDialog.MIN_MAX, undoMinMax);
		}
		if (source==windowLevelButton) {
			CombinedEdit undoMinMax = ChannelDisplayUndo.createMany(figureOrganizingLayerPane.getAllSourceImages(), bit);
			undo=undoMinMax;
			WindowLevelDialog.showWLDialogs(getPrimaryMultichannelWrapper().getChannelEntriesInOrder(), getPrimaryMultichannelWrapper(), bit, WindowLevelDialog.WINDOW_LEVEL, undoMinMax);
		}
		if (source==channelUseOptionsButton){
			figureOrganizingLayerPane.showChannelUseOptions();
		}
		figureOrganizingLayerPane.getUndoManager().addEdit(undo);
	}

	/** generates a channel panel editing menu context for this popup menu */
	public ChannelPanelEditingMenu getMenuContext() {
		return new ChannelPanelEditingMenu( figureOrganizingLayerPane, ChannelPanelEditingMenu.ALL_IMAGES_IN_CLICKED_FIGURE);
	}

	/**returns a label editor for the given text item,
	 * or null when the item has no labels to edit*/
	public EditLabels getLabelEditorMenuItemFor(TextGraphic t) {
		int gridSnap = t.getAttachmentPosition().getGridSpaceCode();
		EditLabels output = new EditLabels(gridSnap, figureOrganizingLayerPane.getMontageLayoutGraphic(), t);
		if(output.getLabels(t).size()==0) {
			return null;
		}
		return output;
	}

	/**Opens a dialog to recrop all the panels
	 * @return a combined undoable edit covering every recropped image
	 */
	public CombinedEdit recropAll() {
		MultichannelDisplayLayer crop1 = (MultichannelDisplayLayer) figureOrganizingLayerPane.getPrincipalMultiChannel();
		ArrayList<ImageDisplayLayer> all = figureOrganizingLayerPane.getMultiChannelDisplays();
		return recropManyImages(crop1, all);
	}

	/**shows a dialog for changing the drop area for many multichannel images within the figure.
	 * The principal image (crop1) is cropped first; its resulting crop size is
	 * then offered as the suggested size for every other image.*/
	public static CombinedEdit recropManyImages(MultichannelDisplayLayer crop1, ArrayList<? extends ImageDisplayLayer> all) {
		CombinedEdit output = new CombinedEdit();
		CropDialogContext context = new CroppingDialog.CropDialogContext(all.size()+1);
		output.addEditToList( showRecropDisplayDialog( crop1, null, null, context) );
		PreProcessInformation modifications = crop1.getSlot().getModifications();
		Rectangle r1=null;
		Dimension d1;
		Interpolation interpolate=null;
		if (modifications!=null) {
			r1= modifications.getRectangle();
			interpolate=modifications.getInterpolationType();
		}
		// No crop rectangle means the full image dimensions are the reference size.
		if (r1==null) {
			d1=crop1.getMultiChannelImage().getDimensions();
		}else d1=new Dimension(r1.width, r1.height);
		for(ImageDisplayLayer crop2: all) {
			if(crop2==crop1) continue;
			output.addEditToList( showRecropDisplayDialog( (MultichannelDisplayLayer) crop2, d1, interpolate, context) );
		}
		if (CanvasOptions.current.resizeCanvasAfterEdit)
			output.addEditToList( CurrentFigureSet.canvasResizeUndoable() );
		return output;
	}

	/**shows a cropping dialog.
	 * Returns null when the modifications are unchanged after the dialog;
	 * otherwise returns an edit combining the preprocess change and any
	 * layout row/column resize it triggered.*/
	public static CombinedEdit showRecropDisplayDialog(MultichannelDisplayLayer display, Dimension dim, Interpolation interpolate, CropDialogContext context) {
		PreProcessInformation original = display.getSlot().getModifications();
		display.getPanelManager().setupViewLocation();
		PreprocessChangeUndo undo1 = new PreprocessChangeUndo(display);
		// Remember which channel/slice/frame was displayed so a switch made
		// inside the dialog can be propagated to the panels afterwards.
		CSFLocation csfInitial = display.getSlot().getDisplaySlice().duplicate();
		CroppingDialog.showCropDialogOfSize(display.getSlot(), dim, context);
		onViewLocationChange(display, csfInitial, display.getSlot().getDisplaySlice());
		if (display.getSlot().getModifications()!=null&&display.getSlot().getModifications().isSame(original) ) {
			return null;
		}
		undo1.establishFinalLocations();
		return new CombinedEdit(undo1, updateRowColSizesOf(display));
	}

	/**Called if the user switches slices or channels:
	 * replaces the frame/slice indexes used by the panel manager with the
	 * ones chosen inside the cropping dialog.*/
	private static void onViewLocationChange(MultichannelDisplayLayer display, CSFLocation i, CSFLocation f) {
		if (!display.getPanelManager().selectsSlicesOrFrames()) return;
		if (f.changesT(i) ) {
			display.getPanelManager().performReplaceOfIndex( CSFLocation.frameLocation(i.frame), CSFLocation.frameLocation(f.frame) );
		}
		if (f.changesZ(i) ) {
			display.getPanelManager().performReplaceOfIndex( CSFLocation.sliceLocation(i.slice), CSFLocation.sliceLocation(f.slice) );
		}
	}

	/**called to resize the layout in order to match the dimensions of object within the layout.
	 * Returns an undoable layout edit, or null when the display's parent is
	 * not a figure organizing layer or no layout graphic exists.*/
	public static UndoLayoutEdit updateRowColSizesOf(MultichannelDisplayLayer display) {
		if (display.getParentLayer() instanceof FigureOrganizingLayerPane) {
			FigureOrganizingLayerPane f=(FigureOrganizingLayerPane) display.getParentLayer();
			DefaultLayoutGraphic l = f.getMontageLayoutGraphic();
			if(l!=null) {
				l.generateCurrentImageWrapper();
				UndoLayoutEdit undo = new UndoLayoutEdit(l);
				l.getEditor().alterPanelWidthAndHeightToFitContents(l.getPanelLayout());
				undo.establishFinalLocations();
				return undo;
			}
		}
		return null;
	}

	/**shows a dialog for changing the scale factor of many multichannel images within the figure*/
	CombinedEdit showReScaleAll() {
		CombinedEdit output = showReScaleAllDisplayDialog((MultichannelDisplayLayer) figureOrganizingLayerPane.getPrincipalMultiChannel());
		CanvasResizeUndo output2 = CurrentFigureSet.canvasResizeUndoable();
		return new CombinedEdit(output, output2);
	}

	/**shows a dialog for changing the panelSize within the figure*/
	CombinedEdit showReDoPanelSizeAll() {
		CombinedEdit output = showReDoPanelSizeAllDisplayDialog((MultichannelDisplayLayer) figureOrganizingLayerPane.getPrincipalMultiChannel());
		CanvasResizeUndo output2 = CurrentFigureSet.canvasResizeUndoable();
		return new CombinedEdit(output, output2);
	}

	/**shows a dialog for changing the scale factor of many multichannel images within the figure.
	 * The scale chosen for the given display is applied to every other image too.*/
	CombinedEdit showReScaleAllDisplayDialog(MultichannelDisplayLayer display) {
		CombinedEdit output = new CombinedEdit();
		ScaleInformation newScale = showRescaleDialogSingleFor(display);
		output.addEditToList( applyNewScaleTo(display, newScale) );
		ArrayList<ImageDisplayLayer> all = figureOrganizingLayerPane.getMultiChannelDisplays();
		for(ImageDisplayLayer crop2: all) {
			if(crop2==display) continue;
			output.addEditToList( applyNewScaleTo((MultichannelDisplayLayer) crop2, newScale));
		}
		return output;
	}

	/**shows a dialog for changing the scale factor of many multichannel images within the figure
	 * (panel size only; the preprocess scale of the source images is untouched)*/
	CombinedEdit showReDoPanelSizeAllDisplayDialog(MultichannelDisplayLayer display) {
		CombinedEdit output = new CombinedEdit();
		double newScale = showDoPanelSizeDialogSingleFor(display);
		ArrayList<ImageDisplayLayer> all = figureOrganizingLayerPane.getMultiChannelDisplays();
		for(ImageDisplayLayer crop2: all) {
			output.addEditToList( applyNewPanelSizeTo((MultichannelDisplayLayer) crop2, newScale));
		}
		return output;
	}

	/**shows a dialog for the user to input a pixel density for all the images*/
	private CombinedEdit showRePPIAll() {
		CombinedEdit output = new CombinedEdit();
		double newPPI = showPPISingleImage(figureOrganizingLayerPane.getPrincipalMultiChannel());
		ArrayList<ImageDisplayLayer> all = figureOrganizingLayerPane.getMultiChannelDisplays();
		for(ImageDisplayLayer crop2: all) {
			output.addEditToList( ((MultichannelDisplayLayer) crop2).getPanelManager().changePPI(newPPI) );
		}
		return output;
	}

	/**shows a dialog for the user to input a pixel density for an image;
	 * the current PPI of the first panel is offered as the default value*/
	private double showPPISingleImage(ImageDisplayLayer principalMultiChannel) {
		ImagePanelGraphic panel = principalMultiChannel.getPanelManager().getPanelList().getPanels().get(0).getPanelGraphic();
		double ppi = panel.getQuickfiguresPPI();
		double newppi=StandardDialog.getNumberFromUser("Input Pixels per inch ", ppi);
		return newppi;
	}

	/**shows a dialog for changing the scale factor one image*/
	public static ScaleInformation showSingleImageRescale(MultichannelDisplayLayer display) {
		ScaleInformation newScale = showRescaleDialogSingleFor(display);
		applyNewScaleTo(display, newScale);
		return newScale;
	}

	/**shows a rescale dialog for the single image, seeded with the display's
	 * current preprocess scale (or a default one when none is recorded)*/
	protected static ScaleInformation showRescaleDialogSingleFor(MultichannelDisplayLayer display) {
		PreProcessInformation original = display.getSlot().getModifications();
		ScaleInformation oldScale =new ScaleInformation();
		if (original!=null) oldScale= original.getScaleInformation();
		ScaleInformation newScale = ScaleLevelInputDialog.showUserTheDialog(oldScale);
		return newScale;
	}

	/**a dialog to scale the panel objects only.
	 * The user enters a multiplier (default 1); the returned value is that
	 * multiplier applied to the first panel's current relative scale.*/
	protected static double showDoPanelSizeDialogSingleFor(MultichannelDisplayLayer display) {
		double original = display.getPanelList().getPanelGraphics().get(0).getRelativeScale();
		double newScale = StandardDialog.getNumberFromUser("Change Panel Size",1);
		return newScale* original;
	}

	/**Sets a new preprocess scale for the image, panels will be resized by this change
	 * and layout rows and columns will also be resized.
	 * Returns null when the scale object is identical (reference comparison)
	 * to the current one.*/
	public static AbstractUndoableEdit2 applyNewScaleTo(MultichannelDisplayLayer display, ScaleInformation newScale) {
		if (display.getPreprocessScale()==newScale) return null;
		PreprocessChangeUndo output1 = new PreprocessChangeUndo(display);
		display.setPreprocessScale(newScale);
		output1.establishFinalLocations();
		UndoLayoutEdit output2 = updateRowColSizesOf(display);
		return new CombinedEdit(output1,output2 );
	}

	/**applies a new panel size to all of the images in the the layer
	 * @param layer the display layer whose image panels are rescaled
	 * @param newScale the relative scale to apply to every image panel
	 * @return a combined edit with one scaling undo per panel plus the layout resize
	 */
	private UndoableEdit applyNewPanelSizeTo(MultichannelDisplayLayer layer, double newScale) {
		CombinedEdit undoOutPut = new CombinedEdit();
		for(ZoomableGraphic object:layer.getAllGraphics()) {
			if (object instanceof ImagePanelGraphic) {
				ImagePanelGraphic imagepanel=(ImagePanelGraphic) object;
				UndoScalingAndRotation undo = new UndoScalingAndRotation(imagepanel);
				imagepanel.setRelativeScale(newScale);
				undo.establishFinalState();
				undoOutPut.addEditToList(undo);
			}
		}
		undoOutPut.addEditToList( updateRowColSizesOf(layer));
		return undoOutPut;
	}

	/**shows a labeling options dialog.
	 * Exposed as a menu item through the MenuItemExecuter created in addMenus.*/
	@MenuItemMethod(menuActionCommand = "Label Creation Options", menuText = "Label Creation Options")
	public void changeLabelProperties() {
		new StoredValueDilaog(LabelCreationOptions.current).showDialog();;
	}
}
npocmaka/Windows-Server-2003
inetsrv/iis/utils/metautil/metaschm.cpp
<filename>inetsrv/iis/utils/metautil/metaschm.cpp
/*===================================================================
Microsoft Denali

Microsoft Confidential. Copyright 1997 Microsoft Corporation. All Rights Reserved.

Component: MetaUtil object

File: MetaSchm.cpp

Owner: t-BrianM

This file contains implementation of the CMetaSchemaTable object and
other schema related objects.

The CMetaSchemaTable object has COM style reference counting so it can
service objects created by CMetaUtil. I didn't make it a full blown
COM object because all of the class stuff would be a pain to export.

To reduce the overhead of maintaining this object (which may or may
not be used), all information is loaded on demand, then set dirty or
unloaded when portions of the metabase associated with it are modified.
===================================================================*/

#include "stdafx.h"
#include "MetaUtil.h"
#include "MUtilObj.h"
#include "MetaSchm.h"

/*------------------------------------------------------------------
 * C P r o p I n f o
 */

/*===================================================================
CPropInfo::Init

Constructor

Parameters:
	dwId	Id of property

Returns:
	S_OK on success
===================================================================*/
HRESULT CPropInfo::Init(DWORD dwId)
{
	m_dwId = dwId;

	return S_OK;
}

/*===================================================================
CPropInfo::SetName

Sets the property name (takes a private heap copy of the string).

Parameters:
	tszName	Name of property

Returns:
	E_OUTOFMEMORY if allocation fails
	S_OK on success
===================================================================*/
HRESULT CPropInfo::SetName(LPCTSTR tszName)
{
	ASSERT_STRING(tszName);
	ASSERT(m_tszName == NULL);  // m_tszName not yet set

	m_tszName = new TCHAR[_tcslen(tszName) + 1];
	// NOTE(review): NULL check assumes a non-throwing operator new (old
	// MSVC behavior) — with a standard throwing new this branch is dead.
	if (m_tszName == NULL) {
		return E_OUTOFMEMORY;
	}
	_tcscpy(m_tszName, tszName);

	return S_OK;
}

/*===================================================================
CPropInfo::SetTypeInfo

Sets the property type information (copies the PropValue structure).
(Original header said "Sets the property name." — copy/paste error.)

Parameters:
	pType	PropValue structure containing type information.

Returns:
	E_OUTOFMEMORY if allocation fails
	S_OK on success
===================================================================*/
HRESULT CPropInfo::SetTypeInfo(PropValue *pType)
{
	ASSERT_POINTER(pType, PropValue);
	ASSERT(m_pType == NULL);  // m_pType not yet set

	m_pType = new PropValue;
	if (m_pType == NULL) {
		return E_OUTOFMEMORY;
	}
	// Shallow byte copy; PropValue is treated as POD here.
	memcpy(m_pType, pType, sizeof(PropValue));

	return S_OK;
}

/*------------------------------------------------------------------
 * C P r o p I n f o T a b l e
 */

/*===================================================================
CPropInfoTable::CPropInfoTable

Constructor

Parameters:
	None

Returns:
	Nothing
===================================================================*/
CPropInfoTable::CPropInfoTable() : m_fLoaded(FALSE)
{
	// Clear the hash tables (both index the same CPropInfo objects,
	// one by property id and one by property name).
	memset(m_rgCPropIdTable, 0, PROPERTY_HASH_SIZE * sizeof(CPropInfo *));
	memset(m_rgCPropNameTable, 0, PROPERTY_HASH_SIZE * sizeof(CPropInfo *));
}

/*===================================================================
CPropInfoTable::~CPropInfoTable

Destructor

Parameters:
	None

Returns:
	Nothing
===================================================================*/
CPropInfoTable::~CPropInfoTable()
{
	if (m_fLoaded) {
		Unload();
	}
}

/*===================================================================
CPropInfoTable::Load

Loads properties from the "_Machine_/Schema/Properties" key into the
property information table.  On failure, recovers by unloading
everything.
Parameters: pIMeta ATL Smart pointer to the metabase hMDComp Open metabase handle to "_Machine_" key Returns: E_OUTOFMEMORY on allocation failure S_OK on success ===================================================================*/ HRESULT CPropInfoTable::Load(CComPtr<IMSAdminBase> &pIMeta, METADATA_HANDLE hMDComp) { //If it's already loaded, unload then reload if (m_fLoaded) { Unload(); } USES_CONVERSION; HRESULT hr; int iDataIndex; METADATA_RECORD mdrDataRec; DWORD dwReqDataLen; DWORD dwReturnBufLen; UCHAR *lpReturnBuf = NULL; unsigned int uiLoc; CPropInfo *pCNewProp; METADATA_HANDLE hMDNames = NULL; METADATA_HANDLE hMDTypes = NULL; //Setup the return buffer dwReturnBufLen = 1024; lpReturnBuf = new UCHAR[dwReturnBufLen]; if (lpReturnBuf == NULL) return E_OUTOFMEMORY; // Open the Schema/Properties/Names subkey hr = pIMeta->OpenKey(hMDComp, L"Schema/Properties/Names", METADATA_PERMISSION_READ, MUTIL_OPEN_KEY_TIMEOUT, &hMDNames); if (FAILED(hr)) { delete lpReturnBuf; return hr; }; // For each name iDataIndex = 0; mdrDataRec.dwMDIdentifier = 0; mdrDataRec.dwMDAttributes = METADATA_NO_ATTRIBUTES; mdrDataRec.dwMDUserType = ALL_METADATA; mdrDataRec.dwMDDataType = ALL_METADATA; mdrDataRec.dwMDDataLen = dwReturnBufLen; mdrDataRec.pbMDData = (PBYTE) lpReturnBuf; mdrDataRec.dwMDDataTag = 0; hr = pIMeta->EnumData(hMDNames, NULL, &mdrDataRec, iDataIndex, &dwReqDataLen); while (SUCCEEDED(hr)) { // Make sure we got a string if (mdrDataRec.dwMDDataType != STRING_METADATA) { hr = HRESULT_FROM_WIN32(ERROR_INVALID_DATA); goto LError; } // Create the property object pCNewProp = new CPropInfo; if (pCNewProp == NULL) { hr = E_OUTOFMEMORY; goto LError; } hr = pCNewProp->Init(mdrDataRec.dwMDIdentifier); if (FAILED(hr)) { delete pCNewProp; goto LError; } hr = pCNewProp->SetName(W2T(reinterpret_cast<LPWSTR> (lpReturnBuf))); if (FAILED(hr)) { delete pCNewProp; goto LError; } // Add it to the Id hash table uiLoc = IdHash(mdrDataRec.dwMDIdentifier); pCNewProp->m_pCIdHashNext = 
m_rgCPropIdTable[uiLoc]; m_rgCPropIdTable[uiLoc] = pCNewProp; // Add it to the Name hash table uiLoc = NameHash(pCNewProp->m_tszName); pCNewProp->m_pCNameHashNext = m_rgCPropNameTable[uiLoc]; m_rgCPropNameTable[uiLoc] = pCNewProp; iDataIndex++; mdrDataRec.dwMDIdentifier = 0; mdrDataRec.dwMDAttributes = METADATA_NO_ATTRIBUTES; mdrDataRec.dwMDUserType = ALL_METADATA; mdrDataRec.dwMDDataType = ALL_METADATA; mdrDataRec.dwMDDataLen = dwReturnBufLen; mdrDataRec.pbMDData = (PBYTE) lpReturnBuf; mdrDataRec.dwMDDataTag = 0; hr = pIMeta->EnumData(hMDNames, NULL, &mdrDataRec, iDataIndex, &dwReqDataLen); } // Make sure we ran out of items if (HRESULT_CODE(hr) != ERROR_NO_MORE_ITEMS) { goto LError; } // Close the Schema/Properties/Names sub-key pIMeta->CloseKey(hMDNames); hMDNames = NULL; // Open the Schema/Properties/Types sub-key hr = pIMeta->OpenKey(hMDComp, L"Schema/Properties/Types", METADATA_PERMISSION_READ, MUTIL_OPEN_KEY_TIMEOUT, &hMDTypes); if (FAILED(hr)) { goto LError; }; // For each type iDataIndex = 0; mdrDataRec.dwMDIdentifier = 0; mdrDataRec.dwMDAttributes = METADATA_NO_ATTRIBUTES; mdrDataRec.dwMDUserType = ALL_METADATA; mdrDataRec.dwMDDataType = ALL_METADATA; mdrDataRec.dwMDDataLen = dwReturnBufLen; mdrDataRec.pbMDData = (PBYTE) lpReturnBuf; mdrDataRec.dwMDDataTag = 0; hr = pIMeta->EnumData(hMDTypes, NULL, &mdrDataRec, iDataIndex, &dwReqDataLen); while (SUCCEEDED(hr)) { // Make sure we got binary data if (mdrDataRec.dwMDDataType != BINARY_METADATA) { hr = HRESULT_FROM_WIN32(ERROR_INVALID_DATA); goto LError; } // Look for an existing property object for this Id pCNewProp = GetPropInfo(mdrDataRec.dwMDIdentifier); if (pCNewProp == NULL) { // Create the property object pCNewProp = new CPropInfo; if (pCNewProp == NULL) { hr = E_OUTOFMEMORY; goto LError; } hr = pCNewProp->Init(mdrDataRec.dwMDIdentifier); if (FAILED(hr)) { delete pCNewProp; goto LError; } // Add it to the Id hash table uiLoc = IdHash(mdrDataRec.dwMDIdentifier); pCNewProp->m_pCIdHashNext = 
m_rgCPropIdTable[uiLoc]; m_rgCPropIdTable[uiLoc] = pCNewProp; } // Add type information to the property object pCNewProp->SetTypeInfo(reinterpret_cast<PropValue *> (lpReturnBuf)); iDataIndex++; mdrDataRec.dwMDIdentifier = 0; mdrDataRec.dwMDAttributes = METADATA_NO_ATTRIBUTES; mdrDataRec.dwMDUserType = ALL_METADATA; mdrDataRec.dwMDDataType = ALL_METADATA; mdrDataRec.dwMDDataLen = dwReturnBufLen; mdrDataRec.pbMDData = (PBYTE) lpReturnBuf; mdrDataRec.dwMDDataTag = 0; hr = pIMeta->EnumData(hMDTypes, NULL, &mdrDataRec, iDataIndex, &dwReqDataLen); } // Make sure we ran out of items if (HRESULT_CODE(hr) != ERROR_NO_MORE_ITEMS) { goto LError; } // Close the Schema/Properties/Types sub-key pIMeta->CloseKey(hMDTypes); hMDTypes = NULL; delete lpReturnBuf; m_fLoaded = TRUE; return S_OK; LError: if (hMDNames != NULL) { pIMeta->CloseKey(hMDNames); } if (hMDTypes != NULL) { pIMeta->CloseKey(hMDTypes); } if (lpReturnBuf != NULL) { delete lpReturnBuf; } // Cleanup the entries we loaded Unload(); return hr; } /*=================================================================== CPropInfoTable::Unload Unloads the property information table. 
Parameters: None Returns: Nothing ===================================================================*/ void CPropInfoTable::Unload() { int iIndex; CPropInfo *pCDeleteProp; //Clear the Name table memset(m_rgCPropNameTable, 0, PROPERTY_HASH_SIZE * sizeof(CPropInfo *)); // For each Id hash table entry for (iIndex =0; iIndex < PROPERTY_HASH_SIZE; iIndex++) { // While the entry is not empty while (m_rgCPropIdTable[iIndex] != NULL) { // Nuke the first table entry pCDeleteProp = m_rgCPropIdTable[iIndex]; m_rgCPropIdTable[iIndex] = pCDeleteProp->m_pCIdHashNext; delete pCDeleteProp; } } m_fLoaded = FALSE; } /*=================================================================== CPropInfoTable::GetPropInfo Gets property information from the table based on property id Parameters: dwId Id of property to get Returns: NULL if property not found or error Pointer to CPropInfo class on success ===================================================================*/ CPropInfo *CPropInfoTable::GetPropInfo(DWORD dwId) { CPropInfo *pCCurProp; // Go to the table location pCCurProp = m_rgCPropIdTable[IdHash(dwId)]; // Look at all of the entries while ((pCCurProp != NULL) && (pCCurProp->m_dwId != dwId)) { pCCurProp = pCCurProp->m_pCIdHashNext; } return pCCurProp; // Will be NULL if not found } /*=================================================================== CPropInfoTable::GetPropInfo Gets property information from the table based on property name. Case insensitive. 
Parameters: tszName Name of property to get Returns: NULL if property not found or error Pointer to CPropInfo class on success ===================================================================*/ CPropInfo *CPropInfoTable::GetPropInfo(LPCTSTR tszName) { CPropInfo *pCCurProp; // Go to the table location pCCurProp = m_rgCPropNameTable[NameHash(tszName)]; // Look at all of the entries while ((pCCurProp != NULL) && (_tcsicmp(pCCurProp->m_tszName, tszName) != 0)) { pCCurProp = pCCurProp->m_pCNameHashNext; } return pCCurProp; // Will be NULL if not found } /*=================================================================== CPropInfoTable::NameHash Private function to get a hash value from a property name for the name table. Case insensitive. Parameters: tszName Name to hash Returns: Hash value of name ===================================================================*/ unsigned int CPropInfoTable::NameHash(LPCTSTR tszName) { ASSERT_STRING(tszName); unsigned int uiSum; unsigned int uiIndex; uiSum = 0; for (uiIndex=0; uiIndex < _tcslen(tszName); uiIndex++) { uiSum += _totlower(tszName[uiIndex]); } return (uiSum % PROPERTY_HASH_SIZE); } /*------------------------------------------------------------------ * C C l a s s P r o p I n f o */ // Everything is inline /*------------------------------------------------------------------ * C C l a s s I n f o */ /*=================================================================== CClassInfo::CClassInfo Constructor Parameters: None Returns: Nothing ===================================================================*/ CClassInfo::CClassInfo() : m_tszName(NULL), m_pCHashNext(NULL), m_fLoaded(FALSE), m_pCOptionalPropList(NULL), m_pCMandatoryPropList(NULL) { // Clear the hash table memset(m_rgCPropTable, 0, CLASS_PROPERTY_HASH_SIZE * sizeof(CClassPropInfo *)); } /*=================================================================== CClassInfo::Init Constructor Parameters: tszName Name of the class Returns: E_OUTOFMEMORY on allocation 
failure S_OK on success ===================================================================*/ HRESULT CClassInfo::Init(LPCTSTR tszName) { ASSERT_STRING(tszName); m_tszName = new TCHAR[_tcslen(tszName) + 1]; if (m_tszName == NULL) { return E_OUTOFMEMORY; } _tcscpy(m_tszName, tszName); return S_OK; } /*=================================================================== CClassInfo::~CClassInfo Destructor Parameters: None Returns: Nothing ===================================================================*/ CClassInfo::~CClassInfo() { Unload(); if (m_tszName != NULL) { delete m_tszName; } } /*=================================================================== CClassInfo::Load Loads class properties from the "_Machine_/Schema/Classes/_Class_/Mandatory" and "_Machine_/Schema/Classes/_Class_/Optional" keys into the class property information table, mandatory list and optional list. On failure, recovers by unloading everything. Parameters: pIMeta ATL Smart pointer to the metabase hMDClasses Open metabase handle to "_Machine_/Schema/Classes" key Returns: E_OUTOFMEMORY on allocation failure S_OK on success ===================================================================*/ HRESULT CClassInfo::Load(CComPtr<IMSAdminBase> &pIMeta, METADATA_HANDLE hMDClasses) { USES_CONVERSION; HRESULT hr; //If it's already loaded, unload then reload if (m_fLoaded) { Unload(); } // Open the class key METADATA_HANDLE hMDClass = NULL; hr = pIMeta->OpenKey(hMDClasses, T2W(m_tszName), METADATA_PERMISSION_READ, 1000, &hMDClass); if (FAILED(hr)) { return hr; } // Load the class properties METADATA_HANDLE hMDClassProp = NULL; int iDataIndex; METADATA_RECORD mdr; DWORD dwReqDataLen; DWORD dwReturnBufLen; UCHAR *lpReturnBuf = NULL; unsigned int uiLoc; CClassPropInfo *pCNewClassProp; //Setup the return buffer dwReturnBufLen = 1024; lpReturnBuf = new UCHAR[dwReturnBufLen]; if (lpReturnBuf == NULL) { hr = E_OUTOFMEMORY; goto LError; } // Load the mandatory class properties // Open the Mandatory key hr = 
pIMeta->OpenKey(hMDClass, L"Mandatory", METADATA_PERMISSION_READ, 1000, &hMDClassProp); if (FAILED(hr)) { goto LError; } // For each Mandatory property iDataIndex = 0; mdr.dwMDIdentifier = 0; mdr.dwMDAttributes = METADATA_NO_ATTRIBUTES; mdr.dwMDUserType = ALL_METADATA; mdr.dwMDDataType = ALL_METADATA; mdr.dwMDDataLen = dwReturnBufLen; mdr.pbMDData = (PBYTE) lpReturnBuf; mdr.dwMDDataTag = 0; hr = pIMeta->EnumData(hMDClassProp, NULL, &mdr, iDataIndex, &dwReqDataLen); while (SUCCEEDED(hr)|| (HRESULT_CODE(hr) == ERROR_INSUFFICIENT_BUFFER)) { // Handle insufficent buffer errors if ((HRESULT_CODE(hr) == ERROR_INSUFFICIENT_BUFFER)) { // Allocate more memory delete lpReturnBuf; dwReturnBufLen = dwReqDataLen; lpReturnBuf = new UCHAR[dwReturnBufLen]; if (lpReturnBuf == NULL) { hr = E_OUTOFMEMORY; goto LError; } // Loop again hr = S_OK; } else { //Buffer is big enough, proceed to add the class property // Create the Class Property object pCNewClassProp = new CClassPropInfo; if (pCNewClassProp == NULL) { hr = E_OUTOFMEMORY; goto LError; } hr = pCNewClassProp->Init(mdr.dwMDIdentifier, TRUE); if (FAILED(hr)) { delete pCNewClassProp; goto LError; } //Add it to the mandatory list pCNewClassProp->m_pCListNext = m_pCMandatoryPropList; m_pCMandatoryPropList = pCNewClassProp; //Add it to the hash table uiLoc = Hash(mdr.dwMDIdentifier); pCNewClassProp->m_pCHashNext = m_rgCPropTable[uiLoc]; m_rgCPropTable[uiLoc] = pCNewClassProp; iDataIndex++; } mdr.dwMDIdentifier = 0; mdr.dwMDAttributes = METADATA_NO_ATTRIBUTES; mdr.dwMDUserType = ALL_METADATA; mdr.dwMDDataType = ALL_METADATA; mdr.dwMDDataLen = dwReturnBufLen; mdr.pbMDData = (PBYTE) lpReturnBuf; mdr.dwMDDataTag = 0; hr = pIMeta->EnumData(hMDClassProp, NULL, &mdr, iDataIndex, &dwReqDataLen); } // Make sure we ran out of items if (HRESULT_CODE(hr) != ERROR_NO_MORE_ITEMS) { goto LError; } // Close the Mandatory key pIMeta->CloseKey(hMDClassProp); hMDClassProp = NULL; // Load the optional class properties // Open the Optional key hr = 
pIMeta->OpenKey(hMDClass, L"Optional", METADATA_PERMISSION_READ, 1000, &hMDClassProp); if (FAILED(hr)) { goto LError; } // For each Optional property iDataIndex = 0; mdr.dwMDIdentifier = 0; mdr.dwMDAttributes = METADATA_NO_ATTRIBUTES; mdr.dwMDUserType = ALL_METADATA; mdr.dwMDDataType = ALL_METADATA; mdr.dwMDDataLen = dwReturnBufLen; mdr.pbMDData = (PBYTE) lpReturnBuf; mdr.dwMDDataTag = 0; hr = pIMeta->EnumData(hMDClassProp, NULL, &mdr, iDataIndex, &dwReqDataLen); while (SUCCEEDED(hr)|| (HRESULT_CODE(hr) == ERROR_INSUFFICIENT_BUFFER)) { // Handle insufficent buffer errors if ((HRESULT_CODE(hr) == ERROR_INSUFFICIENT_BUFFER)) { // Allocate more memory delete lpReturnBuf; dwReturnBufLen = dwReqDataLen; lpReturnBuf = new UCHAR[dwReturnBufLen]; if (lpReturnBuf == NULL) { hr = E_OUTOFMEMORY; goto LError; } // Loop again hr = S_OK; } else { //Buffer is big enough, proceed to add the class property // Create the Class Property object pCNewClassProp = new CClassPropInfo; if (pCNewClassProp == NULL) { hr = E_OUTOFMEMORY; goto LError; } hr = pCNewClassProp->Init(mdr.dwMDIdentifier, FALSE); if (FAILED(hr)) { delete pCNewClassProp; goto LError; } //Add it to the optional list pCNewClassProp->m_pCListNext = m_pCOptionalPropList; m_pCOptionalPropList = pCNewClassProp; //Add it to the hash table uiLoc = Hash(mdr.dwMDIdentifier); pCNewClassProp->m_pCHashNext = m_rgCPropTable[uiLoc]; m_rgCPropTable[uiLoc] = pCNewClassProp; iDataIndex++; } mdr.dwMDIdentifier = 0; mdr.dwMDAttributes = METADATA_NO_ATTRIBUTES; mdr.dwMDUserType = ALL_METADATA; mdr.dwMDDataType = ALL_METADATA; mdr.dwMDDataLen = dwReturnBufLen; mdr.pbMDData = (PBYTE) lpReturnBuf; mdr.dwMDDataTag = 0; hr = pIMeta->EnumData(hMDClassProp, NULL, &mdr, iDataIndex, &dwReqDataLen); } // Make sure we ran out of items if (HRESULT_CODE(hr) != ERROR_NO_MORE_ITEMS) { goto LError; } // Close the Optional key pIMeta->CloseKey(hMDClassProp); delete lpReturnBuf; // Close the Class key pIMeta->CloseKey(hMDClass); m_fLoaded = TRUE; return 
S_OK; //Error durring loading, back out LError: if (hMDClassProp != NULL) { pIMeta->CloseKey(hMDClassProp); } if (hMDClass != NULL) { pIMeta->CloseKey(hMDClass); } if (lpReturnBuf != NULL) { delete lpReturnBuf; } Unload(); return hr; } /*=================================================================== CClassInfo::Unload Unloads the class property information table. Parameters: None Returns: Nothing ===================================================================*/ void CClassInfo::Unload() { int iIndex; CClassPropInfo *pCDeleteProp; // Clear the lists m_pCOptionalPropList = NULL; m_pCMandatoryPropList = NULL; // For each hash table entry for (iIndex =0; iIndex < CLASS_PROPERTY_HASH_SIZE; iIndex++) { // While the entry is not empty while (m_rgCPropTable[iIndex] != NULL) { // Nuke the first table entry pCDeleteProp = m_rgCPropTable[iIndex]; m_rgCPropTable[iIndex] = pCDeleteProp->m_pCHashNext; delete pCDeleteProp; } } m_fLoaded = FALSE; } /*=================================================================== CClassInfo::GetProperty Get the CClassPropInfo (class property info) object from the hash table given the property id. 
Parameters: dwId Identifier of the property to get Returns: NULL on failure Pointer to the CClassPropInfo object on success ===================================================================*/ CClassPropInfo *CClassInfo::GetProperty(DWORD dwId) { CClassPropInfo *pCCurProp; // Go to the table location pCCurProp = m_rgCPropTable[Hash(dwId)]; // Look at all of the entries while ((pCCurProp != NULL) && (pCCurProp->m_dwId != dwId)) { pCCurProp = pCCurProp->m_pCHashNext; } return pCCurProp; // Will be NULL if not found } /*------------------------------------------------------------------ * C C l a s s I n f o T a b l e */ /*=================================================================== CClassInfoTable::CClassInfoTable Constructor Parameters: None Returns: Nothing ===================================================================*/ CClassInfoTable::CClassInfoTable() : m_fLoaded(FALSE) { // Clear the hash table memset(m_rgCClassTable, 0, CLASS_HASH_SIZE * sizeof(CClassInfo *)); } /*=================================================================== CClassInfoTable::~CClassInfoTable Destructor Parameters: None Returns: Nothing ===================================================================*/ CClassInfoTable::~CClassInfoTable() { if (m_fLoaded) { Unload(); } } /*=================================================================== CClassInfoTable::Load Loads classes from the "_Machine_/Schema/Classes" key into the class information table. On failure, recovers by unloading everything. 
Parameters: pIMeta ATL Smart pointer to the metabase hMDComp Open metabase handle to "_Machine_" key Returns: E_OUTOFMEMORY on allocation failure S_OK on success ===================================================================*/ HRESULT CClassInfoTable::Load(CComPtr<IMSAdminBase> &pIMeta, METADATA_HANDLE hMDComp) { ASSERT(pIMeta.p != NULL); USES_CONVERSION; HRESULT hr; //If it's already loaded, unload then reload if (m_fLoaded) { Unload(); } int iKeyIndex; wchar_t wszSubKey[ADMINDATA_MAX_NAME_LEN]; LPTSTR tszSubKey; int iLoc; CClassInfo *pCNewClass; //Load the classes METADATA_HANDLE hMDClasses = NULL; // Open the Schema/Classes subkey hr = pIMeta->OpenKey(hMDComp, L"Schema/Classes", METADATA_PERMISSION_READ, 1000, &hMDClasses); if (FAILED(hr)) { return hr; }; // For each subkey iKeyIndex = 0; hr = pIMeta->EnumKeys(hMDClasses, NULL, wszSubKey, iKeyIndex); while (SUCCEEDED(hr)) { tszSubKey = W2T(wszSubKey); // Create the new class pCNewClass = new CClassInfo; if (pCNewClass == NULL) { hr = E_OUTOFMEMORY; goto LError; } hr = pCNewClass->Init(tszSubKey); if (FAILED(hr)) { delete pCNewClass; goto LError; } // Load the class properties hr = pCNewClass->Load(pIMeta, hMDClasses); if (FAILED(hr)) { delete pCNewClass; goto LError; } // Add it to the hash table iLoc = Hash(tszSubKey); pCNewClass->m_pCHashNext = m_rgCClassTable[iLoc]; m_rgCClassTable[iLoc] = pCNewClass; iKeyIndex++; hr = pIMeta->EnumKeys(hMDClasses, NULL, wszSubKey, iKeyIndex); } //Make sure we ran out of items if (!(HRESULT_CODE(hr) == ERROR_NO_MORE_ITEMS)) { goto LError; } // Close the schema properties key pIMeta->CloseKey(hMDClasses); m_fLoaded = TRUE; return S_OK; LError: if (hMDClasses != NULL) { pIMeta->CloseKey(hMDClasses); } // Cleanup the entries we loaded Unload(); return hr; } /*=================================================================== CClassInfo::Unload Unloads the class information table. 
Parameters: None Returns: Nothing ===================================================================*/ void CClassInfoTable::Unload() { int iIndex; CClassInfo *pCDeleteClass; // For each hash table entry for (iIndex =0; iIndex < CLASS_HASH_SIZE; iIndex++) { // While the entry is not empty while (m_rgCClassTable[iIndex] != NULL) { // Nuke the first table entry pCDeleteClass = m_rgCClassTable[iIndex]; m_rgCClassTable[iIndex] = pCDeleteClass->m_pCHashNext; delete pCDeleteClass; } } m_fLoaded = FALSE; } /*=================================================================== CCClasssInfoTable::GetClassInfo Get the CClassInfo (class info) object from the hash table given the class name Parameters: tszClassName Name of the class to get info for Returns: NULL on failure Pointer to the CClassInfo object on success ===================================================================*/ CClassInfo *CClassInfoTable::GetClassInfo(LPCTSTR tszName) { ASSERT_STRING(tszName); CClassInfo *pCCurClass; // Go to the table location pCCurClass = m_rgCClassTable[Hash(tszName)]; // Look at all of the entries while ((pCCurClass != NULL) && (_tcscmp(pCCurClass->m_tszName, tszName) != 0)) { pCCurClass = pCCurClass->m_pCHashNext; } return pCCurClass; // Will be NULL if not found } /*=================================================================== CClassInfoTable::Hash Private function to get a hash value from a class name for the class table. 
Parameters: tszName Name to hash Returns: Hash value of name ===================================================================*/ unsigned int CClassInfoTable::Hash(LPCTSTR tszName) { ASSERT_STRING(tszName); unsigned int uiSum; unsigned int uiIndex; uiSum = 0; for (uiIndex=0; uiIndex < _tcslen(tszName); uiIndex++) { uiSum += tszName[uiIndex]; } return (uiSum % CLASS_HASH_SIZE); } /*------------------------------------------------------------------ * C M e t a S c h e m a */ /*=================================================================== CMetaSchema::Init Constructor Parameters: pIMeta ATL Smart pointer to the metabase tszMachineName Name of machine the schema is for Returns: E_OUTOFMEMORY if allocation fails S_OK on success ===================================================================*/ HRESULT CMetaSchema::Init(const CComPtr<IMSAdminBase> &pIMeta, LPCTSTR tszMachineName) { ASSERT(pIMeta.p != NULL); ASSERT_STRING(tszMachineName); m_pIMeta = pIMeta; m_tszMachineName = new TCHAR[_tcslen(tszMachineName) + 1]; if (m_tszMachineName == NULL) return E_OUTOFMEMORY; _tcscpy(m_tszMachineName, tszMachineName); return S_OK; } /*=================================================================== CMetaSchema::GetPropInfo Get the CPropInfo (property info) object for a given id Parameters: dwId Id of property to get info for Returns: NULL on failure Pointer to the CPropInfo object on success ===================================================================*/ CPropInfo *CMetaSchema::GetPropInfo(DWORD dwId) { // Make sure the property table is up to date if (m_fPropTableDirty) { HRESULT hr; hr = LoadPropTable(); if (FAILED(hr)) { return NULL; } } // Pass on the call return m_CPropInfoTable.GetPropInfo(dwId); } /*=================================================================== CMetaSchema::GetPropInfo Get the CPropInfo (property info) object for a given name Parameters: tszName Name of property to get info for Returns: NULL on failure Pointer to the CPropInfo object 
on success ===================================================================*/ CPropInfo *CMetaSchema::GetPropInfo(LPCTSTR tszName) { ASSERT_STRING(tszName); // Make sure the property table is up to date if (m_fPropTableDirty) { HRESULT hr; hr = LoadPropTable(); if (FAILED(hr)) { return NULL; } } // Pass on the call return m_CPropInfoTable.GetPropInfo(tszName); } /*=================================================================== CMetaSchema::GetClassInfo Get the CClassInfo (class info) object for a given class name Parameters: tszClassName Name of the class to get info for Returns: NULL on failure Pointer to the CClassInfo object on success ===================================================================*/ CClassInfo *CMetaSchema::GetClassInfo(LPCTSTR tszClassName) { ASSERT_STRING(tszClassName); // Make sure the class table is up to date if (m_fClassTableDirty) { HRESULT hr; hr = LoadClassTable(); if (FAILED(hr)) { return NULL; } } // Pass on the call return m_CClassInfoTable.GetClassInfo(tszClassName); } /*=================================================================== CMetaSchema:::GetClassPropInfo Get the CClassPropInfo (class property info) object for a given class name and property id. 
Parameters:
    tszClassName    Name of the class get property from
    dwPropId        Id of property to get info for

Returns:
    NULL on failure
    Pointer to the CClassPropInfo object on success
===================================================================*/
CClassPropInfo *CMetaSchema::GetClassPropInfo(LPCTSTR tszClassName, DWORD dwPropId)
{
    // Make sure the class table is up to date
    if (m_fClassTableDirty) {
        HRESULT hr;
        hr = LoadClassTable();
        if (FAILED(hr)) {
            return NULL;
        }
    }

    // Get the class
    CClassInfo *pCClassInfo;
    pCClassInfo = m_CClassInfoTable.GetClassInfo(tszClassName);
    if (pCClassInfo == NULL) {
        return NULL;
    }
    else {
        // Pass on the call
        return pCClassInfo->GetProperty(dwPropId);
    }
}

/*===================================================================
CMetaSchema::GetMandatoryClassPropList

Get the list of mandatory class properties for a class name.  (The
original header said "optional" here -- a copy/paste slip; this
routine returns the MANDATORY list.)

Parameters:
    tszClassName    Name of the class get the properties from

Returns:
    NULL on failure
    Pointer to the first mandatory CClassPropInfo object on success
===================================================================*/
CClassPropInfo *CMetaSchema::GetMandatoryClassPropList(LPCTSTR tszClassName)
{
    // Make sure the class table is up to date
    if (m_fClassTableDirty) {
        HRESULT hr;
        hr = LoadClassTable();
        if (FAILED(hr)) {
            return NULL;
        }
    }

    // Get the class
    CClassInfo *pCClassInfo;
    pCClassInfo = m_CClassInfoTable.GetClassInfo(tszClassName);
    if (pCClassInfo == NULL) {
        return NULL;
    }
    else {
        // Pass on the call
        return pCClassInfo->GetMandatoryPropList();
    }
}

/*===================================================================
CMetaSchema::GetOptionalClassPropList

Get the list of optional class properties for a class name.
Parameters: tszClassName Name of the class get the properties from Returns: NULL on failure Pointer to the first optional CClassPropInfo object on success ===================================================================*/ CClassPropInfo *CMetaSchema::GetOptionalClassPropList(LPCTSTR tszClassName) { // Make sure the class table is up to date if (m_fClassTableDirty) { HRESULT hr; hr = LoadClassTable(); if (FAILED(hr)) { return NULL; } } // Get the class CClassInfo *pCClassInfo; pCClassInfo = m_CClassInfoTable.GetClassInfo(tszClassName); if (pCClassInfo == NULL) { return NULL; } else { // Pass on the call return pCClassInfo->GetOptionalPropList(); } } /*=================================================================== CMetaSchema::ChangeNotification Processes change events effecting the machine where the schema is located. If the dirty flag for the property and class tables is not already set a call to Unload() is made to free up memory no longer needed. Parameters: tszChangedKey Cannonized key where change took place pcoChangeObject Pointer to the change event information Returns: Nothing ===================================================================*/ void CMetaSchema::ChangeNotification(LPTSTR tszKey, MD_CHANGE_OBJECT *pcoChangeObject) { ASSERT_POINTER(pcoChangeObject, MD_CHANGE_OBJECT); USES_CONVERSION; LPTSTR tszChangedKey; tszChangedKey = tszKey; // Skip the slash if (*tszChangedKey != _T('\0') && *tszChangedKey == _T('/')) { tszChangedKey++; } if (_tcsnicmp(tszChangedKey, _T("schema/"), 7) == 0) { // It effects a "Schema" subkey if ((_tcsnicmp(tszChangedKey, _T("schema/properties/"), 18) == 0) || (_tcsicmp(tszChangedKey, _T("schema/properties")) == 0)) { // It effects "Schema/Properties" if (!m_fPropTableDirty) { // Unload the prop table m_CPropInfoTable.Unload(); } m_fPropTableDirty = TRUE; } else if ((_tcsnicmp(tszChangedKey, _T("schema/classes/"), 15) == 0) || (_tcsicmp(tszChangedKey, _T("schema/classes")) == 0)) { // It effects "Schema/Classes" if 
(!m_fClassTableDirty) { // Unload the class table m_CClassInfoTable.Unload(); } m_fClassTableDirty = TRUE; } } else if (_tcsicmp(tszChangedKey, _T("schema")) == 0) { // Just the "Schema" key was changed if (!m_fPropTableDirty) { // Unload the prop table m_CPropInfoTable.Unload(); } m_fPropTableDirty = TRUE; if (!m_fClassTableDirty) { // Unload the class table m_CClassInfoTable.Unload(); } m_fClassTableDirty = TRUE; } } /*=================================================================== CMetaSchema::LoadPropTable (Re)loads a dirty property table Parameters: None Returns: S_OK on success ===================================================================*/ HRESULT CMetaSchema::LoadPropTable() { USES_CONVERSION; HRESULT hr; // Open the Machine key METADATA_HANDLE hMDKey; hr = m_pIMeta->OpenKey(METADATA_MASTER_ROOT_HANDLE, L"", // schema path moved to /schema METADATA_PERMISSION_READ, MUTIL_OPEN_KEY_TIMEOUT, &hMDKey); if (FAILED(hr)) { return hr; } // Load the properties hr = m_CPropInfoTable.Load(m_pIMeta, hMDKey); if (FAILED(hr)) { return hr; } // Close the Machine key m_pIMeta->CloseKey(hMDKey); m_fPropTableDirty = FALSE; return S_OK; } /*=================================================================== CMetaSchema::LoadClassTable (Re)loads a dirty class table Parameters: None Returns: S_OK on success ===================================================================*/ HRESULT CMetaSchema::LoadClassTable() { USES_CONVERSION; HRESULT hr; // Open the Machine key METADATA_HANDLE hMDKey; hr = m_pIMeta->OpenKey(METADATA_MASTER_ROOT_HANDLE, L"", // schema path moved to /schema METADATA_PERMISSION_READ, MUTIL_OPEN_KEY_TIMEOUT, &hMDKey); if (FAILED(hr)) { return hr; } // Load the properties hr = m_CClassInfoTable.Load(m_pIMeta, hMDKey); if (FAILED(hr)) { return hr; } // Close the Machine key m_pIMeta->CloseKey(hMDKey); m_fClassTableDirty = FALSE; return S_OK; } /*------------------------------------------------------------------ * C M e t a S c h e m a T a b l e */ 
/*===================================================================
CMetaSchemaTable::CMetaSchemaTable

Constructor.  Creates the metabase change-notification sink object
and takes an explicit reference on it, then clears the per-machine
schema hash table.  m_dwNumRef starts at 1 (creator holds a ref).
===================================================================*/
CMetaSchemaTable::CMetaSchemaTable() : m_dwNumRef(1), m_fLoaded(FALSE)
{
    // CComObject<> is created with refcount 0; AddRef() keeps it alive
    // until the destructor releases it.
    m_CMSAdminBaseSink = new CComObject<CMSAdminBaseSink>;
    m_CMSAdminBaseSink->AddRef();

    // Clear the hash table (array of CMetaSchema* chain heads)
    memset(m_rgCSchemaTable, 0, SCHEMA_HASH_SIZE * sizeof(CMetaSchema *));
}

/*===================================================================
CMetaSchemaTable::~CMetaSchemaTable

Destructor.  Unloads the schema table if loaded, then releases the
notification sink.
===================================================================*/
CMetaSchemaTable::~CMetaSchemaTable()
{
    TRACE0("MetaUtil: CMetaSchemaTable::~CMetaSchemaTable\n");

    if (m_fLoaded) {
        Unload();
    }

    DWORD dwTemp;
    if (m_CMSAdminBaseSink != NULL) {
        // Release the reference taken in the constructor
        dwTemp = m_CMSAdminBaseSink->Release();
        TRACE1("MetaUtil: CMetaSchemaTable::~CMetaSchemaTable Release Sink %i\n", dwTemp);
    }
}

/*===================================================================
CMetaSchemaTable::Load

Loads/creates the schema information for all of the machines:
enumerates the subkeys of the metabase root, builds one CMetaSchema
per machine, inserts each into the hash table, then subscribes to
change notifications.  On any failure the partial load is backed out
via Unload().
===================================================================*/
void CMetaSchemaTable::Load()
{
    USES_CONVERSION;
    HRESULT hr;

    if (m_fLoaded) {
        Unload();
    }

    // Create an instance of the Metabase Admin Base Object.
    // Need a seperate instance so we pick up changes made
    // by our "parent" MetaUtil object family.
    hr = ::CoCreateInstance(CLSID_MSAdminBase,
                            NULL,
                            CLSCTX_ALL,
                            IID_IMSAdminBase,
                            (void **)&m_pIMeta);
    if (FAILED(hr)) {
        m_pIMeta = NULL;
        return;
    }

    // Build the schema list
    int iKeyIndex;
    wchar_t wszMDSubKey[ADMINDATA_MAX_NAME_LEN];
    CMetaSchema *pCNewSchema;
    int iLoc;

    // For each subkey of root (one per machine)
    iKeyIndex = 0;
    hr = m_pIMeta->EnumKeys(METADATA_MASTER_ROOT_HANDLE, NULL, wszMDSubKey, iKeyIndex);
    while (SUCCEEDED(hr)) {

        // Create the new schema
        // (NULL check assumes pre-standard non-throwing operator new)
        pCNewSchema = new CMetaSchema;
        if (pCNewSchema == NULL) {
            goto LError;
        }
        hr = pCNewSchema->Init(m_pIMeta, W2T(wszMDSubKey));
        if (FAILED(hr)) {
            delete pCNewSchema;
            goto LError;
        }

        // Add it to the hash table (push at head of the bucket chain)
        iLoc = Hash(W2T(wszMDSubKey));
        pCNewSchema->m_pCNextSchema = m_rgCSchemaTable[iLoc];
        m_rgCSchemaTable[iLoc] = pCNewSchema;

        // Next
        iKeyIndex++;
        hr = m_pIMeta->EnumKeys(METADATA_MASTER_ROOT_HANDLE, NULL, wszMDSubKey, iKeyIndex);
    }

    // Make sure we ran out of items (i.e. enumeration ended normally)
    if (HRESULT_CODE(hr) != ERROR_NO_MORE_ITEMS) {
        goto LError;
    }

    // Setup notification so metabase edits invalidate cached schema
    if (m_CMSAdminBaseSink != NULL) {
        m_CMSAdminBaseSink->Connect(m_pIMeta, this);
    }

    m_fLoaded = TRUE;
    return;

LError:
    // Back out of the load
    Unload();
}

/*===================================================================
CMetaSchemaTable::Unload

Unloads the schema table: disconnects the notification sink, drops
the metabase interface and frees every CMetaSchema in every bucket.
===================================================================*/
void CMetaSchemaTable::Unload()
{
    int iIndex;
    CMetaSchema *pCDelete;

    // Stop notification
    if (m_CMSAdminBaseSink != NULL) {
        m_CMSAdminBaseSink->Disconnect();
    }
    // CComPtr assignment to NULL releases the interface
    m_pIMeta = NULL;

    // For each hash table entry
    for (iIndex = 0; iIndex < SCHEMA_HASH_SIZE; iIndex++) {
        // While the entry is not empty
        while (m_rgCSchemaTable[iIndex] != NULL) {
            // Nuke the first table entry
            pCDelete = m_rgCSchemaTable[iIndex];
            m_rgCSchemaTable[iIndex] = pCDelete->m_pCNextSchema;
            delete pCDelete;
        }
    }

    m_fLoaded = FALSE;
}

/*===================================================================
CMetaSchemaTable::GetPropInfo (by id)

Get the CPropInfo (property info) object for a given key and id.
Lazily loads the schema table on first use.

Parameters:
    tszKey      Key the property is located under
    dwPropId    Id of the property to get info for

Returns: NULL on failure, pointer to the CPropInfo object on success
===================================================================*/
CPropInfo *CMetaSchemaTable::GetPropInfo(LPCTSTR tszKey, DWORD dwPropId)
{
    ASSERT_STRING(tszKey);

    if (!m_fLoaded) {
        Load();
    }

    CMetaSchema *pCSchema;
    pCSchema = GetSchema(tszKey);

    // If found then pass the call on
    if (pCSchema != NULL) {
        return pCSchema->GetPropInfo(dwPropId);
    }
    else {
        return NULL;
    }
}

/*===================================================================
CMetaSchemaTable::GetPropInfo (by name)

Get the CPropInfo (property info) object for a given key and name.

Parameters:
    tszKey       Key the property is located under
    tszPropName  Name of the property to get info for

Returns: NULL on failure, pointer to the CPropInfo object on success
===================================================================*/
CPropInfo *CMetaSchemaTable::GetPropInfo(LPCTSTR tszKey, LPCTSTR tszPropName)
{
    ASSERT_STRING(tszKey);
    ASSERT_STRING(tszPropName);

    if (!m_fLoaded) {
        Load();
    }

    CMetaSchema *pCSchema;
    pCSchema = GetSchema(tszKey);

    // If found then pass the call on
    if (pCSchema != NULL) {
        return pCSchema->GetPropInfo(tszPropName);
    }
    else {
        return NULL;
    }
}

/*===================================================================
CMetaSchemaTable::GetClassInfo

Get the CClassInfo (class info) object for a given key and class name.

Parameters:
    tszKey        Approximate key the class is located under.
                  Used to get the machine name.
    tszClassName  Name of the class to get info for

Returns: NULL on failure, pointer to the CClassInfo object on success
===================================================================*/
CClassInfo *CMetaSchemaTable::GetClassInfo(LPCTSTR tszKey, LPCTSTR tszClassName)
{
    ASSERT_STRING(tszKey);
    ASSERT_STRING(tszClassName);

    if (!m_fLoaded) {
        Load();
    }

    CMetaSchema *pCSchema;
    pCSchema = GetSchema(tszKey);

    // If found then pass the call on
    if (pCSchema != NULL) {
        return pCSchema->GetClassInfo(tszClassName);
    }
    else {
        return NULL;
    }
}

/*===================================================================
CMetaSchemaTable::GetClassPropInfo

Get the CClassPropInfo (class property info) object for a given key,
class name and property id.

Parameters:
    tszKey        Approximate key the class is located under.
                  Used to get the machine name.
    tszClassName  Name of the class to get property from
    dwPropId      Id of property to get info for

Returns: NULL on failure, pointer to the CClassPropInfo object on success
===================================================================*/
CClassPropInfo *CMetaSchemaTable::GetClassPropInfo(LPCTSTR tszKey, LPCTSTR tszClassName, DWORD dwPropId)
{
    ASSERT_STRING(tszKey);
    ASSERT_STRING(tszClassName);

    if (!m_fLoaded) {
        Load();
    }

    CMetaSchema *pCSchema;
    pCSchema = GetSchema(tszKey);

    // If found then pass the call on
    if (pCSchema != NULL) {
        return pCSchema->GetClassPropInfo(tszClassName, dwPropId);
    }
    else {
        return NULL;
    }
}

/*===================================================================
CMetaSchemaTable::GetMandatoryClassPropList

Get the list of mandatory class properties for a given key and
class name.

Parameters:
    tszKey        Approximate key the class is located under.
                  Used to get the machine name.
    tszClassName  Name of the class to get the properties from

Returns: NULL on failure, pointer to the first mandatory
         CClassPropInfo object on success
===================================================================*/
CClassPropInfo *CMetaSchemaTable::GetMandatoryClassPropList(LPCTSTR tszKey, LPCTSTR tszClassName)
{
    ASSERT_STRING(tszKey);
    ASSERT_STRING(tszClassName);

    if (!m_fLoaded) {
        Load();
    }

    CMetaSchema *pCSchema;
    pCSchema = GetSchema(tszKey);

    // If found then pass the call on
    if (pCSchema != NULL) {
        return pCSchema->GetMandatoryClassPropList(tszClassName);
    }
    else {
        return NULL;
    }
}

/*===================================================================
CMetaSchemaTable::GetOptionalClassPropList

Get the list of optional class properties for a given key and
class name.

Parameters:
    tszKey        Approximate key the class is located under.
                  Used to get the machine name.
    tszClassName  Name of the class to get the properties from

Returns: NULL on failure, pointer to the first optional
         CClassPropInfo object on success
===================================================================*/
CClassPropInfo *CMetaSchemaTable::GetOptionalClassPropList(LPCTSTR tszKey, LPCTSTR tszClassName)
{
    ASSERT_STRING(tszKey);
    ASSERT_STRING(tszClassName);

    if (!m_fLoaded) {
        Load();
    }

    CMetaSchema *pCSchema;
    pCSchema = GetSchema(tszKey);

    // If found then pass the call on
    if (pCSchema != NULL) {
        return pCSchema->GetOptionalClassPropList(tszClassName);
    }
    else {
        return NULL;
    }
}

/*===================================================================
CMetaSchemaTable::SinkNotify

Metabase change notification callback from CMSAdminBaseSink.  Either
determines a need to reload all of the schema information (unknown
machine) or sends the event on to the appropriate CMetaSchema object.

Parameters:
    dwMDNumElements  Number of change events
    pcoChangeObject  Array of change events

Returns: S_OK always
===================================================================*/
HRESULT CMetaSchemaTable::SinkNotify(DWORD dwMDNumElements, MD_CHANGE_OBJECT pcoChangeObject[])
{
    ASSERT(IsValidAddress(pcoChangeObject, dwMDNumElements * sizeof(MD_CHANGE_OBJECT), FALSE));
    USES_CONVERSION;

    DWORD dwIndex;
    CMetaSchema *pCMetaSchema;

    // For each event
    for (dwIndex = 0; dwIndex < dwMDNumElements; dwIndex++) {

        // Figure out what machine it's for
        TCHAR tszKey[ADMINDATA_MAX_NAME_LEN];
        _tcscpy(tszKey, W2T(pcoChangeObject[dwIndex].pszMDPath));
        CannonizeKey(tszKey);
        pCMetaSchema = GetSchema(tszKey);

        // If the machine is not found
        if (pCMetaSchema == NULL) {
            // Reload the schema table
            Load();
        }
        else {
            // Send it to the appropriate machine
            pCMetaSchema->ChangeNotification(tszKey, &(pcoChangeObject[dwIndex]));
        }
    }

    return S_OK;
}

/*===================================================================
CMetaSchemaTable::GetSchema

Get the schema object that contains information on the given key:
extracts the machine name from the key and walks that machine's
hash-bucket chain (case-insensitive name compare).

Parameters:
    tszKey  Approximate key to get schema information for.

Returns: NULL on failure, pointer to the CMetaSchema object on success
===================================================================*/
CMetaSchema *CMetaSchemaTable::GetSchema(LPCTSTR tszKey)
{
    // Extract the machine name
    TCHAR tszMachineName[ADMINDATA_MAX_NAME_LEN];
    ::GetMachineFromKey(tszKey, tszMachineName);

    // Find the right schema
    CMetaSchema *pCCurSchema;
    pCCurSchema = m_rgCSchemaTable[Hash(tszMachineName)];
    while ((pCCurSchema != NULL) && (_tcsicmp(pCCurSchema->m_tszMachineName, tszMachineName) != 0)) {
        pCCurSchema = pCCurSchema->m_pCNextSchema;
    }

    return pCCurSchema;  // Will be NULL if not found
}

/*===================================================================
CMetaSchemaTable::Hash

Private function to get a hash value from a machine name for the
schema table.  Simple case-insensitive character sum mod table size.

Parameters:
    tszName  Machine name to hash

Returns: Hash value of name, in [0, SCHEMA_HASH_SIZE)
===================================================================*/
unsigned int CMetaSchemaTable::Hash(LPCTSTR tszName)
{
    ASSERT_STRING(tszName);

    unsigned int uiSum;
    unsigned int uiIndex;

    // Lower-case so hashing matches the case-insensitive compare
    // in GetSchema()
    uiSum = 0;
    for (uiIndex = 0; uiIndex < _tcslen(tszName); uiIndex++) {
        uiSum += _totlower(tszName[uiIndex]);
    }

    return (uiSum % SCHEMA_HASH_SIZE);
}

/*------------------------------------------------------------------
 * C M S A d m i n B a s e S i n k
 */

/*===================================================================
CMSAdminBaseSink::CMSAdminBaseSink

Constructor.  Starts disconnected with no owning table.
===================================================================*/
CMSAdminBaseSink::CMSAdminBaseSink() : m_fConnected(FALSE), m_dwCookie(0), m_pCMetaSchemaTable(NULL)
{
}

/*===================================================================
CMSAdminBaseSink::~CMSAdminBaseSink

Destructor.  Makes sure we disconnected from the connection point.
===================================================================*/
CMSAdminBaseSink::~CMSAdminBaseSink()
{
    TRACE0("MetaUtil: CMSAdminBaseSink::~CMSAdminBaseSink !!!!!!!!!!!\n");

    // Make sure we disconnected
    if (m_fConnected) {
        Disconnect();
    }
}

/*===================================================================
CMSAdminBaseSink::SinkNotify

Entry point for notification events from the metabase admin base
object.  Forwards to the owning CMetaSchemaTable.

Parameters:
    dwMDNumElements  Number of change events
    pcoChangeObject  Array of change events

Returns: E_FAIL if m_pCMetaSchemaTable == NULL, S_OK on success
===================================================================*/
STDMETHODIMP CMSAdminBaseSink::SinkNotify(DWORD dwMDNumElements, MD_CHANGE_OBJECT pcoChangeObject[])
{
    TRACE0("MetaUtil: CMSAdminBaseSink::SinkNotify\n");
    ASSERT(IsValidAddress(pcoChangeObject, dwMDNumElements * sizeof(MD_CHANGE_OBJECT), FALSE));

    if (m_pCMetaSchemaTable == NULL) {
        return E_FAIL;
    }

    // Pass on the notification
    return m_pCMetaSchemaTable->SinkNotify(dwMDNumElements, pcoChangeObject);
}

/*===================================================================
CMSAdminBaseSink::ShutdownNotify

Entry point for the shutdown notification event from the metabase
admin base object.  Not supported by this sink.

Returns: HRESULT_FROM_WIN32(ERROR_NOT_SUPPORTED) always
===================================================================*/
STDMETHODIMP CMSAdminBaseSink::ShutdownNotify()
{
    return HRESULT_FROM_WIN32(ERROR_NOT_SUPPORTED);
}

/*===================================================================
CMSAdminBaseSink::Connect

Begins notification of change events.  Connects to the metabase
admin base object's IMSAdminBaseSink connection point.

Parameters:
    pIMeta             Pointer to the metabase admin base object
    pCMetaSchemaTable  Pointer to the schema table so that events
                       can be sent back to it.

Returns: E_NOINTERFACE if IMSAdminBase cannot be converted to
         IConnectionPointContainer, S_OK on success
===================================================================*/
HRESULT CMSAdminBaseSink::Connect(CComPtr<IMSAdminBase> &pIMeta, CMetaSchemaTable *pCMetaSchemaTable)
{
    ASSERT(pIMeta.p != NULL);
    ASSERT_POINTER(pCMetaSchemaTable, CMetaSchemaTable);

    HRESULT hr;

    if (m_fConnected) {
        Disconnect();
    }

    m_pCMetaSchemaTable = pCMetaSchemaTable;

    // Get the connection container (QI via CComQIPtr assignment)
    CComQIPtr<IConnectionPointContainer, &IID_IConnectionPointContainer> pIMetaConnContainer;
    pIMetaConnContainer = pIMeta;
    if (pIMetaConnContainer == NULL) {
        // Failure to change interfaces
        return E_NOINTERFACE;
    }

    // Get the connection point
    hr = pIMetaConnContainer->FindConnectionPoint(IID_IMSAdminBaseSink, &m_pIMetaConn);
    if (FAILED(hr)) {
        return hr;
    }

    // Advise (connect).  Self-AddRef keeps the sink alive while the
    // connection point holds it.
    // NOTE(review): the return value of Advise() is not checked; if it
    // fails, this AddRef() is never balanced -- confirm intended.
    AddRef();
    m_pIMetaConn->Advise((IMSAdminBaseSink *) this, &m_dwCookie);

    m_fConnected = TRUE;
    return S_OK;
}

/*===================================================================
CMSAdminBaseSink::Disconnect

Stops notification of change events.  Disconnects from the metabase
admin base object and drops the back-pointer to the schema table.
===================================================================*/
void CMSAdminBaseSink::Disconnect()
{
    if (!m_fConnected) {
        // Not connected
        return;
    }

    // Stop notification
    m_pIMetaConn->Unadvise(m_dwCookie);

    // No longer needed (CComPtr release)
    m_pIMetaConn = NULL;
    m_pCMetaSchemaTable = NULL;

    m_fConnected = FALSE;
}
SaivNator/MIPTP_cpp
include/TimerWrapper.hpp
//TimerWrapper.hpp //Author: <NAME> #pragma once #ifndef TimerWrapper_HEADER #define TimerWrapper_HEADER #include <iostream> #include <sstream> //Linux stuff #include <sys/timerfd.h> #include <unistd.h> //local #include "LinuxException.hpp" class TimerWrapper { public: /* Constructor. */ TimerWrapper(); /* Set expiration from now. Parameters: ms time Return: void */ void setExpirationFromNow(int ms); /* Read expired from fd. Parameters: Return: number of times the timer has expired since armed */ int readExpiredTime(); /* Get fd. Parameters: Return: fd */ int getFd(); /* Close (release resources). Parameters: Return: void */ void closeResources(); /* Check if fd is closed. Parameters: Return: if true close, else valid */ bool isClosed(); private: int m_fd; bool m_closed; /* timespec to ms. */ int timespecToMs(struct timespec & t); /* ms to timespec. */ struct timespec msToTimespec(int ms); }; #endif // !TimerWrapper_HEADER
tushev/Matlab-Editor-Plugin
src/at/mep/gui/ClickHistory.java
package at.mep.gui;

import at.mep.editor.EditorWrapper;
import at.mep.prefs.Settings;
import com.mathworks.matlab.api.editor.Editor;

import java.util.ArrayList;
import java.util.List;

/**
 * Created by <NAME> on 2017-10-18.
 *
 * Browser-style navigation history of caret positions across editors.
 * {@link #add(Editor)} records a location; {@link #locationPrevious()} /
 * {@link #locationNext()} move back and forward through the recorded
 * positions. The list is capped at the "ch.sizeMax" setting.
 */
public class ClickHistory {
    private static ClickHistory INSTANCE;

    private List<CHPair> history = new ArrayList<>(Settings.getPropertyInt("ch.sizeMax"));

    /** index for history user is currently visiting; -1 when history is empty */
    private int currentlyOn = -1;

    private ClickHistory() {
    }

    /** Lazily created singleton accessor. */
    public static ClickHistory getINSTANCE() {
        if (INSTANCE == null) {
            INSTANCE = new ClickHistory();
        }
        return INSTANCE;
    }

    /**
     * Records the caret location of the given editor as the newest history
     * entry. If the user had navigated back, the "forward" tail (entries
     * AFTER the current one) is discarded first, like a browser history.
     */
    public void add(Editor editor) {
        int pos = EditorWrapper.getCaretPosition(editor);
        CHPair chPair = new CHPair(editor, pos);

        // don't add if the location currently visited is the same as the one being added
        if (currentlyOn >= 0 && currentlyOn < history.size() && history.get(currentlyOn).eq(chPair)) {
            return;
        }

        // If we are somewhere in the middle of history, drop everything AFTER
        // the current entry.
        // BUG FIX: the original used subList(0, currentlyOn), which also
        // discarded the entry the user is currently on (off-by-one against the
        // documented intent). Also copy into a fresh ArrayList so `history`
        // never aliases a subList *view* of a previous list.
        if (currentlyOn + 1 < history.size()) {
            history = new ArrayList<>(history.subList(0, currentlyOn + 1));
        }

        history.add(chPair);

        // BUG FIX: in the original, trimList() was unreachable (every branch
        // above returned first), so the history could grow without bound.
        trimList();
        resetCurrentlyOnToSize();
    }

    /** Navigates to the previous (older) recorded location, if any. */
    public void locationPrevious() {
        currentlyOn -= 1;
        if (currentlyOn < 0) {
            currentlyOn = 0;
        }
        if (history.size() < 1) {
            resetCurrentlyOnToSize();
            return;
        }
        cleanupHistory();
        // cleanupHistory() may have removed every stale entry
        if (history.isEmpty()) {
            resetCurrentlyOnToSize();
            return;
        }
        CHPair chPair = history.get(currentlyOn);
        EditorWrapper.bringToFront(chPair.editor);
        Editor editor = EditorWrapper.goToPositionAndHighlight(chPair.editor, chPair.pos, chPair.pos);
        chPair.setEditor(editor);
    }

    /** Navigates to the next (newer) recorded location, if any. */
    public void locationNext() {
        currentlyOn += 1;
        if (currentlyOn >= history.size()) {
            resetCurrentlyOnToSize();
        }
        // BUG FIX: the original fell through with an empty history and called
        // history.get(-1), throwing IndexOutOfBoundsException.
        if (history.isEmpty()) {
            resetCurrentlyOnToSize();
            return;
        }
        cleanupHistory();
        if (history.isEmpty()) {
            resetCurrentlyOnToSize();
            return;
        }
        CHPair chPair = history.get(currentlyOn);
        EditorWrapper.bringToFront(chPair.editor);
        Editor editor = EditorWrapper.goToPositionAndHighlight(chPair.editor, chPair.pos, chPair.pos);
        chPair.setEditor(editor);
    }

    /** Drops the oldest entry once the configured maximum size is reached. */
    private void trimList() {
        if (history.size() >= Settings.getPropertyInt("ch.sizeMax")) {
            history.remove(0);
            resetCurrentlyOnToSize();
        }
    }

    /** Removes entries whose editor/file is no longer usable; clamps the cursor. */
    private void cleanupHistory() {
        for (int i = history.size() - 1; i >= 0; i--) {
            CHPair chPair = history.get(i);
            if (!chPair.isValid()) {
                history.remove(chPair);
            }
        }
        if (currentlyOn >= history.size()) {
            resetCurrentlyOnToSize();
        }
    }

    /** Points the cursor at the newest entry (-1 when the history is empty). */
    public void resetCurrentlyOnToSize() {
        currentlyOn = history.size() - 1;
    }

    /** An (editor, caret position) pair recorded in the history. */
    private class CHPair {
        private Editor editor = null;
        private int pos = -1;

        public CHPair(Editor editor, int pos) {
            this.editor = editor;
            this.pos = pos;
        }

        public void setEditor(Editor editor) {
            this.editor = editor;
        }

        public Editor getEditor() {
            return editor;
        }

        // NOTE(review): "open OR file does not exist" looks inverted
        // (one would expect open AND file exists) -- behavior kept as-is,
        // confirm intent against cleanupHistory() usage.
        public boolean isValid() {
            return EditorWrapper.isopen(editor) || !EditorWrapper.getFile(editor).exists();
        }

        public int getPos() {
            return pos;
        }

        /** True when both the editor reference and position match. */
        public boolean eq(CHPair chPair) {
            return this.editor == chPair.editor && this.pos == chPair.pos;
        }
    }
}
steakknife/pcgeos
Tools/goc/strwid.c
/*********************************************************************** * PROJECT: PCGEOS * MODULE: UIC -- string widths * FILE: strwid.c * * AUTHOR: <NAME> * * ROUTINES: * Name Description * ---- ----------- * * DESCRIPTION: * Output routines for UIC * ***********************************************************************/ #ifndef lint static char *rcsid = "$Id: strwid.c,v 1.6 97/09/22 23:40:33 allen Exp $"; #endif lint #include <config.h> #include "goc.h" #include "strwid.h" #include "scan.h" #include <ctype.h> #include <compat/string.h> int Berkeley9WidthTable[] = { 4, 3, 6, 6, 8, 11, 8, 4, 5, 5, 6, 7, 4, 6, 3, 6, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 3, 4, 6, 6, 6, 7, 10, 7, 7, 7, 7, 6, 6, 7, 7, 3, 7, 8, 6, 9, 8, 7, 7, 7, 7, 7, 7, 7, 7, 9, 7, 7, 6, 4, 6, 4, 6, 6, 4, 7, 7, 7, 7, 7, 6, 7, 7, 3, 5, 7, 3, 11, 7, 7, 7, 7, 6, 7, 5, 7, 7, 11, 7, 7, 6, 5, 3, 5, 7, 0, 7, 7, 7, 6, 8, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 3, 3, 3, 3, 7, 7, 7, 7, 7, 7, 6, 7, 7, 7, 5, 6, 8, 7, 8, 6, 9, 9, 11, 11, 12, 4, 4, 7, 10, 9, 12, 7, 5, 5, 7, 7, 7, 8, 9, 10, 5, 7, 7, 10, 11, 8, 7, 3, 7, 9, 5, 8, 8, 9, 10, 9, 4, 7, 7, 7, 10, 11, 7, 9, 7, 7, 4, 4, 7, 10, 7, 7, 8, 9, 5, 5, 7, 7, 5, 4, 4, 7, 14, 7, 6, 7, 6, 6, 3, 3, 3, 3, 7, 7, 9, 7, 7, 7, 7, 3, 4, 7, 6, 5, 3, 5, 3, 5, 3, 4 }; int Berkeley10WidthTable[] = { 4, 3, 6, 8, 7, 11, 8, 4, 5, 5, 8, 7, 4, 5, 3, 6, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 3, 4, 5, 6, 5, 7, 10, 7, 7, 7, 7, 6, 6, 7, 7, 3, 7, 8, 6, 10, 8, 7, 7, 7, 7, 8, 7, 7, 7, 10, 8, 7, 7, 4, 6, 4, 6, 7, 4, 7, 7, 7, 7, 7, 6, 7, 7, 3, 5, 7, 3, 11, 7, 7, 7, 7, 6, 7, 5, 7, 7, 11, 8, 7, 6, 5, 3, 5, 7, 0, 7, 7, 7, 6, 8, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 3, 3, 4, 3, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 5, 7, 8, 8, 8, 6, 9, 9, 11, 11, 14, 3, 4, 7, 10, 9, 13, 7, 6, 6, 7, 8, 7, 8, 8, 9, 5, 6, 6, 8, 11, 8, 7, 3, 6, 9, 5, 9, 9, 9, 9, 9, 5, 7, 7, 7, 10, 11, 6, 9, 7, 7, 4, 4, 7, 9, 6, 7, 8, 9, 5, 5, 7, 7, 4, 3, 4, 8, 14, 7, 6, 7, 6, 6, 3, 3, 3, 3, 7, 7, 9, 7, 7, 7, 7, 3, 4, 7, 6, 5, 2, 4, 3, 5, 3, 4 }; /* * Name: 
GetStringWidth * Author: <NAME> * * Synopsis: Calculate the width of a string * */ int GetStringWidth(char *string, int widthTable[], int ptsize) { int width = 0; unsigned short c; while (c = Scan_ScanSJISChar((unsigned char **) &string)) { /* * In Pizza (the only current DBCS release), we use 12 and 16 point, * but the values are in the same place. We don't have a 12 point * yet, so just stuff 0 in for now, which will cause the width to * be calculated when needed. */ if (dbcsRelease && ptsize == 10) { if (c < 256) { width += 8; } else { width += 16; } } else if (c < 256) { width += widthTable[c-32]; } else { /* We don't have the width here, so we stuff in a huge width here so that our call will drop the total width and force it to be calculated when needed. */ width = 10000; /* big-enough width which will eventually be dropped by our caller */ } } return(width); } /* * Name: CalcHintedWidth * Author: <NAME> * * Synopsis: Calculate the hinted width for a string * */ int CalcHintedWidth(char *string) { int width9, width10; if ((string == NULL) || (*string == '\0')) { return(0); } width9 = GetStringWidth(string, Berkeley9WidthTable, 9); if (width9 > MAX_WIDTH_9) { return(0); } width10 = GetStringWidth(string, Berkeley10WidthTable, 10); if (width9 > MAX_WIDTH_10) { return(0); } return (VMCW_HINTED | (width9 << VMCW_BERKELEY_9_OFFSET) | (width10 << VMCW_BERKELEY_10_OFFSET)); }
iceant/spring-jdbc-dao-generator
spring-jdbc-dao-generator-webapp/src/main/java/com/github/iceant/spring/jdbc/dao/generator/webapp/utils/AppUtil.java
package com.github.iceant.spring.jdbc.dao.generator.webapp.utils;

import com.github.iceant.spring.jdbc.dao.generator.webapp.vos.JsonResponse;
import org.springframework.boot.system.ApplicationHome;

import java.io.File;
import java.util.Date;
import java.util.function.Function;

/**
 * Application-level helper utilities: id generation, timestamps,
 * application-home lookup and uniform JSON response wrapping.
 */
public class AppUtil {

    /** Snowflake-style id source, epoch anchored at 2020-01-01. */
    private static SequenceGenerator sequenceGenerator =
            new SequenceGenerator(SequenceGenerator.createCustomEpoch(2020, 1, 1));

    /**
     * Resolves the directory the Spring Boot application was launched from.
     *
     * @param springBootApplicationClass the {@code @SpringBootApplication} class
     * @return the application home directory
     */
    public static File getAppHome(Class springBootApplicationClass) {
        return new ApplicationHome(springBootApplicationClass).getDir();
    }

    /** @return the next unique id from the shared sequence generator */
    public static long id() {
        return sequenceGenerator.nextId();
    }

    /** @return the current timestamp */
    public static Date now() {
        return new Date();
    }

    /**
     * Runs {@code fn} with {@code objects} (passed as a single Object[] argument)
     * and wraps the outcome in a {@link JsonResponse}: on success statusCode 1,
     * message "success" and the result stored under the "result" key; on any
     * exception statusCode -1 with the stringified error as the message.
     *
     * @param fn      the work to execute
     * @param objects arguments forwarded to {@code fn} as one array
     * @return the populated response, never null
     */
    public static JsonResponse makeResponse(Function fn, Object... objects) {
        JsonResponse response = new JsonResponse();
        try {
            Object result = fn.apply(objects);
            response.setStatusCode(1);
            response.setStatus(true);
            response.setMessage("success");
            response.put("result", result);
        } catch (Exception err) {
            response.setStatusCode(-1);
            response.setStatus(false);
            response.setMessage(SpringUtil.exceptionToString(err));
        }
        return response;
    }
}
matya/s6-dns
src/include/s6-dns/s6dns-resolve.h
/* ISC license. */ #ifndef S6DNS_RESOLVE_H #define S6DNS_RESOLVE_H #include <errno.h> #include <skalibs/uint16.h> #include <skalibs/uint32.h> #include <skalibs/tai.h> #include <skalibs/stralloc.h> #include <skalibs/genalloc.h> #include <skalibs/ip46.h> #include <s6-dns/s6dns-constants.h> #include <s6-dns/s6dns-ip46.h> #include <s6-dns/s6dns-domain.h> #include <s6-dns/s6dns-message.h> #include <s6-dns/s6dns-engine.h> #include <s6-dns/s6dns-rci.h> /* Synchronous DNS resolution primitives. The non-reentrant functions are just wrappers around the reentrant ones, using globals for the parameters the user doesn't care about: s6dns_engine_here for the query storage (dt), s6dns_debughook_zero meaning no debugging needed, s6dns_rci_here for the resolv.conf information (qualification rules and initial cache IPs) */ /* The basic s6dns_engine wrapper loop: takes an initted dt and resolves it. */ #define s6dns_resolve_loop(deadline, stamp) s6dns_resolve_loop_r(&s6dns_engine_here, (deadline), stamp) #define s6dns_resolve_loop_g(deadline) s6dns_resolve_loop((deadline), &STAMP) #define s6dns_resolve_loop_r(dt, deadline, stamp) (s6dns_resolven_loop(dt, 1, 2, deadline, stamp) < 0 ? 0 : (dt)->status ? (errno = (dt)->status, 0) : 1) #define s6dns_resolve_loop_r_g(dt, deadline) s6dns_resolve_loop(dt, (deadline), &STAMP) /* QoL functions for single-domain synchronous resolution. */ /* The innermost one: Initializes the dt with the given data (d, qtype), then calls the loop. 
*/ #define s6dns_resolve_core(d, qtype, deadline, stamp) s6dns_resolve_core_r(d, qtype, &s6dns_engine_here, &s6dns_rci_here.servers, &s6dns_debughook_zero, deadline, stamp) #define s6dns_resolve_core_g(d, qtype, deadline) s6dns_resolve_core(d, qtype, (deadline), &STAMP) extern int s6dns_resolve_core_r (s6dns_domain_t const *, uint16, s6dns_engine_t_ref, s6dns_ip46list_t const *, s6dns_debughook_t const *, tain_t const *, tain_t *) ; #define s6dns_resolve_core_r_g(d, qtype, dt, servers, dbh, deadline) s6dns_resolve_core_r(d, qtype, dt, servers, dbh, (deadline), &STAMP) /* Just above. Calls s6dns_resolve_core() then feeds the result to s6dns_message_parse(). Returns -1 if a resolving or parsing error occurs. Returns 0 if everything works but the result is empty for some reason (i.e. nxdomain). Returns 1 if everything works and there's an actual answer. */ #define s6dns_resolve_parse(d, qtype, parsefunc, parsedata, deadline, stamp) s6dns_resolve_parse_r(d, qtype, parsefunc, parsedata, &s6dns_engine_here, &s6dns_rci_here.servers, &s6dns_debughook_zero, deadline, stamp) #define s6dns_resolve_parse_g(d, qtype, parsefunc, parsedata, deadline) s6dns_resolve_parse(d, qtype, parsefunc, parsedata, (deadline), &STAMP) extern int s6dns_resolve_parse_r (s6dns_domain_t const *, uint16, s6dns_message_rr_func_t_ref, void *, s6dns_engine_t_ref, s6dns_ip46list_t const *, s6dns_debughook_t const *, tain_t const *, tain_t *) ; #define s6dns_resolve_parse_r_g(d, qtype, parsefunc, parsedata, dt, servers, dbh, deadline) s6dns_resolve_parse_r(d, qtype, parsefunc, parsedata, dt, servers, dbh, (deadline), &STAMP) /* Resolution without qualification. Encoding/decoding included. 
*/ #define s6dns_resolvenoq(name, len, qtype, parsefunc, parsedata, deadline, stamp) s6dns_resolvenoq_r(name, len, qtype, parsefunc, parsedata, &s6dns_engine_here, &s6dns_rci_here.servers, &s6dns_debughook_zero, deadline, stamp) #define s6dns_resolvenoq_g(name, len, qtype, parsefunc, parsedata, deadline) s6dns_resolvenoq(name, len, qtype, parsefunc, parsedata, (deadline), &STAMP) extern int s6dns_resolvenoq_r (char const *, unsigned int, uint16, s6dns_message_rr_func_t_ref, void *, s6dns_engine_t_ref, s6dns_ip46list_t const *, s6dns_debughook_t const *, tain_t const *, tain_t *) ; #define s6dns_resolvenoq_r_g(name, len, qtype, parsefunc, parsedata, dt, servers, dbh, deadline) s6dns_resolvenoq_r(name, len, qtype, parsefunc, parsedata, dt, servers, dbh, (deadline), &STAMP) /* Resolution with qualification: Get a qualification list from a name, then resolve the list in parallel. */ #define s6dns_resolveq(name, len, qtype, parsefunc, parsedata, deadline, stamp) s6dns_resolveq_r(name, len, qtype, parsefunc, parsedata, &s6dns_rci_here, &s6dns_debughook_zero, deadline, stamp) #define s6dns_resolveq_g(name, len, qtype, parsefunc, parsedata, deadline) s6dns_resolveq(name, len, qtype, parsefunc, parsedata, (deadline), &STAMP) extern int s6dns_resolveq_r (char const *, unsigned int, uint16, s6dns_message_rr_func_t_ref, void *, s6dns_rci_t const *, s6dns_debughook_t const *, tain_t const *, tain_t *) ; #define s6dns_resolveq_r_g(name, len, qtype, parsefunc, parsedata, rci, dbh, deadline) s6dns_resolveq_r(name, len, qtype, parsefunc, parsedata, rci, dbh, (deadline), &STAMP) /* The resolution primitive that calls s6dns_resolvenoq() if the qualif flag is cleared and s6dns_resolveq() if it is set. 
*/ #define s6dns_resolve(name, len, qtype, parsefunc, parsedata, qualif, deadline, stamp) s6dns_resolve_r(name, len, qtype, parsefunc, parsedata, qualif, &s6dns_engine_here, &s6dns_rci_here, &s6dns_debughook_t const *, deadline, stamp) #define s6dns_resolve_g(name, len, qtype, parsefunc, parsedata, qualif, deadline) s6dns_resolve(name, len, qtype, parsefunc, parsedata, qualif, (deadline), &STAMP) #define s6dns_resolve_r(name, len, qtype, parsefunc, parsedata, qualif, dt, rci, dbh, deadline, stamp) ((qualif) ? s6dns_resolveq_r(name, len, qtype, parsefunc, parsedata, rci, dbh, deadline, stamp) : s6dns_resolvenoq_r(name, len, qtype, parsefunc, parsedata, dt, &(rci)->servers, dbh, deadline, stamp)) #define s6dns_resolve_r_g(name, len, qtype, parsefunc, parsedata, qualif, dt, rci, dbh, deadline) s6dns_resolve_r(name, len, qtype, parsefunc, parsedata, qualif, dt, rci, dbh, (deadline), &STAMP) /* How to perform both AAAA and A queries at the same time */ #define s6dns_resolvenoq_aaaaa(ips, name, len, deadline, stamp) s6dns_resolvenoq_aaaaa_r(ips, name, len, &s6dns_rci_here.servers, &s6dns_debughook_zero, deadline, stamp) #define s6dns_resolvenoq_aaaaa_g(ips, name, len, deadline) s6dns_resolvenoq_aaaaa(ips, name, len, (deadline), &STAMP) extern int s6dns_resolvenoq_aaaaa_r(genalloc *, char const *, unsigned int, s6dns_ip46list_t const *, s6dns_debughook_t const *, tain_t const *, tain_t *) ; #define s6dns_resolvenoq_aaaaa_r_g(ips, name, len, servers, dbh, deadline) s6dns_resolvenoq_aaaaa_r(ips, name, len, servers, dbh, (deadline), &STAMP) #define s6dns_resolveq_aaaaa(ips, name, len, deadline, stamp) s6dns_resolveq_aaaaa_r(ips, name, len, &s6dns_rci_here, &s6dns_debughook_zero, deadline, stamp) #define s6dns_resolveq_aaaaa_g(ips, name, len, deadline) s6dns_resolveq_aaaaa(ips, name, len, (deadline), &STAMP) extern int s6dns_resolveq_aaaaa_r(genalloc *, char const *, unsigned int, s6dns_rci_t const *, s6dns_debughook_t const *, tain_t const *, tain_t *) ; #define 
s6dns_resolveq_aaaaa_r_g(ips, name, len, rci, dbh, deadline) s6dns_resolvenoq_aaaaa_r(ips, name, len, rci, dbh, (deadline), &STAMP) /* Some high-level functions. Queries are automatically translated to domain form and encoded. Domains returned in a stralloc are automatically decoded. Warning: decoded domains all start with '.' The int flag decides if qualification is needed or not. */ /* For A fields: the stralloc answer has 4 chars per IP4. For AAAA fields: the stralloc answer has 16 chars per IP6. For TXT fields (and other mpag stuff): the stralloc contains the strings and the genalloc contains a list of offsets into that stralloc. For other fields: the result is stored in a genalloc of the appropriate type. */ #define s6dns_resolve_a(ips, name, len, qualif, deadline, stamp) s6dns_resolve_a_r(ips, name, len, qualif, &s6dns_engine_here, &s6dns_rci_here, &s6dns_debughook_zero, deadline, stamp) #define s6dns_resolve_a_g(ips, name, len, qualif, deadline) s6dns_resolve_a(ips, name, len, qualif, (deadline), &STAMP) #define s6dns_resolve_a_r(ips, name, len, qualif, dt, rci, dbh, deadline, stamp) s6dns_resolve_r(name, len, S6DNS_T_A, &s6dns_message_parse_answer_a, (ips), qualif, dt, rci, dbh, deadline, stamp) #define s6dns_resolve_a_r_g(ips, name, len, qualif, dt, rci, dbh, deadline) s6dns_resolve_a_r(ips, name, len, qualif, dt, rci, dbh, (deadline), &STAMP) #define s6dns_resolve_aaaa(ip6s, name, len, qualif, deadline, stamp) s6dns_resolve_aaaa_r(ip6s, name, len, qualif, &s6dns_engine_here, &s6dns_rci_here, &s6dns_debughook_zero, deadline, stamp) #define s6dns_resolve_aaaa_g(ip6s, name, len, qualif, deadline) s6dns_resolve_aaaa(ip6s, name, len, qualif, (deadline), &STAMP) #define s6dns_resolve_aaaa_r(ip6s, name, len, qualif, dt, rci, dbh, deadline, stamp) s6dns_resolve_r(name, len, S6DNS_T_AAAA, &s6dns_message_parse_answer_aaaa, (ip6s), qualif, dt, rci, dbh, deadline, stamp) #define s6dns_resolve_aaaa_r_g(ip6s, name, len, qualif, dt, rci, dbh, deadline) 
s6dns_resolve_aaaa_r(ip6s, name, len, qualif, dt, rci, dbh, (deadline), &STAMP) #define s6dns_resolve_aaaaa(ips, name, len, qualif, deadline, stamp) s6dns_resolve_aaaaa_r(ips, name, len, qualif, &s6dns_rci_here, &s6dns_debughook_zero, deadline, stamp) #define s6dns_resolve_aaaaa_g(ips, name, len, qualif, deadline) s6dns_resolve_aaaaa(ips, name, len, qualif, (deadline), &STAMP) #define s6dns_resolve_aaaaa_r(ips, name, len, qualif, rci, dbh, deadline, stamp) ((qualif) ? s6dns_resolveq_aaaaa_r(ips, name, len, rci, dbh, deadline, stamp) : s6dns_resolvenoq_aaaaa_r(ips, name, len, &(rci)->servers, dbh, deadline, stamp)) #define s6dns_resolve_aaaaa_r_g(ips, name, len, qualif, rci, dbh, deadline) s6dns_resolve_aaaaa_r(ips, name, len, qualif, rci, dbh, (deadline), &STAMP) #define s6dns_resolve_ptr(ds, name, len, deadline, stamp) s6dns_resolve_ptr_r(ds, name, len, &s6dns_engine_here, &s6dns_rci_here.servers, &s6dns_debughook_zero, deadline, stamp) #define s6dns_resolve_ptr_g(ds, name, len, deadline) s6dns_resolve_ptr(ds, name, len, (deadline), &STAMP) #define s6dns_resolve_ptr_r(ds, name, len, dt, servers, dbh, deadline, stamp) s6dns_resolvenoq_r(name, len, S6DNS_T_PTR, &s6dns_message_parse_answer_domain, (ds), dt, servers, dbh, deadline, stamp) #define s6dns_resolve_ptr_r_g(ds, name, len, dt, servers, dbh, deadline) s6dns_resolve_ptr_r(ds, name, len, dt, servers, dbh, (deadline), &STAMP) #define s6dns_resolve_name4(ds, ip, deadline, stamp) s6dns_resolve_name4_r(ds, ip, &s6dns_engine_here, &s6dns_rci_here.servers, &s6dns_debughook_zero, deadline, stamp) #define s6dns_resolve_name4_g(ds, ip, deadline) s6dns_resolve_name4(ds, ip, (deadline), &STAMP) extern int s6dns_resolve_name4_r (genalloc *, char const *, s6dns_engine_t_ref, s6dns_ip46list_t const *, s6dns_debughook_t const *, tain_t const *, tain_t *) ; #define s6dns_resolve_name4_r_g(ds, ip, dt, servers, dbh, deadline) s6dns_resolve_name4_r(ds, ip, dt, servers, dbh, (deadline), &STAMP) #define s6dns_resolve_name6(ds, ip6, 
deadline, stamp) s6dns_resolve_name6_r(ds, ip6, &s6dns_engine_here, &s6dns_rci_here.servers, &s6dns_debughook_zero, deadline, stamp) #define s6dns_resolve_name6_g(ds, ip6, deadline) s6dns_resolve_name6(ds, ip6, (deadline), &STAMP) extern int s6dns_resolve_name6_r (genalloc *, char const *, s6dns_engine_t_ref, s6dns_ip46list_t const *, s6dns_debughook_t const *, tain_t const *, tain_t *) ; #define s6dns_resolve_name6_r_g(ds, ip6, dt, servers, dbh, deadline) s6dns_resolve_name6_r(ds, ip6, dt, servers, dbh, (deadline), &STAMP) #define s6dns_resolve_name46(ds, i, deadline, stamp) s6dns_resolve_name46_r(ds, i, &s6dns_engine_here, &s6dns_rci_here.servers, &s6dns_debughook_zero, deadline, stamp) #define s6dns_resolve_name46_g(ds, i, deadline) s6dns_resolve_name46(ds, i, (deadline), &STAMP) #define s6dns_resolve_name46_r(ds, i, dt, servers, dbh, deadline, stamp) (ip46_is6(i) ? s6dns_resolve_name6_r(ds, (i)->ip, dt, servers, dbh, deadline, stamp) : s6dns_resolve_name4_r(ds, (i)->ip, dt, servers, dbh, deadline, stamp)) #define s6dns_resolve_name46_r_g(ds, i, dt, servers, dbh, deadline) s6dns_resolve_name46_r(ds, i, dt, servers, dbh, (deadline), &STAMP) #define s6dns_resolve_txt(sa, offsets, name, len, qualif, deadline, stamp) s6dns_resolve_txt_r(sa, offsets, name, len, qualif, &s6dns_engine_here, &s6dns_rci_here, &s6dns_debughook_zero, deadline, stamp) #define s6dns_resolve_txt_g(sa, offsets, name, len, qualif, deadline) s6dns_resolve_txt(sa, offsets, name, len, qualif, (deadline), &STAMP) #define s6dns_resolve_txt_r(sa, offsets, name, len, qualif, dt, rci, dbh, deadline, stamp) s6dns_resolve_mpag_r(sa, offsets, name, len, S6DNS_T_TXT, &s6dns_message_parse_answer_strings, qualif, dt, rci, dbh, deadline, stamp) #define s6dns_resolve_txt_r_g(sa, offsets, name, len, qualif, dt, rci, dbh, deadline) s6dns_resolve_txt_r(sa, offsets, name, len, qualif, dt, rci, dbh, (deadline), &STAMP) #define s6dns_resolve_mx(mxs, name, len, qualif, deadline, stamp) s6dns_resolve_mx_r(mxs, name, 
len, qualif, &s6dns_engine_here, &s6dns_rci_here, &s6dns_debughook_zero, deadline, stamp) #define s6dns_resolve_mx_g(mxs, name, len, qualif, deadline) s6dns_resolve_mx(mxs, name, len, qualif, (deadline), &STAMP) #define s6dns_resolve_mx_r(mxs, name, len, qualif, dt, rci, dbh, deadline, stamp) s6dns_resolve_r(name, len, S6DNS_T_MX, &s6dns_message_parse_answer_mx, (mxs), qualif, dt, rci, dbh, deadline, stamp) #define s6dns_resolve_mx_r_g(mxs, name, len, qualif, dt, rci, dbh, deadline) s6dns_resolve_mx_r(mxs, name, len, qualif, dt, rci, dbh, (deadline), &STAMP) #define s6dns_resolve_ns(ds, name, len, qualif, deadline, stamp) s6dns_resolve_ns_r(ds, name, len, qualif, &s6dns_engine_here, &s6dns_rci_here, &s6dns_debughook_zero, deadline, stamp) #define s6dns_resolve_ns_g(ds, name, len, qualif, deadline) s6dns_resolve_ns(ds, name, len, qualif, (deadline), &STAMP) #define s6dns_resolve_ns_r(ds, name, len, qualif, dt, rci, dbh, deadline, stamp) s6dns_resolve_dpag_r(ds, name, len, S6DNS_T_NS, qualif, dt, rci, dbh, deadline, stamp) #define s6dns_resolve_ns_r_g(ds, name, len, qualif, dt, rci, dbh, deadline) s6dns_resolve_ns_r(ds, name, len, qualif, dt, rci, dbh, (deadline), &STAMP) #define s6dns_resolve_cname(ds, name, len, qualif, deadline, stamp) s6dns_resolve_cname_r(ds, name, len, qualif, &s6dns_engine_here, &s6dns_rci_here, &s6dns_debughook_zero, deadline, stamp) #define s6dns_resolve_cname_g(ds, name, len, qualif, deadline) s6dns_resolve_cname(ds, name, len, qualif, (deadline), &STAMP) #define s6dns_resolve_cname_r(ds, name, len, qualif, dt, rci, dbh, deadline, stamp) s6dns_resolve_dpag_r(ds, name, len, S6DNS_T_CNAME, qualif, dt, rci, dbh, deadline, stamp) #define s6dns_resolve_cname_r_g(ds, name, len, qualif, dt, rci, dbh, deadline) s6dns_resolve_cname_r(ds, name, len, qualif, dt, rci, dbh, (deadline), &STAMP) #define s6dns_resolve_hinfo(hinfos, name, len, qualif, deadline, stamp) s6dns_resolve_hinfo_r(hinfos, name, len, qualif, &s6dns_engine_here, &s6dns_rci_here, 
&s6dns_debughook_zero, deadline, stamp) #define s6dns_resolve_hinfo_g(hinfos, name, len, qualif, deadline) s6dns_resolve_hinfo(hinfos, name, len, qualif, (deadline), &STAMP) #define s6dns_resolve_hinfo_r(hinfos, name, len, qualif, dt, rci, dbh, deadline, stamp) s6dns_resolve_r(name, len, S6DNS_T_HINFO, &s6dns_message_parse_answer_hinfo, (hinfos), qualif, dt, rci, dbh, deadline, stamp) #define s6dns_resolve_hinfo_r_g(hinfos, name, len, qualif, dt, rci, dbh, deadline) s6dns_resolve_hinfo_r(hinfos, name, len, qualif, dt, rci, dbh, (deadline), &STAMP) #define s6dns_resolve_srv(srvs, name, len, qualif, deadline, stamp) s6dns_resolve_srv_r(srvs, name, len, qualif, &s6dns_engine_here, &s6dns_rci_here, &s6dns_debughook_zero, deadline, stamp) #define s6dns_resolve_srv_g(srvs, name, len, qualif, deadline) s6dns_resolve_srv(srvs, name, len, qualif, (deadline), &STAMP) #define s6dns_resolve_srv_r(srvs, name, len, qualif, dt, rci, dbh, deadline, stamp) s6dns_resolve_r(name, len, S6DNS_T_SRV, &s6dns_message_parse_answer_srv, (srvs), qualif, dt, rci, dbh, deadline, stamp) #define s6dns_resolve_srv_r_g(srvs, name, len, qualif, dt, rci, dbh, deadline) s6dns_resolve_srv_r(srvs, name, len, qualif, dt, rci, dbh, (deadline), &STAMP) #define s6dns_resolve_soa(soas, name, len, qualif, deadline, stamp) s6dns_resolve_soa_r(soas, name, len, qualif, &s6dns_engine_here, &s6dns_rci_here, &s6dns_debughook_zero, deadline, stamp) #define s6dns_resolve_soa_g(soas, name, len, qualif, deadline) s6dns_resolve_soa(soas, name, len, qualif, (deadline), &STAMP) #define s6dns_resolve_soa_r(soas, name, len, qualif, dt, rci, dbh, deadline, stamp) s6dns_resolve_r(name, len, S6DNS_T_SOA, &s6dns_message_parse_answer_soa, (soas), qualif, dt, rci, dbh, deadline, stamp) #define s6dns_resolve_soa_r_g(soas, name, len, qualif, dt, rci, dbh, deadline) s6dns_resolve_soa_r(soas, name, len, qualif, dt, rci, dbh, (deadline), &STAMP) /* Internals for the high-level functions. 
*/ /* dpag: structure for generic domain lists + rtype */ /* mpag: encoding variable-length information into storage+offsets */ #define s6dns_resolve_dpag(ds, name, len, qtype, qualif, deadline, stamp) s6dns_resolve_dpag_r(ds, name, len, qtype, qualif, &s6dns_engine_here, &s6dns_rci_here, &s6dns_debughook_zero, deadline, stamp) #define s6dns_resolve_dpag_g(ds, name, len, qtype, qualif, deadline) s6dns_resolve_dpag(ds, name, len, qtype, qualif, (deadline), &STAMP) extern int s6dns_resolve_dpag_r (genalloc *, char const *, unsigned int, uint16, int, s6dns_engine_t_ref, s6dns_rci_t const *, s6dns_debughook_t const *, tain_t const *, tain_t *) ; #define s6dns_resolve_dpag_r_g(ds, name, len, qtype, qualif, dt, rci, dbh, deadline) s6dns_resolve_dpag_r(ds, name, len, qtype, qualif, dt, rci, dbh, (deadline), &STAMP) #define s6dns_resolve_mpag(sa, offsets, name, len, qtype, parsefunc, qualif, deadline, stamp) s6dns_resolve_mpag_r(sa, offsets, name, len, qtype, parsefunc, qualif, &s6dns_engine_here, &s6dns_rci_here, &s6dns_debughook_zero, deadline, stamp) #define s6dns_resolve_mpag_g(sa, offsets, name, len, qtype, parsefunc, qualif, deadline) s6dns_resolve_mpag(sa, offsets, name, len, qtype, parsefunc, qualif, (deadline), &STAMP) extern int s6dns_resolve_mpag_r (stralloc *, genalloc *, char const *, unsigned int, uint16, s6dns_message_rr_func_t_ref, int, s6dns_engine_t_ref, s6dns_rci_t const *, s6dns_debughook_t const *, tain_t const *, tain_t *) ; #define s6dns_resolve_mpag_r_g(sa, offsets, name, len, qtype, parsefunc, qualif, dt, rci, dbh, deadline) s6dns_resolve_mpag_r(sa, offsets, name, len, qtype, parsefunc, qualif, dt, rci, dbh, (deadline), &STAMP) /* Functions for n-domain parallel resolution. s6dns_resolven_loop() is the core primitive. s6dns_resolven_parse() is built upon it. This API is still very limited in what it can do; for full asynchronous resolution, use the skadns library. 
*/ extern int s6dns_resolven_loop (s6dns_engine_t_ref, unsigned int, unsigned int, tain_t const *, tain_t *) ; #define s6dns_resolven_loop_g(list, n, zor, deadline) s6dns_resolven(list, n, zor, (deadline), &STAMP) typedef struct s6dns_resolve_s s6dns_resolve_t, *s6dns_resolve_t_ref ; struct s6dns_resolve_s { s6dns_domain_t q ; tain_t deadline ; s6dns_message_rr_func_t_ref parsefunc ; void *data ; uint32 options ; int status ; uint16 qtype ; } ; #define s6dns_resolven_parse(list, n, deadline, stamp) s6dns_resolven_parse_r(list, n, &s6dns_rci_here.servers, &s6dns_debughook_zero, deadline, stamp) #define s6dns_resolven_parse_g(list, n, deadline) s6dns_resolven_parse(list, n, (deadline), &STAMP) extern int s6dns_resolven_parse_r (s6dns_resolve_t_ref, unsigned int, s6dns_ip46list_t const *, s6dns_debughook_t const *, tain_t const *, tain_t *) ; #define s6dns_resolven_parse_r_g(list, n, servers, dbh, deadline) s6dns_resolven_parse_r(list, n, servers, dbh, (deadline), &STAMP) #endif
objectcomputing/check-ins
web-ui/src/components/feedback_recipient_card/FeedbackRecipientCard.stories.js
// Storybook stories (Component Story Format) for FeedbackRecipientCard.
import FeedbackRecipientCard from "./FeedbackRecipientCard";
import React, {useContext, useEffect} from "react";
import {AppContext, AppContextProvider} from "../../context/AppContext";
import {UPDATE_MEMBER_PROFILES} from "../../context/actions";

export default {
  title: 'FeedbackReqs/FeedbackRecipientCard',
  component: FeedbackRecipientCard,
  // Every story is wrapped in the app context provider so the card can read
  // member profiles via useContext(AppContext).
  decorators: [(Story) => {
    return (<AppContextProvider><Story/></AppContextProvider>);
  }]
};

// Fixture profile seeded into the context store for the story below.
const profile = {
  id: "12342345678",
  pdlID: 123,
  workEmail: "<EMAIL>",
  name: "<NAME>",
  title: "Software Engineer",
}

// Helper component that pushes the given profiles into the shared context
// store on mount (and whenever `profiles` changes). Renders nothing visible.
const SetProfiles = ({profiles}) => {
  const { dispatch } = useContext(AppContext);
  useEffect(() => {
    dispatch({
      type: UPDATE_MEMBER_PROFILES,
      payload: profiles
    });
  }, [profiles, dispatch]);
  return "";
}

// Story template: seed the context first, then render the card with the
// story args spread onto it.
const Template = (args) => (
  <React.Fragment>
    <SetProfiles profiles={args.profiles} />
    <FeedbackRecipientCard{...args} />
  </React.Fragment>
);

export const DefaultUser = Template.bind({});
DefaultUser.args = {
  profileId: profile.id,
  reason: "Recommended for being a local opossum",
  profiles:[profile]
};
karlek/lapis
src/kernel/fpu.c
#include "fpu.h"

/**
 * Set the FPU control word
 *
 * Executes FLDCW to load the x87 control word from memory.
 *
 * @param cw What to set the control word to.
 */
void set_fpu_cw(const uint16_t cw) {
    asm volatile("fldcw %0" ::"m"(cw));
}

/**
 * Enable the FPU
 *
 * We are assuming that we have one to begin with, but since we
 * only really operate on 686 machines, we do, so we're not
 * going to bother checking.
 */
void enable_fpu() {
    debug("enable_fpu: enabling fpu");
    /* NOTE(review): uint64_t is wider than CR4 on a 32-bit 686 target;
     * confirm this builds for x86-64, or narrow to uintptr_t. */
    uint64_t cr4;
    asm volatile("mov %%cr4, %0" : "=r"(cr4));
    /* 0x200 sets CR4 bit 9 (OSFXSR per the Intel SDM). */
    cr4 |= 0x200;
    asm volatile("mov %0, %%cr4" ::"r"(cr4));
    /* 0x37F: all x87 exceptions masked, extended precision,
     * round-to-nearest (the conventional init control word). */
    set_fpu_cw(0x37F);
    debug("enable_fpu: fpu enabled");
}
DPigeon/Money-Tree
backend/src/main/java/com/capstone/moneytree/model/decorator/StopOrderDecorator.java
package com.capstone.moneytree.model.decorator;

import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.EqualsAndHashCode;
import lombok.NoArgsConstructor;
import net.jacobpeterson.domain.alpaca.order.Order;

/**
 * The StopOrder Decorator for AlpacaAPI.
 * A stop (market) order is an order to buy or sell a security when its price moves past a
 * particular point, ensuring a higher probability of achieving a predetermined entry or exit price.
 */
@Builder
@NoArgsConstructor
@AllArgsConstructor
@EqualsAndHashCode(callSuper = false)
public class StopOrderDecorator extends OrderDecorator {

   /** The Alpaca order this decorator wraps. */
   private Order order;

   /**
    * Applies the stop-order decoration to the wrapped {@link Order}.
    * Currently a stub — no decoration is performed yet.
    */
   @Override
   public void decorate() {
      //Do something with order to make it a StopOrderDecorator
   }
}
kimberlyohqwr/64jsonc
src/beans/AppFile.js
<filename>src/beans/AppFile.js
import { File } from 'beans';

// AppFile models an "application" entry in the virtual file system: a File
// that owns a window component plus the window's placement/visibility state.
class AppFile extends File {
  // Running count of constructed AppFiles; used to cascade default window
  // positions (each new window offset by 20px from the previous one).
  static count = 0;

  constructor(WindowComponent, { pinned = true, defaultUrl } = {}, key, parent) {
    super(key, parent);
    AppFile.count++;
    Object.assign(this, {
      WindowComponent,
      pinned,
      defaultUrl,
      // Cascade: nth instance opens at (n*20, n*20).
      defaultLeft: AppFile.count * 20,
      defaultTop: AppFile.count * 20,
      lastUrl: null,
      opened: false,
      focused: false,
      zIndex: 1,
    });
  }

  // Current URL: the last visited one while open, otherwise the configured
  // default (falling back to "/<key>").
  get url() {
    return this.opened ? this.lastUrl : (this.defaultUrl || `/${this.key}`);
  }

  // Props consumed by the icon renderer for this entry.
  get iconProps() {
    return { iconKey: this.key };
  }
}

export default AppFile;
alphagov/notify-notifications-api
app/authentication/auth.py
<gh_stars>10-100
import uuid

from flask import current_app, g, request
from gds_metrics import Histogram
from notifications_python_client.authentication import (
    decode_jwt_token,
    get_token_issuer,
)
from notifications_python_client.errors import (
    TokenAlgorithmError,
    TokenDecodeError,
    TokenError,
    TokenExpiredError,
    TokenIssuerError,
)
from notifications_utils import request_helper
from sqlalchemy.orm.exc import NoResultFound

from app.serialised_models import SerialisedService

GENERAL_TOKEN_ERROR_MESSAGE = 'Invalid token: make sure your API token matches the example at https://docs.notifications.service.gov.uk/rest-api.html#authorisation-header'  # noqa

AUTH_DB_CONNECTION_DURATION_SECONDS = Histogram(
    'auth_db_connection_duration_seconds',
    'Time taken to get DB connection and fetch service from database',
)


class AuthError(Exception):
    """Authentication failure carrying an HTTP status code and optional
    service/API-key context for logging and the v2 error response body."""

    def __init__(self, message, code, service_id=None, api_key_id=None):
        # `message` is wrapped in the legacy {"token": [...]} shape;
        # `short_message` keeps the bare string for to_dict_v2().
        self.message = {"token": [message]}
        self.short_message = message
        self.code = code
        self.service_id = service_id
        self.api_key_id = api_key_id

    def __str__(self):
        return 'AuthError({message}, {code}, service_id={service_id}, api_key_id={api_key_id})'.format(**self.__dict__)

    def to_dict_v2(self):
        """Serialise this error into the v2 API error response shape."""
        return {
            'status_code': self.code,
            "errors": [
                {
                    "error": "AuthError",
                    "message": self.short_message
                }
            ]
        }


class InternalApiKey():
    """Adapter giving an internal client secret the same attribute surface
    (secret/id/expiry_date) as a service API key, so _decode_jwt_token can
    treat both uniformly. expiry_date is always None: internal keys never
    count as revoked."""

    def __init__(self, client_id, secret):
        self.secret = secret
        self.id = client_id
        self.expiry_date = None


def requires_no_auth():
    """No-op auth handler for endpoints that are intentionally public."""
    pass


def requires_govuk_alerts_auth():
    """Require an internal token issued to the GOV.UK Alerts client."""
    requires_internal_auth(current_app.config.get('GOVUK_ALERTS_CLIENT_ID'))


def requires_admin_auth():
    """Require an internal token issued to the admin client."""
    requires_internal_auth(current_app.config.get('ADMIN_CLIENT_ID'))


def requires_internal_auth(expected_client_id):
    """Authenticate an internal client (admin, alerts, ...).

    Validates the bearer token against the configured secrets for
    `expected_client_id` and stores the client id in `g.service_id`.
    Raises TypeError for unconfigured client ids and AuthError for any
    token problem.
    """
    # Misconfiguration (unknown client id) is a programmer error, not an
    # auth failure — hence TypeError rather than AuthError.
    if expected_client_id not in current_app.config.get('INTERNAL_CLIENT_API_KEYS'):
        raise TypeError("Unknown client_id for internal auth")

    request_helper.check_proxy_header_before_request()

    auth_token = _get_auth_token(request)
    client_id = _get_token_issuer(auth_token)
    if client_id != expected_client_id:
        raise AuthError("Unauthorized: not allowed to perform this action", 401)

    # An internal client may have several active secrets (key rotation);
    # wrap each so they can be checked like ordinary API keys.
    api_keys = [
        InternalApiKey(client_id, secret)
        for secret in current_app.config.get('INTERNAL_CLIENT_API_KEYS')[client_id]
    ]

    _decode_jwt_token(auth_token, api_keys, client_id)
    g.service_id = client_id


def requires_auth():
    """Authenticate a service API request.

    Resolves the token's issuer to a service, validates the token against
    that service's API keys, and populates `g.api_user`, `g.service_id`
    and `g.authenticated_service` for the request. Raises AuthError (401
    or 403) on any failure.
    """
    request_helper.check_proxy_header_before_request()
    auth_token = _get_auth_token(request)
    issuer = _get_token_issuer(auth_token)  # ie the `iss` claim which should be a service ID

    try:
        service_id = uuid.UUID(issuer)
    except Exception:
        raise AuthError("Invalid token: service id is not the right data type", 403)

    # Time the DB round-trip for the Prometheus histogram above.
    try:
        with AUTH_DB_CONNECTION_DURATION_SECONDS.time():
            service = SerialisedService.from_id(service_id)
    except NoResultFound:
        raise AuthError("Invalid token: service not found", 403)

    if not service.api_keys:
        raise AuthError("Invalid token: service has no API keys", 403, service_id=service.id)

    if not service.active:
        raise AuthError("Invalid token: service is archived", 403, service_id=service.id)

    api_key = _decode_jwt_token(auth_token, service.api_keys, service.id)

    current_app.logger.info('API authorised for service {} with api key {}, using issuer {} for URL: {}'.format(
        service_id,
        api_key.id,
        request.headers.get('User-Agent'),
        request.base_url
    ))

    g.api_user = api_key
    g.service_id = service_id
    g.authenticated_service = service


def _decode_jwt_token(auth_token, api_keys, service_id=None):
    """Try each API key until one verifies the token; return that key.

    Exception ordering matters: the more specific Token* errors must be
    caught before the general TokenError. A TokenDecodeError means "not
    signed with this key", so we move on to the next key; running out of
    keys falls through to the for/else and raises "API key not found".
    """
    for api_key in api_keys:
        try:
            decode_jwt_token(auth_token, api_key.secret)
        except TokenExpiredError:
            err_msg = "Error: Your system clock must be accurate to within 30 seconds"
            raise AuthError(err_msg, 403, service_id=service_id, api_key_id=api_key.id)
        except TokenAlgorithmError:
            err_msg = "Invalid token: algorithm used is not HS256"
            raise AuthError(err_msg, 403, service_id=service_id, api_key_id=api_key.id)
        except TokenDecodeError:
            # we attempted to validate the token but it failed meaning it was not signed using this api key.
            # Let's try the next one
            # TODO: Change this so it doesn't also catch `TokenIssuerError` or `TokenIssuedAtError` exceptions (which
            # are children of `TokenDecodeError`) as these should cause an auth error immediately rather than
            # continue on to check the next API key
            continue
        except TokenError:
            # General error when trying to decode and validate the token
            raise AuthError(GENERAL_TOKEN_ERROR_MESSAGE, 403, service_id=service_id, api_key_id=api_key.id)

        # Signature verified — but a revoked (expired) key is still rejected.
        if api_key.expiry_date:
            raise AuthError("Invalid token: API key revoked", 403, service_id=service_id, api_key_id=api_key.id)

        return api_key
    else:
        # service has API keys, but none matching the one the user provided
        raise AuthError("Invalid token: API key not found", 403, service_id=service_id)


def _get_auth_token(req):
    """Extract the bearer token from the Authorization header.

    Raises AuthError(401) if the header is missing or does not use the
    Bearer scheme. `.title()` makes the scheme check case-insensitive.
    """
    auth_header = req.headers.get('Authorization', None)
    if not auth_header:
        raise AuthError('Unauthorized: authentication token must be provided', 401)

    auth_scheme = auth_header[:7].title()

    if auth_scheme != 'Bearer ':
        raise AuthError('Unauthorized: authentication bearer scheme must be used', 401)

    return auth_header[7:]


def _get_token_issuer(auth_token):
    """Read the `iss` claim without verifying the signature, translating
    client-library errors into AuthError(403)."""
    try:
        issuer = get_token_issuer(auth_token)
    except TokenIssuerError:
        raise AuthError("Invalid token: iss field not provided", 403)
    except TokenDecodeError:
        raise AuthError(GENERAL_TOKEN_ERROR_MESSAGE, 403)
    return issuer
gkynaja/me-wines-shop
src/styles/styled-components/config/color.js
// Shared color palette for the styled-components theme.
// `color` holds brand/background colors; `textColor` holds text colors.
export const color = {
  primary: '#ffcb96',
}

export const textColor = {
  primary: '#333',
}
passcod/rollup
test/function/samples/emit-chunk/no-input/_config.js
// Rollup functional-test fixture: verifies that a build succeeds with no
// `input` option as long as a plugin emits a dynamic entry chunk.
module.exports = {
	description: 'It is not necessary to provide an input if a dynamic entry is emitted',
	options: {
		// Deliberately undefined — the emitted chunk is the only entry point.
		input: undefined,
		plugins: {
			name: 'test-plugin',
			buildStart() {
				this.emitChunk('chunk.js');
			}
		}
	}
};
JironBach/eatlocaljapan
app/models/review.rb
# == Schema Information
#
# Table name: reviews
#
#  id               :integer          not null, primary key
#  guest_id         :integer
#  host_id          :integer
#  reservation_id   :integer
#  listing_id       :integer
#  accuracy         :integer          default(0)
#  communication    :integer          default(0)
#  cleanliness      :integer          default(0)
#  location         :integer          default(0)
#  check_in         :integer          default(0)
#  cost_performance :integer          default(0)
#  total            :integer          default(0)
#  msg              :text             default("")
#  created_at       :datetime         not null
#  updated_at       :datetime         not null
#
# Indexes
#
#  index_reviews_on_guest_id        (guest_id)
#  index_reviews_on_host_id         (host_id)
#  index_reviews_on_listing_id      (listing_id)
#  index_reviews_on_reservation_id  (reservation_id)
#

# A guest's review of a host/listing after a reservation, with per-category
# scores and an overall `total` that is rolled up into the listing's and the
# host profile's averages.
class Review < ApplicationRecord
  belongs_to :host, class_name: 'User', foreign_key: 'host_id'
  belongs_to :guest, class_name: 'User', foreign_key: 'guest_id'
  belongs_to :listing
  belongs_to :reservation
  has_one :review_reply

  validates :guest_id, presence: true
  validates :host_id, presence: true
  validates :reservation_id, presence: true
  validates :listing_id, presence: true
  validates :msg, presence: true
  validates :accuracy, presence: true
  validates :communication, presence: true
  validates :cleanliness, presence: true
  validates :location, presence: true
  validates :check_in, presence: true
  validates :cost_performance, presence: true
  validates :total, presence: true

  # Query helpers used by listing/profile pages.
  scope :this_listing, ->(listing_id) { where(listing_id: listing_id) }
  scope :order_by_created_at_desc, -> { order('created_at desc') }
  scope :order_by_updated_at_desc, -> { order('updated_at desc') }
  scope :i_do, ->(user_id) { where(guest_id: user_id) }
  scope :they_do, ->(user_id) { where(host_id: user_id) }

  # Recompute and persist the rolled-up averages on both the listing and the
  # host's profile after this review's `total` changes.
  def calc_average
    calc_ave_of_listing
    calc_ave_of_profile
  end

  # Update the listing's stored average total.
  #
  # NOTE(review): `(l.ave_total + total) / r_count` adds one review's score to
  # the previous *average* and divides by the full review count — it does not
  # weight the prior average by (r_count - 1), and integer division truncates.
  # Looks like the intended running-average formula would be
  # (l.ave_total * (r_count - 1) + total) / r_count — confirm intent before
  # changing persisted data.
  def calc_ave_of_listing
    l = Listing.find(listing_id)
    r_count = Review.where(listing_id: listing_id).count
    ave_total = (l.ave_total + total) / r_count
    l.ave_total = ave_total
    l.save
  end

  # Update the host profile's stored average total.
  # NOTE(review): same averaging concern as calc_ave_of_listing above.
  def calc_ave_of_profile
    prof = Profile.find(host_id)
    r_count = Review.where(host_id: host_id).count
    ave_total = (prof.ave_total + total) / r_count
    prof.ave_total = ave_total
    prof.save
  end
end
clipperhouse/techlemm
filters/mapper/filter.go
// Package mapper provides a convenience builder for filters that map inputs to outputs, one-to-one package mapper import ( "github.com/clipperhouse/jargon" ) type filter struct { funcs []func(*jargon.Token) *jargon.Token } // NewFilter creates a filter which applies one or more funcs to each token func NewFilter(funcs ...func(*jargon.Token) *jargon.Token) jargon.Filter { // Save the parameters for lazy loading (below) f := &filter{ funcs: funcs, } return f.Filter } // Filter applies mapping func(s) to each incoming token func (f *filter) Filter(incoming *jargon.TokenStream) *jargon.TokenStream { t := &tokens{ incoming: incoming, filter: f, } return jargon.NewTokenStream(t.next) } type tokens struct { // incoming stream of tokens from another source, such as a tokenizer incoming *jargon.TokenStream filter *filter } func (t *tokens) next() (*jargon.Token, error) { token, err := t.incoming.Next() if err != nil { return nil, err } if token == nil { return nil, nil } for _, f := range t.filter.funcs { token = f(token) } return token, nil }
anuragprafulla/components-contrib
vendor/github.com/kubernetes-client/go/kubernetes/client/v1_cinder_volume_source.go
/* * Kubernetes * * No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) * * API version: v1.10.0 * Generated by: Swagger Codegen (https://github.com/swagger-api/swagger-codegen.git) */ package client // Represents a cinder volume resource in Openstack. A Cinder volume must exist before mounting to a container. The volume must also be in the same region as the kubelet. Cinder volumes support ownership management and SELinux relabeling. type V1CinderVolumeSource struct { // Filesystem type to mount. Must be a filesystem type supported by the host operating system. Examples: \"ext4\", \"xfs\", \"ntfs\". Implicitly inferred to be \"ext4\" if unspecified. More info: https://releases.k8s.io/HEAD/examples/mysql-cinder-pd/README.md FsType string `json:"fsType,omitempty"` // Optional: Defaults to false (read/write). ReadOnly here will force the ReadOnly setting in VolumeMounts. More info: https://releases.k8s.io/HEAD/examples/mysql-cinder-pd/README.md ReadOnly bool `json:"readOnly,omitempty"` // volume id used to identify the volume in cinder More info: https://releases.k8s.io/HEAD/examples/mysql-cinder-pd/README.md VolumeID string `json:"volumeID"` }
williamlin0504/free5gcWithOCF
lib/openapi/models/model_ip_end_point.go
/*
 * NRF NFManagement Service
 *
 * NRF NFManagement Service
 *
 * API version: 1.0.1
 * Generated by: OpenAPI Generator (https://openapi-generator.tech)
 */

// NOTE: generated code — do not edit by hand; regenerate with openapi-generator.
package models

// IpEndPoint describes a network endpoint of an NF instance: an IPv4 and/or
// IPv6 address, the transport protocol, and the port.
type IpEndPoint struct {
	Ipv4Address string            `json:"ipv4Address,omitempty" yaml:"ipv4Address" bson:"ipv4Address" mapstructure:"Ipv4Address"`
	Ipv6Address string            `json:"ipv6Address,omitempty" yaml:"ipv6Address" bson:"ipv6Address" mapstructure:"Ipv6Address"`
	Transport   TransportProtocol `json:"transport,omitempty" yaml:"transport" bson:"transport" mapstructure:"Transport"`
	Port        int32             `json:"port,omitempty" yaml:"port" bson:"port" mapstructure:"Port"`
}
Epsilon-Not/ado-report-extension
node_modules/tfx-cli/_build/lib/version.js
<gh_stars>1-10
// NOTE: compiled TypeScript output (see sourceMappingURL below) — edit the
// corresponding .ts source, not this file.
"use strict";
// TypeScript ES5 inheritance helper emitted by the compiler.
var __extends = (this && this.__extends) || (function () {
    var extendStatics = function (d, b) {
        extendStatics = Object.setPrototypeOf ||
            ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
            function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };
        return extendStatics(d, b);
    };
    return function (d, b) {
        extendStatics(d, b);
        function __() { this.constructor = d; }
        d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
    };
})();
Object.defineProperty(exports, "__esModule", { value: true });
var common = require("./common");
var path = require("path");
var dynamicVersion_1 = require("./dynamicVersion");
// SemanticVersion: a major.minor.patch version, extending DynamicVersion.
var SemanticVersion = /** @class */ (function (_super) {
    __extends(SemanticVersion, _super);
    function SemanticVersion(major, minor, patch) {
        var _this = _super.call(this, major, minor, patch) || this;
        _this.major = major;
        _this.minor = minor;
        _this.patch = patch;
        return _this;
    }
    /**
     * Parse a Semantic Version from a string.
     */
    SemanticVersion.parse = function (version) {
        try {
            // Require exactly three dot-separated numeric components.
            var spl = version.split(".").map(function (v) { return parseInt(v); });
            if (spl.length === 3 && !spl.some(function (e) { return isNaN(e); })) {
                return new SemanticVersion(spl[0], spl[1], spl[2]);
            }
            else {
                // Jump to the catch block below to produce a uniform error.
                throw "";
            }
        }
        catch (e) {
            throw new Error("Could not parse '" + version + "' as a Semantic Version.");
        }
    };
    return SemanticVersion;
}(dynamicVersion_1.DynamicVersion));
exports.SemanticVersion = SemanticVersion;
// Reads tfx-cli's own version out of its package.json.
function getTfxVersion() {
    var packageJson = require(path.join(common.APP_ROOT, "package.json"));
    return Promise.resolve(SemanticVersion.parse(packageJson.version));
}
exports.getTfxVersion = getTfxVersion;
//# sourceMappingURL=version.js.map
lightpaycashproject/LCHj
orchid/src/com/subgraph/orchid/directory/parsing/DocumentParserFactory.java
<filename>orchid/src/com/subgraph/orchid/directory/parsing/DocumentParserFactory.java
package com.subgraph.orchid.directory.parsing;

import java.nio.ByteBuffer;

import com.subgraph.orchid.ConsensusDocument;
import com.subgraph.orchid.KeyCertificate;
import com.subgraph.orchid.RouterDescriptor;
import com.subgraph.orchid.RouterMicrodescriptor;

/**
 * Factory for parsers of the Tor directory document types. Each method wraps
 * a raw document held in a {@link ByteBuffer} in a typed
 * {@link DocumentParser}.
 */
public interface DocumentParserFactory {

	/**
	 * Create a parser for router descriptor documents.
	 *
	 * @param buffer raw document bytes
	 * @param verifySignatures whether descriptor signatures are checked during parsing
	 */
	DocumentParser<RouterDescriptor> createRouterDescriptorParser(ByteBuffer buffer, boolean verifySignatures);

	/** Create a parser for router microdescriptor documents. */
	DocumentParser<RouterMicrodescriptor> createRouterMicrodescriptorParser(ByteBuffer buffer);

	/** Create a parser for directory-authority key certificate documents. */
	DocumentParser<KeyCertificate> createKeyCertificateParser(ByteBuffer buffer);

	/** Create a parser for network-status consensus documents. */
	DocumentParser<ConsensusDocument> createConsensusDocumentParser(ByteBuffer buffer);
}
ndjaitly/multi-frame
cudamat/test.py
<reponame>ndjaitly/multi-frame
# Stress test for cudamat: repeatedly fills a GPU matrix with Gaussian noise
# and checks that the norm never comes back NaN. On failure the offending
# matrix is dumped to disk and the script exits with status 1.
# NOTE: Python 2 syntax (print statements).
import numpy, sys, util
import cudamat as cm

# Hard-coded to GPU 6 on the original machine.
cm.cuda_set_device(6)
cm.cublas_init()
cm.CUDAMatrix.init_random(42)

cmMat = cm.empty((20, 128))
for batch in range(10000):
    cmMat.fill_with_randn()
    if numpy.isnan(cmMat.euclid_norm()):
        # Save the bad matrix for post-mortem before bailing out.
        util.save('test.dat', 'a', {'a':cmMat.asarray()})
        print "nan error in batch: ", batch
        sys.stdout.flush()
        sys.exit(1)

print "Ran without a problem"
bluccino/tutorial
core/mcore/06-mcore/src/core/publisher.h
/* Bluetooth: Mesh Generic OnOff, Generic Level, Lighting & Vendor Models
 *
 * Copyright (c) 2018 <NAME>
 *
 * SPDX-License-Identifier: Apache-2.0
 */

#ifndef _PUBLISHER_H
#define _PUBLISHER_H

/* Others */
/* Signed Generic Level scale: quartile points of the int16 range. */
#define LEVEL_S0 -32768
#define LEVEL_S25 -16384
#define LEVEL_S50 0
#define LEVEL_S75 16384
#define LEVEL_S100 32767

/* Unsigned level scale: quartile points of the uint16 range. */
#define LEVEL_U0 0
#define LEVEL_U25 16384
#define LEVEL_U50 32768
#define LEVEL_U75 49152
#define LEVEL_U100 65535

/* Work-queue handler that performs the mesh publish. */
void publish(struct k_work *work);

//==============================================================================
// publisher interface
//==============================================================================

/* Publish a Bluccino message object with the given value. */
int bl_pub(BL_ob *o, int val);

#endif
wilebeast/FireFox-OS
B2G/gecko/netwerk/protocol/viewsource/nsViewSourceChannel.cpp
<gh_stars>1-10 /* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- */ /* vim:set ts=4 sw=4 sts=4 et: */ /* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ #include "nsViewSourceChannel.h" #include "nsIIOService.h" #include "nsIServiceManager.h" #include "nsIInterfaceRequestor.h" #include "nsIInterfaceRequestorUtils.h" #include "nsXPIDLString.h" #include "nsReadableUtils.h" #include "nsMimeTypes.h" #include "nsNetUtil.h" #include "nsIHttpHeaderVisitor.h" NS_IMPL_ADDREF(nsViewSourceChannel) NS_IMPL_RELEASE(nsViewSourceChannel) /* This QI uses NS_INTERFACE_MAP_ENTRY_CONDITIONAL to check for non-nullness of mHttpChannel, mCachingChannel, and mUploadChannel. */ NS_INTERFACE_MAP_BEGIN(nsViewSourceChannel) NS_INTERFACE_MAP_ENTRY(nsIViewSourceChannel) NS_INTERFACE_MAP_ENTRY(nsIStreamListener) NS_INTERFACE_MAP_ENTRY(nsIRequestObserver) NS_INTERFACE_MAP_ENTRY_CONDITIONAL(nsIHttpChannel, mHttpChannel) NS_INTERFACE_MAP_ENTRY_CONDITIONAL(nsIHttpChannelInternal, mHttpChannelInternal) NS_INTERFACE_MAP_ENTRY_CONDITIONAL(nsICachingChannel, mCachingChannel) NS_INTERFACE_MAP_ENTRY_CONDITIONAL(nsIApplicationCacheChannel, mApplicationCacheChannel) NS_INTERFACE_MAP_ENTRY_CONDITIONAL(nsIUploadChannel, mUploadChannel) NS_INTERFACE_MAP_ENTRY_AMBIGUOUS(nsIRequest, nsIViewSourceChannel) NS_INTERFACE_MAP_ENTRY_AMBIGUOUS(nsIChannel, nsIViewSourceChannel) NS_INTERFACE_MAP_ENTRY_AMBIGUOUS(nsISupports, nsIViewSourceChannel) NS_INTERFACE_MAP_END nsresult nsViewSourceChannel::Init(nsIURI* uri) { mOriginalURI = uri; nsAutoCString path; nsresult rv = uri->GetPath(path); if (NS_FAILED(rv)) return rv; nsCOMPtr<nsIIOService> pService(do_GetIOService(&rv)); if (NS_FAILED(rv)) return rv; nsAutoCString scheme; rv = pService->ExtractScheme(path, scheme); if (NS_FAILED(rv)) return rv; // prevent viewing source of javascript URIs (see 
bug 204779) if (scheme.LowerCaseEqualsLiteral("javascript")) { NS_WARNING("blocking view-source:javascript:"); return NS_ERROR_INVALID_ARG; } rv = pService->NewChannel(path, nullptr, nullptr, getter_AddRefs(mChannel)); if (NS_FAILED(rv)) return rv; mChannel->SetOriginalURI(mOriginalURI); mHttpChannel = do_QueryInterface(mChannel); mHttpChannelInternal = do_QueryInterface(mChannel); mCachingChannel = do_QueryInterface(mChannel); mApplicationCacheChannel = do_QueryInterface(mChannel); mUploadChannel = do_QueryInterface(mChannel); return NS_OK; } //////////////////////////////////////////////////////////////////////////////// // nsIRequest methods: NS_IMETHODIMP nsViewSourceChannel::GetName(nsACString &result) { return NS_ERROR_NOT_IMPLEMENTED; } NS_IMETHODIMP nsViewSourceChannel::IsPending(bool *result) { NS_ENSURE_TRUE(mChannel, NS_ERROR_FAILURE); return mChannel->IsPending(result); } NS_IMETHODIMP nsViewSourceChannel::GetStatus(nsresult *status) { NS_ENSURE_TRUE(mChannel, NS_ERROR_FAILURE); return mChannel->GetStatus(status); } NS_IMETHODIMP nsViewSourceChannel::Cancel(nsresult status) { NS_ENSURE_TRUE(mChannel, NS_ERROR_FAILURE); return mChannel->Cancel(status); } NS_IMETHODIMP nsViewSourceChannel::Suspend(void) { NS_ENSURE_TRUE(mChannel, NS_ERROR_FAILURE); return mChannel->Suspend(); } NS_IMETHODIMP nsViewSourceChannel::Resume(void) { NS_ENSURE_TRUE(mChannel, NS_ERROR_FAILURE); return mChannel->Resume(); } //////////////////////////////////////////////////////////////////////////////// // nsIChannel methods: NS_IMETHODIMP nsViewSourceChannel::GetOriginalURI(nsIURI* *aURI) { NS_ASSERTION(aURI, "Null out param!"); *aURI = mOriginalURI; NS_ADDREF(*aURI); return NS_OK; } NS_IMETHODIMP nsViewSourceChannel::SetOriginalURI(nsIURI* aURI) { NS_ENSURE_ARG_POINTER(aURI); mOriginalURI = aURI; return NS_OK; } NS_IMETHODIMP nsViewSourceChannel::GetURI(nsIURI* *aURI) { NS_ENSURE_TRUE(mChannel, NS_ERROR_FAILURE); nsCOMPtr<nsIURI> uri; nsresult rv = 
mChannel->GetURI(getter_AddRefs(uri));
    if (NS_FAILED(rv)) return rv;

    // Protect ourselves against broken channel implementations.
    if (!uri) {
        NS_ERROR("inner channel returned NS_OK and a null URI");
        return NS_ERROR_UNEXPECTED;
    }

    nsAutoCString spec;
    uri->GetSpec(spec);

    /* XXX Gross hack -- NS_NewURI goes into an infinite loop on
       non-flat specs.  See bug 136980 */
    return NS_NewURI(aURI,
                     nsAutoCString(NS_LITERAL_CSTRING("view-source:") + spec),
                     nullptr);
}

// Synchronous open: delegates to the inner channel and records that the
// channel has been opened (SetContentType() only accepts values once opened).
NS_IMETHODIMP
nsViewSourceChannel::Open(nsIInputStream **_retval)
{
    NS_ENSURE_TRUE(mChannel, NS_ERROR_FAILURE);

    nsresult rv = mChannel->Open(_retval);
    if (NS_SUCCEEDED(rv)) {
        mOpened = true;
    }
    return rv;
}

// Asynchronous open: registers |this| (not the inner channel) in the
// loadgroup before opening, so we are still in the group when the inner
// channel fires OnStopRequest().
NS_IMETHODIMP
nsViewSourceChannel::AsyncOpen(nsIStreamListener *aListener, nsISupports *ctxt)
{
    NS_ENSURE_TRUE(mChannel, NS_ERROR_FAILURE);

    mListener = aListener;

    /*
     * We want to add ourselves to the loadgroup before opening
     * mChannel, since we want to make sure we're in the loadgroup
     * when mChannel finishes and fires OnStopRequest()
     */

    nsCOMPtr<nsILoadGroup> loadGroup;
    mChannel->GetLoadGroup(getter_AddRefs(loadGroup));
    if (loadGroup)
        loadGroup->AddRequest(static_cast<nsIViewSourceChannel*>(this),
                              nullptr);

    nsresult rv = mChannel->AsyncOpen(this, ctxt);

    // Undo the loadgroup registration if the open failed outright.
    if (NS_FAILED(rv) && loadGroup)
        loadGroup->RemoveRequest(static_cast<nsIViewSourceChannel*>(this),
                                 nullptr, rv);

    if (NS_SUCCEEDED(rv)) {
        mOpened = true;
    }

    return rv;
}

/*
 * Both the view source channel and mChannel are added to the
 * loadgroup.  There should never be more than one request in the
 * loadgroup that has LOAD_DOCUMENT_URI set.  The one that has this
 * flag set is the request whose URI is used to refetch the document,
 * so it better be the viewsource channel.
 *
 * Therefore, we need to make sure that
 * 1) The load flags on mChannel _never_ include LOAD_DOCUMENT_URI
 * 2) The load flags on |this| include LOAD_DOCUMENT_URI when it was
 *    set via SetLoadFlags (mIsDocument keeps track of this flag).
 */
NS_IMETHODIMP
nsViewSourceChannel::GetLoadFlags(uint32_t *aLoadFlags)
{
    NS_ENSURE_TRUE(mChannel, NS_ERROR_FAILURE);

    nsresult rv = mChannel->GetLoadFlags(aLoadFlags);
    if (NS_FAILED(rv))
        return rv;

    // This should actually be just LOAD_DOCUMENT_URI but the win32 compiler
    // fails to deal due to ambiguous inheritance.  nsIChannel::LOAD_DOCUMENT_URI
    // also fails; the Win32 compiler thinks that's supposed to be a method.
    if (mIsDocument)
        *aLoadFlags |= ::nsIChannel::LOAD_DOCUMENT_URI;

    return rv;
}

NS_IMETHODIMP
nsViewSourceChannel::SetLoadFlags(uint32_t aLoadFlags)
{
    NS_ENSURE_TRUE(mChannel, NS_ERROR_FAILURE);

    // "View source" always wants the currently cached content.
    // We also want to have _this_ channel, not mChannel to be the
    // 'document' channel in the loadgroup.

    // These should actually be just LOAD_FROM_CACHE and LOAD_DOCUMENT_URI but
    // the win32 compiler fails to deal due to ambiguous inheritance.
    // nsIChannel::LOAD_DOCUMENT_URI/nsIRequest::LOAD_FROM_CACHE also fails; the
    // Win32 compiler thinks that's supposed to be a method.
    mIsDocument = (aLoadFlags & ::nsIChannel::LOAD_DOCUMENT_URI) ? true : false;

    // See invariant above: the inner channel must never carry LOAD_DOCUMENT_URI.
    return mChannel->SetLoadFlags((aLoadFlags |
                                   ::nsIRequest::LOAD_FROM_CACHE) &
                                  ~::nsIChannel::LOAD_DOCUMENT_URI);
}

// Returns the (cached) content type; computed lazily from the inner channel
// the first time through, mapping every known type to the view-source type.
NS_IMETHODIMP
nsViewSourceChannel::GetContentType(nsACString &aContentType)
{
    NS_ENSURE_TRUE(mChannel, NS_ERROR_FAILURE);

    aContentType.Truncate();

    if (mContentType.IsEmpty())
    {
        // Get the current content type
        nsresult rv;
        nsAutoCString contentType;
        rv = mChannel->GetContentType(contentType);
        if (NS_FAILED(rv)) return rv;

        // If we don't know our type, just say so.  The unknown
        // content decoder will then kick in automatically, and it
        // will call our SetOriginalContentType method instead of our
        // SetContentType method to set the type it determines.
        if (!contentType.Equals(UNKNOWN_CONTENT_TYPE)) {
            contentType = VIEWSOURCE_CONTENT_TYPE;
        }

        mContentType = contentType;
    }

    aContentType = mContentType;
    return NS_OK;
}

NS_IMETHODIMP
nsViewSourceChannel::SetContentType(const nsACString &aContentType)
{
    // Our GetContentType() currently returns VIEWSOURCE_CONTENT_TYPE
    //
    // However, during the parsing phase the parser calls our
    // channel's GetContentType(). Returning the string above trips up
    // the parser. In order to avoid messy changes and not to have the
    // parser depend on nsIViewSourceChannel Vidur proposed the
    // following solution:
    //
    // The ViewSourceChannel initially returns a content type of
    // VIEWSOURCE_CONTENT_TYPE.  Based on this type decisions to
    // create a viewer for doing a view source are made.  After the
    // viewer is created, nsLayoutDLF::CreateInstance() calls this
    // SetContentType() with the original content type.  When it's
    // time for the parser to find out the content type it will call
    // our channel's GetContentType() and it will get the original
    // content type, such as, text/html and everything is kosher from
    // then on.

    if (!mOpened) {
        // We do not take hints
        return NS_ERROR_NOT_AVAILABLE;
    }

    mContentType = aContentType;
    return NS_OK;
}

// ---- Boilerplate: forward the remaining nsIChannel getters/setters to the
// ---- wrapped inner channel.

NS_IMETHODIMP
nsViewSourceChannel::GetContentCharset(nsACString &aContentCharset)
{
    NS_ENSURE_TRUE(mChannel, NS_ERROR_FAILURE);

    return mChannel->GetContentCharset(aContentCharset);
}

NS_IMETHODIMP
nsViewSourceChannel::SetContentCharset(const nsACString &aContentCharset)
{
    NS_ENSURE_TRUE(mChannel, NS_ERROR_FAILURE);

    return mChannel->SetContentCharset(aContentCharset);
}

NS_IMETHODIMP
nsViewSourceChannel::GetContentDisposition(uint32_t *aContentDisposition)
{
    NS_ENSURE_TRUE(mChannel, NS_ERROR_FAILURE);

    return mChannel->GetContentDisposition(aContentDisposition);
}

NS_IMETHODIMP
nsViewSourceChannel::SetContentDisposition(uint32_t aContentDisposition)
{
    NS_ENSURE_TRUE(mChannel, NS_ERROR_FAILURE);

    return mChannel->SetContentDisposition(aContentDisposition);
}

NS_IMETHODIMP
nsViewSourceChannel::GetContentDispositionFilename(nsAString &aContentDispositionFilename)
{
    NS_ENSURE_TRUE(mChannel, NS_ERROR_FAILURE);

    return mChannel->GetContentDispositionFilename(aContentDispositionFilename);
}

NS_IMETHODIMP
nsViewSourceChannel::SetContentDispositionFilename(const nsAString &aContentDispositionFilename)
{
    NS_ENSURE_TRUE(mChannel, NS_ERROR_FAILURE);

    return mChannel->SetContentDispositionFilename(aContentDispositionFilename);
}

NS_IMETHODIMP
nsViewSourceChannel::GetContentDispositionHeader(nsACString &aContentDispositionHeader)
{
    NS_ENSURE_TRUE(mChannel, NS_ERROR_FAILURE);

    return mChannel->GetContentDispositionHeader(aContentDispositionHeader);
}

NS_IMETHODIMP
nsViewSourceChannel::GetContentLength(int32_t *aContentLength)
{
    NS_ENSURE_TRUE(mChannel, NS_ERROR_FAILURE);

    return mChannel->GetContentLength(aContentLength);
}

NS_IMETHODIMP
nsViewSourceChannel::SetContentLength(int32_t aContentLength)
{
    NS_ENSURE_TRUE(mChannel, NS_ERROR_FAILURE);

    return mChannel->SetContentLength(aContentLength);
}

NS_IMETHODIMP
nsViewSourceChannel::GetLoadGroup(nsILoadGroup* *aLoadGroup)
{
    NS_ENSURE_TRUE(mChannel, NS_ERROR_FAILURE);

    return mChannel->GetLoadGroup(aLoadGroup);
}

NS_IMETHODIMP
nsViewSourceChannel::SetLoadGroup(nsILoadGroup* aLoadGroup)
{
    NS_ENSURE_TRUE(mChannel, NS_ERROR_FAILURE);

    return mChannel->SetLoadGroup(aLoadGroup);
}

NS_IMETHODIMP
nsViewSourceChannel::GetOwner(nsISupports* *aOwner)
{
    NS_ENSURE_TRUE(mChannel, NS_ERROR_FAILURE);

    return mChannel->GetOwner(aOwner);
}

NS_IMETHODIMP
nsViewSourceChannel::SetOwner(nsISupports* aOwner)
{
    NS_ENSURE_TRUE(mChannel, NS_ERROR_FAILURE);

    return mChannel->SetOwner(aOwner);
}

NS_IMETHODIMP
nsViewSourceChannel::GetNotificationCallbacks(nsIInterfaceRequestor* *aNotificationCallbacks)
{
    NS_ENSURE_TRUE(mChannel, NS_ERROR_FAILURE);

    return mChannel->GetNotificationCallbacks(aNotificationCallbacks);
}

NS_IMETHODIMP
nsViewSourceChannel::SetNotificationCallbacks(nsIInterfaceRequestor* aNotificationCallbacks)
{
    NS_ENSURE_TRUE(mChannel, NS_ERROR_FAILURE);

    return mChannel->SetNotificationCallbacks(aNotificationCallbacks);
}

NS_IMETHODIMP
nsViewSourceChannel::GetSecurityInfo(nsISupports * *aSecurityInfo)
{
    NS_ENSURE_TRUE(mChannel, NS_ERROR_FAILURE);

    return mChannel->GetSecurityInfo(aSecurityInfo);
}

// nsIViewSourceChannel methods

// The "original" content type is the inner channel's real type (e.g.
// text/html), as opposed to the view-source type GetContentType() reports.
NS_IMETHODIMP
nsViewSourceChannel::GetOriginalContentType(nsACString &aContentType)
{
    NS_ENSURE_TRUE(mChannel, NS_ERROR_FAILURE);

    return mChannel->GetContentType(aContentType);
}

NS_IMETHODIMP
nsViewSourceChannel::SetOriginalContentType(const nsACString &aContentType)
{
    NS_ENSURE_TRUE(mChannel, NS_ERROR_FAILURE);

    // clear our cached content-type value
    mContentType.Truncate();

    return mChannel->SetContentType(aContentType);
}

// nsIRequestObserver methods
NS_IMETHODIMP
nsViewSourceChannel::OnStartRequest(nsIRequest *aRequest, nsISupports *aContext)
{
    NS_ENSURE_TRUE(mListener, NS_ERROR_FAILURE);
    // The channel may have gotten redirected... Time to update our info.
    // Note: each QI below may legitimately yield null if the new request does
    // not implement the corresponding interface.
    mChannel = do_QueryInterface(aRequest);
    mHttpChannel = do_QueryInterface(aRequest);
    mCachingChannel = do_QueryInterface(aRequest);
    mUploadChannel = do_QueryInterface(aRequest);

    return mListener->OnStartRequest(static_cast<nsIViewSourceChannel*>(this),
                                     aContext);
}

NS_IMETHODIMP
nsViewSourceChannel::OnStopRequest(nsIRequest *aRequest, nsISupports* aContext,
                                   nsresult aStatus)
{
    NS_ENSURE_TRUE(mListener, NS_ERROR_FAILURE);
    // Balance the AddRequest() done in AsyncOpen().
    if (mChannel)
    {
        nsCOMPtr<nsILoadGroup> loadGroup;
        mChannel->GetLoadGroup(getter_AddRefs(loadGroup));
        if (loadGroup)
        {
            loadGroup->RemoveRequest(static_cast<nsIViewSourceChannel*>(this),
                                     nullptr, aStatus);
        }
    }
    return mListener->OnStopRequest(static_cast<nsIViewSourceChannel*>(this),
                                    aContext, aStatus);
}

// nsIStreamListener methods
NS_IMETHODIMP
nsViewSourceChannel::OnDataAvailable(nsIRequest *aRequest, nsISupports* aContext,
                                     nsIInputStream *aInputStream,
                                     uint64_t aSourceOffset,
                                     uint32_t aLength)
{
    NS_ENSURE_TRUE(mListener, NS_ERROR_FAILURE);
    return mListener->OnDataAvailable(static_cast<nsIViewSourceChannel*>(this),
                                      aContext, aInputStream,
                                      aSourceOffset, aLength);
}

// nsIHttpChannel methods

// We want to forward most of nsIHttpChannel over to mHttpChannel, but we want
// to override GetRequestHeader and VisitHeaders. The reason is that we don't
// want various headers like Link: and Refresh: applying to view-source.
NS_IMETHODIMP
nsViewSourceChannel::GetRequestMethod(nsACString & aRequestMethod)
{
    return !mHttpChannel ?
        NS_ERROR_NULL_POINTER :
        mHttpChannel->GetRequestMethod(aRequestMethod);
}

NS_IMETHODIMP
nsViewSourceChannel::SetRequestMethod(const nsACString & aRequestMethod)
{
    return !mHttpChannel ?
        NS_ERROR_NULL_POINTER :
        mHttpChannel->SetRequestMethod(aRequestMethod);
}

NS_IMETHODIMP
nsViewSourceChannel::GetReferrer(nsIURI * *aReferrer)
{
    return !mHttpChannel ?
        NS_ERROR_NULL_POINTER :
        mHttpChannel->GetReferrer(aReferrer);
}

NS_IMETHODIMP
nsViewSourceChannel::SetReferrer(nsIURI * aReferrer)
{
    return !mHttpChannel ?
        NS_ERROR_NULL_POINTER :
        mHttpChannel->SetReferrer(aReferrer);
}

NS_IMETHODIMP
nsViewSourceChannel::GetRequestHeader(const nsACString & aHeader,
                                      nsACString & aValue)
{
    return !mHttpChannel ?
        NS_ERROR_NULL_POINTER :
        mHttpChannel->GetRequestHeader(aHeader, aValue);
}

NS_IMETHODIMP
nsViewSourceChannel::SetRequestHeader(const nsACString & aHeader,
                                      const nsACString & aValue,
                                      bool aMerge)
{
    return !mHttpChannel ?
        NS_ERROR_NULL_POINTER :
        mHttpChannel->SetRequestHeader(aHeader, aValue, aMerge);
}

NS_IMETHODIMP
nsViewSourceChannel::VisitRequestHeaders(nsIHttpHeaderVisitor *aVisitor)
{
    return !mHttpChannel ?
        NS_ERROR_NULL_POINTER :
        mHttpChannel->VisitRequestHeaders(aVisitor);
}

NS_IMETHODIMP
nsViewSourceChannel::GetAllowPipelining(bool *aAllowPipelining)
{
    return !mHttpChannel ?
        NS_ERROR_NULL_POINTER :
        mHttpChannel->GetAllowPipelining(aAllowPipelining);
}

NS_IMETHODIMP
nsViewSourceChannel::SetAllowPipelining(bool aAllowPipelining)
{
    return !mHttpChannel ?
        NS_ERROR_NULL_POINTER :
        mHttpChannel->SetAllowPipelining(aAllowPipelining);
}

NS_IMETHODIMP
nsViewSourceChannel::GetRedirectionLimit(uint32_t *aRedirectionLimit)
{
    return !mHttpChannel ?
        NS_ERROR_NULL_POINTER :
        mHttpChannel->GetRedirectionLimit(aRedirectionLimit);
}

NS_IMETHODIMP
nsViewSourceChannel::SetRedirectionLimit(uint32_t aRedirectionLimit)
{
    return !mHttpChannel ?
        NS_ERROR_NULL_POINTER :
        mHttpChannel->SetRedirectionLimit(aRedirectionLimit);
}

NS_IMETHODIMP
nsViewSourceChannel::GetResponseStatus(uint32_t *aResponseStatus)
{
    return !mHttpChannel ?
        NS_ERROR_NULL_POINTER :
        mHttpChannel->GetResponseStatus(aResponseStatus);
}

NS_IMETHODIMP
nsViewSourceChannel::GetResponseStatusText(nsACString & aResponseStatusText)
{
    return !mHttpChannel ?
        NS_ERROR_NULL_POINTER :
        mHttpChannel->GetResponseStatusText(aResponseStatusText);
}

NS_IMETHODIMP
nsViewSourceChannel::GetRequestSucceeded(bool *aRequestSucceeded)
{
    return !mHttpChannel ?
        NS_ERROR_NULL_POINTER :
        mHttpChannel->GetRequestSucceeded(aRequestSucceeded);
}

// Only a small whitelist of response headers (Content-Type, the CSP headers,
// X-Frame-Options) is exposed; everything else is reported as empty so that
// headers such as Link: or Refresh: do not apply to the view-source page.
NS_IMETHODIMP
nsViewSourceChannel::GetResponseHeader(const nsACString & aHeader,
                                       nsACString & aValue)
{
    if (!mHttpChannel)
        return NS_ERROR_NULL_POINTER;

    if (!aHeader.Equals(NS_LITERAL_CSTRING("Content-Type"),
                        nsCaseInsensitiveCStringComparator()) &&
        !aHeader.Equals(NS_LITERAL_CSTRING("X-Content-Security-Policy"),
                        nsCaseInsensitiveCStringComparator()) &&
        !aHeader.Equals(NS_LITERAL_CSTRING("X-Content-Security-Policy-Report-Only"),
                        nsCaseInsensitiveCStringComparator()) &&
        !aHeader.Equals(NS_LITERAL_CSTRING("X-Frame-Options"),
                        nsCaseInsensitiveCStringComparator())) {
        aValue.Truncate();
        return NS_OK;
    }

    return mHttpChannel->GetResponseHeader(aHeader, aValue);
}

NS_IMETHODIMP
nsViewSourceChannel::SetResponseHeader(const nsACString & header,
                                       const nsACString & value, bool merge)
{
    return !mHttpChannel ?
        NS_ERROR_NULL_POINTER :
        mHttpChannel->SetResponseHeader(header, value, merge);
}

// Matches GetResponseHeader's filtering: only Content-Type is ever visited.
NS_IMETHODIMP
nsViewSourceChannel::VisitResponseHeaders(nsIHttpHeaderVisitor *aVisitor)
{
    if (!mHttpChannel)
        return NS_ERROR_NULL_POINTER;

    NS_NAMED_LITERAL_CSTRING(contentTypeStr, "Content-Type");
    nsAutoCString contentType;
    nsresult rv =
        mHttpChannel->GetResponseHeader(contentTypeStr, contentType);
    if (NS_SUCCEEDED(rv))
        aVisitor->VisitHeader(contentTypeStr, contentType);
    return NS_OK;
}

NS_IMETHODIMP
nsViewSourceChannel::IsNoStoreResponse(bool *_retval)
{
    return !mHttpChannel ?
        NS_ERROR_NULL_POINTER :
        mHttpChannel->IsNoStoreResponse(_retval);
}

NS_IMETHODIMP
nsViewSourceChannel::IsNoCacheResponse(bool *_retval)
{
    return !mHttpChannel ?
        NS_ERROR_NULL_POINTER :
        mHttpChannel->IsNoCacheResponse(_retval);
}
ExaByt3s/hack_scripts
Dark soft/Carberp Botnet/source - absource/pro/all source/OCR/JavaShit/funcs.cpp
#include "funcs.h"
#include "../java/javashit.h"

// Looks up the top-level window whose title equals `nameWindow` and, if it
// exists, delivers the characters of `chars` to it via SendString.
//
// nameWindow: exact window title passed to FindWindow (the class-name
//             argument is null, so only the title is matched).
// chars:      NUL-terminated string of characters to send.
//
// NOTE(review): the (TCHAR*) cast strips const and assumes an ANSI build
// where TCHAR == char; in a UNICODE build this reinterprets the narrow
// buffer as wide characters -- confirm the build configuration and that
// SendString does not modify the buffer.
void SendKeys( const char* nameWindow, const char* chars )
{
	DbgMsg( "Name Window: %s, String: %s", nameWindow, chars );
	HWND wnd = FindWindow( 0, nameWindow );
	if( wnd )
	{
		DbgMsg( "Window %s founded", nameWindow );
		SendString( wnd, (TCHAR*)chars );
	}
}
Mattlk13/oci-go-sdk
datacatalog/list_metastores_request_response.go
// Copyright (c) 2016, 2018, 2021, Oracle and/or its affiliates.  All rights reserved.
// This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
// Code generated. DO NOT EDIT.

package datacatalog

import (
	"github.com/oracle/oci-go-sdk/v49/common"
	"net/http"
)

// ListMetastoresRequest wrapper for the ListMetastores operation
//
// See also
//
// Click https://docs.cloud.oracle.com/en-us/iaas/tools/go-sdk-examples/latest/datacatalog/ListMetastores.go.html to see an example of how to use ListMetastoresRequest.
type ListMetastoresRequest struct {

	// The OCID of the compartment where you want to list resources.
	CompartmentId *string `mandatory:"true" contributesTo:"query" name:"compartmentId"`

	// A filter to return only resources that match the entire display name given. The match is not case sensitive.
	DisplayName *string `mandatory:"false" contributesTo:"query" name:"displayName"`

	// The maximum number of items to return.
	Limit *int `mandatory:"false" contributesTo:"query" name:"limit"`

	// The page token representing the page at which to start retrieving results. This is usually retrieved from a previous list call.
	Page *string `mandatory:"false" contributesTo:"query" name:"page"`

	// A filter to return only resources that match the specified lifecycle state. The value is case insensitive.
	LifecycleState ListMetastoresLifecycleStateEnum `mandatory:"false" contributesTo:"query" name:"lifecycleState" omitEmpty:"true"`

	// The sort order to use, either 'asc' or 'desc'.
	SortOrder ListMetastoresSortOrderEnum `mandatory:"false" contributesTo:"query" name:"sortOrder" omitEmpty:"true"`

	// The field to sort by. Only one sort order may be provided. Default order for TIMECREATED is descending. Default order for DISPLAYNAME is ascending. If no value is specified TIMECREATED is default.
	SortBy ListMetastoresSortByEnum `mandatory:"false" contributesTo:"query" name:"sortBy" omitEmpty:"true"`

	// The client request ID for tracing.
	OpcRequestId *string `mandatory:"false" contributesTo:"header" name:"opc-request-id"`

	// Metadata about the request. This information will not be transmitted to the service, but
	// represents information that the SDK will consume to drive retry behavior.
	RequestMetadata common.RequestMetadata
}

func (request ListMetastoresRequest) String() string {
	return common.PointerString(request)
}

// HTTPRequest implements the OCIRequest interface
func (request ListMetastoresRequest) HTTPRequest(method, path string, binaryRequestBody *common.OCIReadSeekCloser, extraHeaders map[string]string) (http.Request, error) {

	return common.MakeDefaultHTTPRequestWithTaggedStructAndExtraHeaders(method, path, request, extraHeaders)
}

// BinaryRequestBody implements the OCIRequest interface
func (request ListMetastoresRequest) BinaryRequestBody() (*common.OCIReadSeekCloser, bool) {

	return nil, false

}

// RetryPolicy implements the OCIRetryableRequest interface. This retrieves the specified retry policy.
func (request ListMetastoresRequest) RetryPolicy() *common.RetryPolicy {
	return request.RequestMetadata.RetryPolicy
}

// ListMetastoresResponse wrapper for the ListMetastores operation
type ListMetastoresResponse struct {

	// The underlying http response
	RawResponse *http.Response

	// A list of []MetastoreSummary instances
	Items []MetastoreSummary `presentIn:"body"`

	// Unique Oracle-assigned identifier for the request. If you need to contact
	// Oracle about a particular request, please provide the request ID.
	OpcRequestId *string `presentIn:"header" name:"opc-request-id"`

	// Retrieves the next page of results. When this header appears in the response, additional pages of results remain. See List Pagination (https://docs.cloud.oracle.com/iaas/Content/API/Concepts/usingapi.htm#nine).
	OpcNextPage *string `presentIn:"header" name:"opc-next-page"`
}

func (response ListMetastoresResponse) String() string {
	return common.PointerString(response)
}

// HTTPResponse implements the OCIResponse interface
func (response ListMetastoresResponse) HTTPResponse() *http.Response {
	return response.RawResponse
}

// ListMetastoresLifecycleStateEnum Enum with underlying type: string
type ListMetastoresLifecycleStateEnum string

// Set of constants representing the allowable values for ListMetastoresLifecycleStateEnum
const (
	ListMetastoresLifecycleStateCreating ListMetastoresLifecycleStateEnum = "CREATING"
	ListMetastoresLifecycleStateActive   ListMetastoresLifecycleStateEnum = "ACTIVE"
	ListMetastoresLifecycleStateInactive ListMetastoresLifecycleStateEnum = "INACTIVE"
	ListMetastoresLifecycleStateUpdating ListMetastoresLifecycleStateEnum = "UPDATING"
	ListMetastoresLifecycleStateDeleting ListMetastoresLifecycleStateEnum = "DELETING"
	ListMetastoresLifecycleStateDeleted  ListMetastoresLifecycleStateEnum = "DELETED"
	ListMetastoresLifecycleStateFailed   ListMetastoresLifecycleStateEnum = "FAILED"
	ListMetastoresLifecycleStateMoving   ListMetastoresLifecycleStateEnum = "MOVING"
)

var mappingListMetastoresLifecycleState = map[string]ListMetastoresLifecycleStateEnum{
	"CREATING": ListMetastoresLifecycleStateCreating,
	"ACTIVE":   ListMetastoresLifecycleStateActive,
	"INACTIVE": ListMetastoresLifecycleStateInactive,
	"UPDATING": ListMetastoresLifecycleStateUpdating,
	"DELETING": ListMetastoresLifecycleStateDeleting,
	"DELETED":  ListMetastoresLifecycleStateDeleted,
	"FAILED":   ListMetastoresLifecycleStateFailed,
	"MOVING":   ListMetastoresLifecycleStateMoving,
}

// GetListMetastoresLifecycleStateEnumValues Enumerates the set of values for ListMetastoresLifecycleStateEnum
// NOTE: the slice is built by ranging over a map, so the order of the
// returned values is unspecified (Go map iteration order is randomized).
func GetListMetastoresLifecycleStateEnumValues() []ListMetastoresLifecycleStateEnum {
	values := make([]ListMetastoresLifecycleStateEnum, 0)
	for _, v := range mappingListMetastoresLifecycleState {
		values = append(values, v)
	}
	return values
}

// ListMetastoresSortOrderEnum Enum with underlying type: string
type ListMetastoresSortOrderEnum string

// Set of constants representing the allowable values for ListMetastoresSortOrderEnum
const (
	ListMetastoresSortOrderAsc  ListMetastoresSortOrderEnum = "ASC"
	ListMetastoresSortOrderDesc ListMetastoresSortOrderEnum = "DESC"
)

var mappingListMetastoresSortOrder = map[string]ListMetastoresSortOrderEnum{
	"ASC":  ListMetastoresSortOrderAsc,
	"DESC": ListMetastoresSortOrderDesc,
}

// GetListMetastoresSortOrderEnumValues Enumerates the set of values for ListMetastoresSortOrderEnum
// NOTE: order of the returned values is unspecified (map iteration).
func GetListMetastoresSortOrderEnumValues() []ListMetastoresSortOrderEnum {
	values := make([]ListMetastoresSortOrderEnum, 0)
	for _, v := range mappingListMetastoresSortOrder {
		values = append(values, v)
	}
	return values
}

// ListMetastoresSortByEnum Enum with underlying type: string
type ListMetastoresSortByEnum string

// Set of constants representing the allowable values for ListMetastoresSortByEnum
const (
	ListMetastoresSortByTimecreated ListMetastoresSortByEnum = "TIMECREATED"
	ListMetastoresSortByDisplayname ListMetastoresSortByEnum = "DISPLAYNAME"
)

var mappingListMetastoresSortBy = map[string]ListMetastoresSortByEnum{
	"TIMECREATED": ListMetastoresSortByTimecreated,
	"DISPLAYNAME": ListMetastoresSortByDisplayname,
}

// GetListMetastoresSortByEnumValues Enumerates the set of values for ListMetastoresSortByEnum
// NOTE: order of the returned values is unspecified (map iteration).
func GetListMetastoresSortByEnumValues() []ListMetastoresSortByEnum {
	values := make([]ListMetastoresSortByEnum, 0)
	for _, v := range mappingListMetastoresSortBy {
		values = append(values, v)
	}
	return values
}
atriumxsis/atrium
public/modules/resource-home/resource-home.module.client.js
'use strict';

// Register the 'resource-home' module with the application-wide module
// registry.  ApplicationConfiguration is presumably a global provided by the
// app's core config script -- confirm against the application bootstrap.
ApplicationConfiguration.registerModule('resource-home');
Ox8BADFOOD/ZZUIK
ZZUIKit/Classes/ZZMaskView/ZZMaskView.h
//
//  ZZMaskView.h
//  FBSnapshotTestCase
//
//  Created by Max on 2021/1/13.
//

#import <UIKit/UIKit.h>

NS_ASSUME_NONNULL_BEGIN

/// A UIView intended as a mask/overlay combining a blur effect with a tint
/// color (behavior defined in the implementation file).
@interface ZZMaskView : UIView

/// Style of the blur effect applied by the view.
@property(nonatomic,assign) UIBlurEffectStyle blurStyle;

/// Tint color of the mask; how it layers with the blur is determined by the
/// implementation -- confirm in ZZMaskView.m.
@property(nonatomic,strong) UIColor *maskColor;

@end

NS_ASSUME_NONNULL_END
taragu/serving
pkg/webhook/create_options_test.go
<reponame>taragu/serving /* Copyright 2020 The Knative Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package webhook import ( "context" "errors" "testing" corev1 "k8s.io/api/core/v1" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" rest "k8s.io/client-go/rest" kubeclient "knative.dev/pkg/client/injection/kube/client" fakekubeclient "knative.dev/pkg/client/injection/kube/client/fake" ) type tp struct{} func newTestPods(client rest.Interface, namespace string) podInterface { return podInterface(&tp{}) } func (*tp) createWithOptions(ctx context.Context, pod *corev1.Pod, opts metav1.CreateOptions) (result *corev1.Pod, err error) { return &corev1.Pod{}, nil } type tp2 struct{} func newFailTestPods(client rest.Interface, namespace string) podInterface { return podInterface(&tp2{}) } func (*tp2) createWithOptions(ctx context.Context, pod *corev1.Pod, opts metav1.CreateOptions) (result *corev1.Pod, err error) { return nil, errors.New("fail-reason") } func TestCreateWithOptions(t *testing.T) { ctx, _ := fakekubeclient.With(context.Background()) client := kubeclient.Get(ctx) pod := &corev1.Pod{} client.CoreV1().Pods("namespace").Create(ctx, pod, metav1.CreateOptions{}) newPods(client.CoreV1().RESTClient(), "namespace") }
hermannhueck/little-monad-demo
tutorial/src/main/scala-3/tutorial/examples09/Example09dIOAppCompute.scala
package tutorial.examples09

import scala.util.chaining.*
import util.*
import tutorial.libMyCats.*

// Demonstrates composing two IO values with a for-comprehension, with the
// library's `compute` helper, and contrasts eager (`IO.pure`) vs lazy
// (`IO.eval`) construction.
@main def IOAppCompute: Unit = {

  line().green pipe println

  val io1 = IO.pure(40)
  val io2 = IO pure 2

  // Sequence the two IOs and pair their results.
  val io3: IO[(Int, Int)] =
    for
      i1 <- io1
      i2 <- io2
    yield (i1, i2)

  io3.unsafeRun() pipe println

  val ioCompute: IO[(Int, Int)] = compute(io1, io2)
  val pair: (Int, Int) = ioCompute.unsafeRun()
  pair pipe println

  // eager and lazy
  // With IO.pure the block is evaluated at construction time (the println
  // fires here even though unsafeRun() is commented out) -- presumed from
  // the pure/eval naming; confirm against libMyCats.IO.
  val eager: IO[Int] = IO pure {
    "eager computing ... " pipe println
    42
  }
  // eager.unsafeRun()

  // With IO.eval the block is deferred until unsafeRun() is called.
  val lazyy: IO[Int] = IO eval {
    "lazy computing ... " pipe println
    41
  }
  // lazyy.unsafeRun()

  line().green pipe println
}
MarcelGM/transformers
kk_development.py
import torch
from transformers import AutoConfig, AutoModelForSeq2SeqLM, BartTokenizer, BartForConditionalGeneration, BartExtendedForConditionalGeneration, BartConfig, BartExtendedModel

# Development scratch script: load a trained BartExtended checkpoint, run a
# generation example, and prototype the <knw>-based masking logic.

# Loading trained model
PATH = "/home/ec2-user/moymarce/transformers/checkpoints/5-source_oracle-double/"
tokenizer = BartTokenizer.from_pretrained(PATH)
model = BartExtendedForConditionalGeneration.from_pretrained(PATH)

# Register the knowledge-separator token and resolve the ids we need BEFORE
# tokenizing / generating (the original script called model.generate(inputs...)
# before `inputs` was defined, which raises NameError).
tokenizer.add_tokens(['<knw>'], special_tokens=True)
knw_token_id = tokenizer.convert_tokens_to_ids(['<knw>'])[0]  # 50265
pad_id = tokenizer.pad_token

# Tokenize inputs into batch
ARTICLE_TO_SUMMARIZE = "My friends are cool but they eat too many carbs. I hope one day they start eating healthier. Maybe a plant-based diet would be enough. <knw> My friends are cool"
KNOWLEDGE = "My friends are cool"
inputs = tokenizer([ARTICLE_TO_SUMMARIZE], max_length=1024, return_tensors='pt')
knowledge_inputs = tokenizer([KNOWLEDGE], max_length=1024, return_tensors='pt')
tokenizer([ARTICLE_TO_SUMMARIZE, KNOWLEDGE], max_length=1024, return_tensors='pt')

# Generate example (now that `inputs` exists)
summary_ids = model.generate(inputs['input_ids'], num_beams=4, max_length=20, early_stopping=True, use_cache=False)
print('Predicted text by model:',
      [tokenizer.decode(g, skip_special_tokens=True, clean_up_tokenization_spaces=False) for g in summary_ids],
      sep='\n')

# Masking prototype: locate separator positions and zero out everything after
# them.  Toy tensor X stands in for a batch of input ids.
X = torch.Tensor([[1, 2, 3, 4], [5, 6, 7, 8]])
indexes = ((X == 3) + (X == 6)).nonzero(as_tuple=True)
for row, ind in zip(X, indexes[1]):
    # Compare against the token id, not a decoded string: the original
    # `row == tokenizer.decode('<knw>')` compared a tensor with a str and
    # never matched.
    ind = (row == knw_token_id).nonzero()
    print('row', row, ind)
    print(row[ind:])
    row[ind:] = torch.zeros(row[ind:].size())
kvurd/msgraph-sdk-objc-models
GeneratedModels/MSGraphAndroidWorkProfileCrossProfileDataSharingType.h
// Copyright (c) Microsoft Corporation. All Rights Reserved. Licensed under the MIT License. See License in the project root for license information.

#include <Foundation/Foundation.h>

// Raw values of the Microsoft Graph androidWorkProfileCrossProfileDataSharingType
// enum; numeric values mirror the service-side definition and must not change.
typedef NS_ENUM(NSInteger, MSGraphAndroidWorkProfileCrossProfileDataSharingTypeValue){
	MSGraphAndroidWorkProfileCrossProfileDataSharingTypeDeviceDefault = 0,
	MSGraphAndroidWorkProfileCrossProfileDataSharingTypePreventAny = 1,
	MSGraphAndroidWorkProfileCrossProfileDataSharingTypeAllowPersonalToWork = 2,
	MSGraphAndroidWorkProfileCrossProfileDataSharingTypeNoRestrictions = 3,
	MSGraphAndroidWorkProfileCrossProfileDataSharingTypeEndOfEnum
};

// Object wrapper around the enum, providing factory methods per value and
// string conversion for (de)serialization.
@interface MSGraphAndroidWorkProfileCrossProfileDataSharingType : NSObject

+(MSGraphAndroidWorkProfileCrossProfileDataSharingType*) deviceDefault;
+(MSGraphAndroidWorkProfileCrossProfileDataSharingType*) preventAny;
+(MSGraphAndroidWorkProfileCrossProfileDataSharingType*) allowPersonalToWork;
+(MSGraphAndroidWorkProfileCrossProfileDataSharingType*) noRestrictions;
// Sentinel wrapper for values not recognized by this SDK version.
+(MSGraphAndroidWorkProfileCrossProfileDataSharingType*) UnknownEnumValue;

+(MSGraphAndroidWorkProfileCrossProfileDataSharingType*) androidWorkProfileCrossProfileDataSharingTypeWithEnumValue:(MSGraphAndroidWorkProfileCrossProfileDataSharingTypeValue)val;

// Serialized (string) form of the wrapped value.
-(NSString*) ms_toString;

@property (nonatomic, readonly) MSGraphAndroidWorkProfileCrossProfileDataSharingTypeValue enumValue;

@end

@interface NSString (MSGraphAndroidWorkProfileCrossProfileDataSharingType)

- (MSGraphAndroidWorkProfileCrossProfileDataSharingType*) toMSGraphAndroidWorkProfileCrossProfileDataSharingType;

@end
kleag/external-knowledge-codegen
asdl/lang/java/test/java/com/github/javaparser/symbolsolver/reflectionmodel/ReflectionInterfaceDeclarationTest.java
/*
 * Copyright (C) 2015-2016 <NAME>
 * Copyright (C) 2017-2019 The JavaParser Team.
 *
 * This file is part of JavaParser.
 *
 * JavaParser can be used either under the terms of
 * a) the GNU Lesser General Public License as published by
 *     the Free Software Foundation, either version 3 of the License, or
 *     (at your option) any later version.
 * b) the terms of the Apache License
 *
 * You should have received a copy of both licenses in LICENCE.LGPL and
 * LICENCE.APACHE. Please refer to those files for details.
 *
 * JavaParser is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Lesser General Public License for more details.
 */

package com.github.javaparser.symbolsolver.reflectionmodel;

import com.github.javaparser.resolution.declarations.ResolvedDeclaration;
import com.github.javaparser.resolution.declarations.ResolvedInterfaceDeclaration;
import com.github.javaparser.resolution.declarations.ResolvedMethodDeclaration;
import com.github.javaparser.resolution.declarations.ResolvedReferenceTypeDeclaration;
import com.github.javaparser.resolution.types.ResolvedReferenceType;
import com.github.javaparser.resolution.types.ResolvedTypeVariable;
import com.github.javaparser.symbolsolver.AbstractSymbolResolutionTest;
import com.github.javaparser.symbolsolver.model.resolution.TypeSolver;
import com.github.javaparser.symbolsolver.model.typesystem.ReferenceTypeImpl;
import com.github.javaparser.symbolsolver.resolution.typesolvers.ReflectionTypeSolver;
import com.google.common.collect.ImmutableList;
import org.junit.jupiter.api.Test;

import java.util.*;
import java.util.stream.Collectors;

import static java.util.Comparator.*;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;

/**
 * Tests reflection-backed resolution of {@code java.util.List}: declared
 * methods and the full ancestor set.
 */
class ReflectionInterfaceDeclarationTest extends AbstractSymbolResolutionTest {

    /**
     * Spot-checks two declared methods of List (clear, contains) and verifies
     * both were actually encountered.
     */
    @Test
    void testGetDeclaredMethods() {
        TypeSolver typeResolver = new ReflectionTypeSolver();
        ResolvedReferenceTypeDeclaration list = new ReflectionInterfaceDeclaration(List.class, typeResolver);
        List<ResolvedMethodDeclaration> methods = list.getDeclaredMethods().stream()
                .sorted(comparing(ResolvedDeclaration::getName))
                .collect(Collectors.toList());
        int foundCount = 0;
        for (ResolvedMethodDeclaration method : methods) {
            switch (method.getName()) {
                case "clear":
                    assertTrue(method.isAbstract());
                    assertEquals(0, method.getNumberOfParams());
                    foundCount++;
                    break;
                case "contains":
                    // Use assertTrue for boolean checks, consistent with the
                    // "clear" case (was assertEquals(true, ...)).
                    assertTrue(method.isAbstract());
                    assertEquals(1, method.getNumberOfParams());
                    assertTrue(method.getParam(0).getType().isReferenceType());
                    assertEquals(Object.class.getCanonicalName(),
                                 method.getParam(0).getType().asReferenceType().getQualifiedName());
                    foundCount++;
                    break;
            }
        }
        // Guard against the switch silently matching nothing.
        assertEquals(2, foundCount);
    }

    /**
     * List's ancestors must be exactly Collection&lt;T&gt;, Iterable&lt;T&gt;
     * (both parameterized with List's own type variable) and Object.
     */
    @Test
    void testAllAncestors() {
        TypeSolver typeResolver = new ReflectionTypeSolver();
        ResolvedInterfaceDeclaration list = new ReflectionInterfaceDeclaration(List.class, typeResolver);
        Map<String, ResolvedReferenceType> ancestors = new HashMap<>();
        list.getAllAncestors().forEach(a -> ancestors.put(a.getQualifiedName(), a));
        assertEquals(3, ancestors.size());

        ResolvedTypeVariable typeVariable = new ResolvedTypeVariable(list.getTypeParameters().get(0));
        assertEquals(new ReferenceTypeImpl(new ReflectionInterfaceDeclaration(Collection.class, typeResolver),
                                           ImmutableList.of(typeVariable), typeResolver),
                     ancestors.get("java.util.Collection"));
        assertEquals(new ReferenceTypeImpl(new ReflectionClassDeclaration(Object.class, typeResolver), typeResolver),
                     ancestors.get("java.lang.Object"));
        assertEquals(new ReferenceTypeImpl(new ReflectionInterfaceDeclaration(Iterable.class, typeResolver),
                                           ImmutableList.of(typeVariable), typeResolver),
                     ancestors.get("java.lang.Iterable"));
    }
}
stvliu/tcc-transaction
galaxy-console/src/main/java/io/anyway/galaxy/console/dal/db/DsTypeContextHolder.java
package io.anyway.galaxy.console.dal.db; import io.anyway.galaxy.console.dal.dto.DataSourceInfoDto; import java.util.HashMap; import java.util.Map; /** * Created by xiong.j on 2016/8/1. */ public class DsTypeContextHolder { public final static String DEFAULT_SESSION_FACTORY = "default"; public final static String DYNAMIC_SESSION_FACTORY = "dynamic"; private final static String DS_INFO = "dsInfo"; private final static String CONTEXT_TYPE = "contextType"; private static final ThreadLocal<Map<Object, Object>> contextHolder = new ThreadLocal<Map<Object, Object>>(); public static void setDsInfo(DataSourceInfoDto dsInfo) { init(); contextHolder.get().put(DS_INFO, dsInfo); } public static DataSourceInfoDto getDsInfo() { init(); if (contextHolder.get().containsKey(DS_INFO)) { return (DataSourceInfoDto)contextHolder.get().get(DS_INFO); } return null; } public static void setContextType(String contextType) { init(); contextHolder.get().put(CONTEXT_TYPE, contextType); } public static String getContextType() { init(); if (contextHolder.get().containsKey(CONTEXT_TYPE)) { return (String) contextHolder.get().get(CONTEXT_TYPE); } return null; } public static void clear() { contextHolder.remove(); } private static void init(){ if (contextHolder.get() == null) { Map<Object, Object> map = new HashMap<Object, Object>(2); contextHolder.set(map); } } }
sundersc/cloudform
packages/cloudform-types/types/ec2/spotFleet.js
"use strict"; /* Generated from: * ap-northeast-1 (https://d33vqc0rt9ld30.cloudfront.net/latest/gzip/CloudFormationResourceSpecification.json), version 39.2.0, * ap-northeast-2 (https://d1ane3fvebulky.cloudfront.net/latest/gzip/CloudFormationResourceSpecification.json), version 39.2.0, * ap-northeast-3 (https://d2zq80gdmjim8k.cloudfront.net/latest/gzip/CloudFormationResourceSpecification.json), version 39.2.0, * ap-south-1 (https://d2senuesg1djtx.cloudfront.net/latest/gzip/CloudFormationResourceSpecification.json), version 39.2.0, * ap-southeast-1 (https://doigdx0kgq9el.cloudfront.net/latest/gzip/CloudFormationResourceSpecification.json), version 39.2.0, * ap-southeast-2 (https://d2stg8d246z9di.cloudfront.net/latest/gzip/CloudFormationResourceSpecification.json), version 39.2.0, * ca-central-1 (https://d2s8ygphhesbe7.cloudfront.net/latest/gzip/CloudFormationResourceSpecification.json), version 39.2.0, * eu-central-1 (https://d1mta8qj7i28i2.cloudfront.net/latest/gzip/CloudFormationResourceSpecification.json), version 39.2.0, * eu-west-1 (https://d3teyb21fexa9r.cloudfront.net/latest/gzip/CloudFormationResourceSpecification.json), version 39.2.0, * eu-west-2 (https://d1742qcu2c1ncx.cloudfront.net/latest/gzip/CloudFormationResourceSpecification.json), version 39.2.0, * eu-west-3 (https://d2d0mfegowb3wk.cloudfront.net/latest/gzip/CloudFormationResourceSpecification.json), version 39.2.0, * sa-east-1 (https://d3c9jyj3w509b0.cloudfront.net/latest/gzip/CloudFormationResourceSpecification.json), version 39.2.0, * us-east-1 (https://d1uauaxba7bl26.cloudfront.net/latest/gzip/CloudFormationResourceSpecification.json), version 39.2.0, * us-east-2 (https://dnwj8swjjbsbt.cloudfront.net/latest/gzip/CloudFormationResourceSpecification.json), version 39.2.0, * us-west-1 (https://d68hl49wbnanq.cloudfront.net/latest/gzip/CloudFormationResourceSpecification.json), version 39.2.0, * us-west-2 (https://d201a2mn26r7lk.cloudfront.net/latest/gzip/CloudFormationResourceSpecification.json), 
version 39.2.0 */ Object.defineProperty(exports, "__esModule", { value: true }); const resource_1 = require("../resource"); class SpotPlacement { constructor(properties) { Object.assign(this, properties); } } exports.SpotPlacement = SpotPlacement; class LaunchTemplateOverrides { constructor(properties) { Object.assign(this, properties); } } exports.LaunchTemplateOverrides = LaunchTemplateOverrides; class IamInstanceProfileSpecification { constructor(properties) { Object.assign(this, properties); } } exports.IamInstanceProfileSpecification = IamInstanceProfileSpecification; class InstanceNetworkInterfaceSpecification { constructor(properties) { Object.assign(this, properties); } } exports.InstanceNetworkInterfaceSpecification = InstanceNetworkInterfaceSpecification; class SpotFleetLaunchSpecification { constructor(properties) { Object.assign(this, properties); } } exports.SpotFleetLaunchSpecification = SpotFleetLaunchSpecification; class ClassicLoadBalancersConfig { constructor(properties) { Object.assign(this, properties); } } exports.ClassicLoadBalancersConfig = ClassicLoadBalancersConfig; class SpotMaintenanceStrategies { constructor(properties) { Object.assign(this, properties); } } exports.SpotMaintenanceStrategies = SpotMaintenanceStrategies; class SpotFleetTagSpecification { constructor(properties) { Object.assign(this, properties); } } exports.SpotFleetTagSpecification = SpotFleetTagSpecification; class PrivateIpAddressSpecification { constructor(properties) { Object.assign(this, properties); } } exports.PrivateIpAddressSpecification = PrivateIpAddressSpecification; class SpotCapacityRebalance { constructor(properties) { Object.assign(this, properties); } } exports.SpotCapacityRebalance = SpotCapacityRebalance; class EbsBlockDevice { constructor(properties) { Object.assign(this, properties); } } exports.EbsBlockDevice = EbsBlockDevice; class LoadBalancersConfig { constructor(properties) { Object.assign(this, properties); } } exports.LoadBalancersConfig = 
LoadBalancersConfig; class FleetLaunchTemplateSpecification { constructor(properties) { Object.assign(this, properties); } } exports.FleetLaunchTemplateSpecification = FleetLaunchTemplateSpecification; class TargetGroup { constructor(properties) { Object.assign(this, properties); } } exports.TargetGroup = TargetGroup; class SpotFleetMonitoring { constructor(properties) { Object.assign(this, properties); } } exports.SpotFleetMonitoring = SpotFleetMonitoring; class ClassicLoadBalancer { constructor(properties) { Object.assign(this, properties); } } exports.ClassicLoadBalancer = ClassicLoadBalancer; class LaunchTemplateConfig { constructor(properties) { Object.assign(this, properties); } } exports.LaunchTemplateConfig = LaunchTemplateConfig; class SpotFleetRequestConfigData { constructor(properties) { Object.assign(this, properties); } } exports.SpotFleetRequestConfigData = SpotFleetRequestConfigData; class InstanceIpv6Address { constructor(properties) { Object.assign(this, properties); } } exports.InstanceIpv6Address = InstanceIpv6Address; class TargetGroupsConfig { constructor(properties) { Object.assign(this, properties); } } exports.TargetGroupsConfig = TargetGroupsConfig; class GroupIdentifier { constructor(properties) { Object.assign(this, properties); } } exports.GroupIdentifier = GroupIdentifier; class BlockDeviceMapping { constructor(properties) { Object.assign(this, properties); } } exports.BlockDeviceMapping = BlockDeviceMapping; class SpotFleet extends resource_1.ResourceBase { constructor(properties) { super('AWS::EC2::SpotFleet', properties); } } exports.default = SpotFleet; SpotFleet.SpotPlacement = SpotPlacement; SpotFleet.LaunchTemplateOverrides = LaunchTemplateOverrides; SpotFleet.IamInstanceProfileSpecification = IamInstanceProfileSpecification; SpotFleet.InstanceNetworkInterfaceSpecification = InstanceNetworkInterfaceSpecification; SpotFleet.SpotFleetLaunchSpecification = SpotFleetLaunchSpecification; SpotFleet.ClassicLoadBalancersConfig = 
ClassicLoadBalancersConfig; SpotFleet.SpotMaintenanceStrategies = SpotMaintenanceStrategies; SpotFleet.SpotFleetTagSpecification = SpotFleetTagSpecification; SpotFleet.PrivateIpAddressSpecification = PrivateIpAddressSpecification; SpotFleet.SpotCapacityRebalance = SpotCapacityRebalance; SpotFleet.EbsBlockDevice = EbsBlockDevice; SpotFleet.LoadBalancersConfig = LoadBalancersConfig; SpotFleet.FleetLaunchTemplateSpecification = FleetLaunchTemplateSpecification; SpotFleet.TargetGroup = TargetGroup; SpotFleet.SpotFleetMonitoring = SpotFleetMonitoring; SpotFleet.ClassicLoadBalancer = ClassicLoadBalancer; SpotFleet.LaunchTemplateConfig = LaunchTemplateConfig; SpotFleet.SpotFleetRequestConfigData = SpotFleetRequestConfigData; SpotFleet.InstanceIpv6Address = InstanceIpv6Address; SpotFleet.TargetGroupsConfig = TargetGroupsConfig; SpotFleet.GroupIdentifier = GroupIdentifier; SpotFleet.BlockDeviceMapping = BlockDeviceMapping;
mijiga/Stax
app/src/main/java/com/hover/stax/bounties/BountyEmailFragment.java
<filename>app/src/main/java/com/hover/stax/bounties/BountyEmailFragment.java package com.hover.stax.bounties; import android.os.Bundle; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import androidx.annotation.NonNull; import androidx.annotation.Nullable; import androidx.fragment.app.Fragment; import androidx.navigation.fragment.NavHostFragment; import com.amplitude.api.Amplitude; import com.hover.stax.R; import com.hover.stax.navigation.NavigationInterface; import com.hover.stax.utils.Utils; import com.hover.stax.views.AbstractStatefulInput; import com.hover.stax.views.StaxTextInputLayout; import java.lang.ref.WeakReference; public class BountyEmailFragment extends Fragment implements NavigationInterface, View.OnClickListener, BountyAsyncCaller.AsyncResponseListener { private StaxTextInputLayout emailInput; @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setRetainInstance(true); } @Nullable @Override public View onCreateView(@NonNull LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) { return inflater.inflate(R.layout.fragment_bounty_email, container, false); } @Override public void onViewCreated(@NonNull View view, @Nullable Bundle savedInstanceState) { super.onViewCreated(view, savedInstanceState); emailInput = view.findViewById(R.id.emailInput); emailInput.setText(Utils.getString(BountyActivity.EMAIL_KEY, getContext())); view.findViewById(R.id.continueEmailBountyButton).setOnClickListener(this); } @Override public void onClick(View v) { Amplitude.getInstance().logEvent(getString(R.string.clicked_bounty_email_continue_btn)); if (validates()) { emailInput.setEnabled(false); new BountyAsyncCaller(new WeakReference<>(getContext()), this).execute(emailInput.getText()); emailInput.setState(getString(R.string.bounty_uploading_email), AbstractStatefulInput.INFO); } else{ emailInput.setState(getString(R.string.bounty_email_error), 
AbstractStatefulInput.ERROR); } } private boolean validates() { if (emailInput.getText() == null) return false; String email = emailInput.getText().replace(" ", ""); return email.matches("(?:[A-Za-z0-9!#$%&'*+/=?^_`{|}~-]+(?:\\.[a-z0-9!#$%&'*+/=?^_`{|}~-]+)*|\"(?:[\\x01-\\x08\\x0b\\x0c\\x0e-\\x1f\\x21\\x23-\\x5b\\x5d-\\x7f]|\\\\[\\x01-\\x09\\x0b\\x0c\\x0e-\\x7f])*\")@(?:(?:[a-z0-9](?:[a-z0-9-]*[a-z0-9])?\\.)+[a-z0-9](?:[a-z0-9-]*[a-z0-9])?|\\[(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?|[a-z0-9-]*[a-z0-9]:(?:[\\x01-\\x08\\x0b\\x0c\\x0e-\\x1f\\x21-\\x5a\\x53-\\x7f]|\\\\[\\x01-\\x09\\x0b\\x0c\\x0e-\\x7f])+)])"); } @Override public void onComplete(Integer responseCode) { if (responseCode >= 200 && responseCode < 300) saveAndContinue(); else { setEmailError(); } } private void setEmailError() { Amplitude.getInstance().logEvent(getString(R.string.bounty_email_err,getString(R.string.bounty_api_internet_error))); emailInput.setEnabled(true); emailInput.setState(getString(R.string.bounty_api_internet_error), AbstractStatefulInput.ERROR); } private void saveAndContinue() { Amplitude.getInstance().logEvent(getString(R.string.bounty_email_success)); Utils.saveString(BountyActivity.EMAIL_KEY, emailInput.getText(), getContext()); NavHostFragment.findNavController(this).navigate(R.id.bountyListFragment); } }
loyada/typed-py
tests/schema_mapping/expected/generated_example1.py
from typedpy import * class Foo(Structure): s = String() _required = ['s'] # ******************** class Example1(Structure): c = OneOf(fields=[Number(multiplesOf=5, minimum=-10, maximum=20), Integer(), Number(minimum=1e-06), String()]) d = NotField(fields=[Number(multiplesOf=5, minimum=-10, maximum=20), String()]) e = AllOf(fields=[]) broken = AllOf(fields=[String(), Integer()]) f = NotField(fields=[Number()]) g = AnyOf(fields=[Foo, Integer()]) a = AllOf(fields=[Number(multiplesOf=5, minimum=-10, maximum=20), Integer(), Number(minimum=1e-06)]) b = AnyOf(fields=[Number(minimum=-10, maximum=20), Integer(), Number(minimum=1e-06), String()]) values = Enum(values=['one', 'two', 'three']) m = Map(items=[String(), Foo]) _required = []
timfel/netbeans
webcommon/javascript.v8debug/src/org/netbeans/modules/javascript/v8debug/ScriptsHandler.java
<gh_stars>1000+ /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.netbeans.modules.javascript.v8debug; import java.io.File; import java.io.IOException; import java.net.URISyntaxException; import java.net.URL; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.logging.Level; import java.util.logging.Logger; import org.netbeans.api.annotations.common.CheckForNull; import org.netbeans.api.annotations.common.NonNull; import org.netbeans.api.annotations.common.NullAllowed; import org.netbeans.lib.v8debug.V8Command; import org.netbeans.lib.v8debug.V8Request; import org.netbeans.lib.v8debug.V8Response; import org.netbeans.lib.v8debug.V8Script; import org.netbeans.lib.v8debug.commands.Scripts; import org.netbeans.modules.javascript2.debug.sources.SourceContent; import org.netbeans.modules.javascript2.debug.sources.SourceFilesCache; import org.netbeans.modules.web.common.sourcemap.SourceMapsScanner; import org.netbeans.modules.web.common.sourcemap.SourceMapsTranslator; import org.openide.filesystems.FileObject; import org.openide.filesystems.FileUtil; import org.openide.filesystems.URLMapper; 
import org.openide.util.NbBundle; /** * * @author <NAME> */ public class ScriptsHandler { private static final Logger LOG = Logger.getLogger(ScriptsHandler.class.getName()); // The length of the node.js wrapper header: require('module').wrapper[0] private static final int DEFAULT_FIRST_LINE_COLUMN_SHIFT = 62; private static final boolean USE_SOURCE_MAPS = Boolean.parseBoolean(System.getProperty("javascript.debugger.useSourceMaps", "true")); private final Map<Long, V8Script> scriptsById = new HashMap<>(); private final Map<URL, V8Script> scriptsByURL = new HashMap<>(); private final Map<URL, Integer> scriptsFirstLineShifts = new HashMap<>(); private final boolean doPathTranslation; private final int numPrefixes; @NullAllowed private final String[] localPathPrefixes; private final char localPathSeparator; @NullAllowed private final FileObject[] localRoots; @NullAllowed private final FileObject[] localPathExclusionFilters; @NullAllowed private final String[] serverPathPrefixes; private final char serverPathSeparator; private final String remotePathPrefix; private final V8Debugger dbg; private final SourceMapsTranslator smt; ScriptsHandler(@NullAllowed List<String> localPaths, @NullAllowed List<String> serverPaths, Collection<String> localPathExclusionFilters, @NullAllowed V8Debugger dbg) { if (dbg != null) { this.remotePathPrefix = dbg.getHost()+"_"+dbg.getPort()+"/"; } else { // dbg can be null in tests this.remotePathPrefix = ""; } if (!localPaths.isEmpty() && !serverPaths.isEmpty()) { this.doPathTranslation = true; int n = localPaths.size(); this.numPrefixes = n; this.localPathPrefixes = new String[n]; this.serverPathPrefixes = new String[n]; for (int i = 0; i < n; i++) { this.localPathPrefixes[i] = stripSeparator(localPaths.get(i)); } this.localPathSeparator = findSeparator(localPaths.get(0)); for (int i = 0; i < n; i++) { this.serverPathPrefixes[i] = stripSeparator(serverPaths.get(i)); } this.serverPathSeparator = findSeparator(serverPaths.get(0)); } else { 
this.doPathTranslation = false; this.localPathPrefixes = this.serverPathPrefixes = null; this.localPathSeparator = this.serverPathSeparator = 0; this.numPrefixes = 0; } if (!localPaths.isEmpty()) { FileObject[] lroots = new FileObject[localPaths.size()]; int i = 0; for (String localPath : localPaths) { FileObject localRoot = FileUtil.toFileObject(FileUtil.normalizeFile(new File(localPath))); if (localRoot != null) { lroots[i++] = localRoot; } } if (i < localPaths.size()) { lroots = Arrays.copyOf(lroots, i); } this.localRoots = lroots; if (USE_SOURCE_MAPS) { this.smt = SourceMapsScanner.getInstance().scan(this.localRoots); } else { this.smt = null; } } else { this.localRoots = null; if (USE_SOURCE_MAPS) { this.smt = SourceMapsTranslator.create(); } else { this.smt = null; } } if (!localPathExclusionFilters.isEmpty()) { FileObject[] lpefs = new FileObject[localPathExclusionFilters.size()]; int i = 0; for (String lpef : localPathExclusionFilters) { FileObject localRoot = FileUtil.toFileObject(new File(lpef)); if (localRoot != null) { lpefs[i++] = localRoot; } else { lpefs = Arrays.copyOf(lpefs, lpefs.length - 1); } } this.localPathExclusionFilters = (lpefs.length > 0) ? 
lpefs : null; } else { this.localPathExclusionFilters = null; } LOG.log(Level.FINE, "ScriptsHandler: doPathTranslation = {0}, localPathPrefixes = {1}, separator = {2}, "+ "serverPathPrefixes = {3}, separator = {4}, "+ "localRoots = {5}, localPathExclusionFilters = {6}.", new Object[]{doPathTranslation, Arrays.toString(localPathPrefixes), localPathSeparator, Arrays.toString(serverPathPrefixes), serverPathSeparator, Arrays.toString(this.localRoots), Arrays.toString(this.localPathExclusionFilters) }); this.dbg = dbg; } void add(V8Script script) { synchronized (scriptsById) { scriptsById.put(script.getId(), script); } } void add(V8Script[] scripts) { synchronized (scriptsById) { for (V8Script script : scripts) { scriptsById.put(script.getId(), script); } } } void remove(long scriptId) { V8Script removed; synchronized (scriptsById) { removed = scriptsById.remove(scriptId); } if (removed != null) { URL removedURL = null; synchronized (scriptsByURL) { for (Map.Entry<URL, V8Script> entry : scriptsByURL.entrySet()) { if (removed == entry.getValue()) { removedURL = entry.getKey(); scriptsByURL.remove(removedURL); break; } } } if (removedURL != null) { synchronized (scriptsFirstLineShifts) { scriptsFirstLineShifts.remove(removedURL); } } } } @CheckForNull public SourceMapsTranslator getSourceMapsTranslator() { return smt; } @CheckForNull public V8Script getScript(long id) { synchronized (scriptsById) { return scriptsById.get(id); } } @NonNull public Collection<V8Script> getScripts() { synchronized (scriptsById) { return new ArrayList<>(scriptsById.values()); } } public boolean containsLocalFile(FileObject fo) { if (fo == null) { return false; } if (SourceFilesCache.URL_PROTOCOL.equals(fo.toURL().getProtocol())) { // virtual file created from source content return true; } if (localPathExclusionFilters != null) { for (FileObject lpef : localPathExclusionFilters) { if (FileUtil.isParentOf(lpef, fo)) { return false; } } } if (localRoots == null) { return true; } for (FileObject 
localRoot : localRoots) { if (FileUtil.isParentOf(localRoot, fo)) { return true; } } return false; } public boolean containsRemoteFile(URL url) { if (!SourceFilesCache.URL_PROTOCOL.equals(url.getProtocol())) { return false; } String path; try { path = url.toURI().getPath(); } catch (URISyntaxException usex) { return false; } int l = path.length(); int index = 0; while (index < l && path.charAt(index) == '/') { index++; } int begin = path.indexOf('/', index); if (begin > 0) { // path.substring(begin + 1).startsWith(remotePathPrefix) return path.regionMatches(begin + 1, remotePathPrefix, 0, remotePathPrefix.length()); } else { return false; } } @CheckForNull public FileObject getFile(long scriptId) { V8Script script = getScript(scriptId); if (script == null) { return null; } else { return getFile(script); } } @NonNull public FileObject getFile(@NonNull V8Script script) { String name = script.getName(); if (name != null && script.getScriptType() == V8Script.Type.NORMAL) { File localFile = null; if (doPathTranslation) { try { String lp = getLocalPath(name); localFile = new File(lp); } catch (OutOfScope oos) { } } else { File f = new File(name); if (f.isAbsolute()) { localFile = f; } } if (localFile != null) { FileObject fo = FileUtil.toFileObject(localFile); if (fo != null) { synchronized (scriptsByURL) { scriptsByURL.put(fo.toURL(), script); } return fo; } } } if (name == null) { name = "unknown.js"; } // prepend <host>_<port>/ to the name. name = remotePathPrefix + name; String content = script.getSource(); URL sourceURL; if (content != null) { sourceURL = SourceFilesCache.getDefault().getSourceFile(name, content.hashCode(), content); } else { sourceURL = SourceFilesCache.getDefault().getSourceFile(name, 1234, new ScriptContentLoader(script, dbg)); } synchronized (scriptsByURL) { scriptsByURL.put(sourceURL, script); } return URLMapper.findFileObject(sourceURL); } /** * Find a known script by it's actual URL. 
* @param scriptURL Script's URL returned by {@link #getFile(org.netbeans.lib.v8debug.V8Script)} * @return the script or <code>null</code> when not found. */ @CheckForNull public V8Script findScript(@NonNull URL scriptURL) { synchronized (scriptsByURL) { return scriptsByURL.get(scriptURL); } } /** * Get a shift of columns on the first line of the script. * The scripts can have prepended an extra code on the first line, which was * not part of the original file. This change affects source maps. * Be sure to consider this shift when interpreting source map translations. * @param fo The script's file source. * @return a non-negative shift of columns on the first line. */ public int getScriptFirstLineColumnShift(FileObject fo) { URL url = fo.toURL(); if (SourceFilesCache.URL_PROTOCOL.equals(url.getProtocol())) { // Not a local file return DEFAULT_FIRST_LINE_COLUMN_SHIFT; } Integer shift = null; synchronized (scriptsFirstLineShifts) { shift = scriptsFirstLineShifts.get(url); } if (shift == null) { V8Script script = findScript(url); if (script == null) { return DEFAULT_FIRST_LINE_COLUMN_SHIFT; } // The shift should not be larger than the source start: String ss = script.getSourceStart(); String firstLine = null; try { List<String> lines = fo.asLines(); Iterator<String> linesIterator = lines.iterator(); if (linesIterator.hasNext()) { firstLine = linesIterator.next(); } } catch (IOException ex) {} if (firstLine == null) { // no lines shift = 0; } else { shift = findOffsetIn(ss, firstLine); if (shift < 0) { String content = script.getSource(); if (content == null) { try { content = new ScriptContentLoader(script, dbg).getContent(); } catch (IOException ex) {} } if (content != null) { shift = findOffsetIn(content, firstLine); } else { shift = DEFAULT_FIRST_LINE_COLUMN_SHIFT; } } } synchronized (scriptsFirstLineShifts) { scriptsFirstLineShifts.put(url, shift); } } return shift; } private static int findOffsetIn(String container, String text) { // Restrict the container to the 
first line only: int nc = container.length(); int nIndex = container.indexOf('\n'); if (nIndex < 0) { nIndex = nc; } int rIndex = container.indexOf('\r'); if (rIndex < 0) { rIndex = nc; } nc = Math.min(nIndex, rIndex); if (nc < container.length()) { container = container.substring(0, nc); } if (text.startsWith(container)) { return 0; } int nt = text.length(); for (int ic = 0; ic < nc; ic++) { int it = 0; int ict = ic; for (; it < nt && ict < nc; it++, ict++) { char c = container.charAt(ict); char t = text.charAt(it); if (c != t) { break; } } if (ict == nc) { return ic; } } return -1; // not found } @CheckForNull public String getServerPath(@NonNull FileObject fo) { String serverPath; File file = FileUtil.toFile(fo); if (file != null) { String localPath = file.getAbsolutePath(); try { serverPath = getServerPath(localPath); } catch (ScriptsHandler.OutOfScope oos) { serverPath = null; } } else { URL url = fo.toURL(); V8Script script = findScript(url); if (script != null) { serverPath = script.getName(); } else if (SourceFilesCache.URL_PROTOCOL.equals(url.getProtocol())) { String path = fo.getPath(); int begin = path.indexOf('/'); if (begin > 0) { path = path.substring(begin + 1); // subtract <host>_<port>/ : if (path.startsWith(remotePathPrefix)) { serverPath = path.substring(remotePathPrefix.length()); } else { serverPath = null; } } else { serverPath = null; } } else { serverPath = null; } } return serverPath; } @CheckForNull public String getServerPath(@NonNull URL url) { if (!SourceFilesCache.URL_PROTOCOL.equals(url.getProtocol())) { return null; } String path; try { path = url.toURI().getPath(); } catch (URISyntaxException usex) { return null; } int l = path.length(); int index = 0; while (index < l && path.charAt(index) == '/') { index++; } int begin = path.indexOf('/', index); if (begin > 0) { // path.substring(begin + 1).startsWith(remotePathPrefix) if (path.regionMatches(begin + 1, remotePathPrefix, 0, remotePathPrefix.length())) { path = path.substring(begin 
+ 1 + remotePathPrefix.length()); return path; } else { // Path with a different prefix return null; } } else { return null; } } public String getLocalPath(@NonNull String serverPath) throws OutOfScope { if (!doPathTranslation) { return serverPath; } else { for (int i = 0; i < numPrefixes; i++) { if (isChildOf(serverPathPrefixes[i], serverPath)) { return translate(serverPath, serverPathPrefixes[i], serverPathSeparator, localPathPrefixes[i], localPathSeparator); } } } throw new OutOfScope(serverPath, Arrays.toString(serverPathPrefixes)); } public String getServerPath(@NonNull String localPath) throws OutOfScope { if (!doPathTranslation) { return localPath; } else { for (int i = 0; i < numPrefixes; i++) { if (isChildOf(localPathPrefixes[i], localPath)) { return translate(localPath, localPathPrefixes[i], localPathSeparator, serverPathPrefixes[i], serverPathSeparator); } } } throw new OutOfScope(localPath, Arrays.toString(localPathPrefixes)); } public File[] getLocalRoots() { if (localRoots == null) { return new File[]{}; } int l = localRoots.length; File[] roots = new File[l]; for (int i = 0; i < l; i++) { roots[i] = FileUtil.toFile(localRoots[i]); } return roots; } private static boolean isChildOf(String parent, String child) { if (!child.startsWith(parent)) { return false; } int l = parent.length(); if (!isRootPath(parent)) { // When the parent is the root, do not do further checks. if (child.length() > l && !isSeparator(child.charAt(l))) { return false; } } return true; } private static String translate(String path, String pathPrefix, char pathSeparator, String otherPathPrefix, char otherPathSeparator) throws OutOfScope { if (!path.startsWith(pathPrefix)) { throw new OutOfScope(path, pathPrefix); } int l = pathPrefix.length(); if (!isRootPath(pathPrefix)) { // When the prefix is the root, do not do further checks. 
if (path.length() > l && !isSeparator(path.charAt(l))) { throw new OutOfScope(path, pathPrefix); } } while (path.length() > l && isSeparator(path.charAt(l))) { l++; } String otherPath = path.substring(l); if (pathSeparator != otherPathSeparator) { otherPath = otherPath.replace(pathSeparator, otherPathSeparator); } if (otherPath.isEmpty()) { return otherPathPrefix; } else { if (isRootPath(otherPathPrefix)) { // Do not append further slashes to the root return otherPathPrefix + otherPath; } else { return otherPathPrefix + otherPathSeparator + otherPath; } } } private static char findSeparator(String path) { if (path.indexOf('/') >= 0) { return '/'; } if (path.indexOf('\\') >= 0) { return '\\'; } return '/'; } private static boolean isSeparator(char c) { return c == '/' || c == '\\'; } private static String stripSeparator(String path) { if (isRootPath(path)) { // Do not remove slashes the root return path; } while (path.length() > 1 && (path.endsWith("/") || path.endsWith("\\"))) { path = path.substring(0, path.length() - 1); } return path; } private static boolean isRootPath(String path) { if ("/".equals(path)) { return true; } if (path.length() == 4 && path.endsWith(":\\\\")) { // "C:\\" return true; } return false; } public static final class OutOfScope extends Exception { private OutOfScope(String path, String scope) { super(path); } } private static final class ScriptContentLoader implements SourceContent, V8Debugger.CommandResponseCallback { private final V8Script script; private final V8Debugger dbg; private String content; private final Object contentLock = new Object(); private String contentLoadError; public ScriptContentLoader(V8Script script, V8Debugger dbg) { this.script = script; this.dbg = dbg; } @NbBundle.Messages({ "ERR_NoSourceRequest=No source request has been sent.", "ERR_Interrupted=Interrupted" }) @Override public String getContent() throws IOException { if (content != null) { return content; } V8Script.Type st = script.getScriptType(); 
V8Script.Types types = new V8Script.Types(st.NATIVE == st, st.EXTENSION == st, st.NORMAL == st); Scripts.Arguments sa = new Scripts.Arguments(types, new long[] { script.getId() }, true, null); V8Request request = dbg.sendCommandRequest(V8Command.Scripts, sa, this); if (request == null) { throw new IOException(Bundle.ERR_NoSourceRequest()); } synchronized (contentLock) { if (content == null && contentLoadError == null) { try { contentLock.wait(); } catch (InterruptedException iex) { throw new IOException(Bundle.ERR_Interrupted(), iex); } } if (contentLoadError != null) { throw new IOException(contentLoadError); } else { return content; } } } @Override public long getLength() { return script.getSourceLength().getValue(); } @NbBundle.Messages({ "ERR_ScriptFailedToLoad=The script failed to load.", "ERR_ScriptHasNoSource=The script has no source." }) @Override public void notifyResponse(V8Request request, V8Response response) { V8Script[] scripts; if (response != null) { Scripts.ResponseBody srb = (Scripts.ResponseBody) response.getBody(); scripts = srb.getScripts(); } else { scripts = null; } synchronized (contentLock) { if (scripts == null || scripts.length == 0) { contentLoadError = Bundle.ERR_ScriptFailedToLoad(); } else { String source = scripts[0].getSource(); if (source == null) { contentLoadError = Bundle.ERR_ScriptHasNoSource(); } else { content = source; } } contentLock.notifyAll(); } } } }
waqaradil/AdonisRepo
app/Exceptions/NotFoundException.js
'use strict' const {LogicalException} = require('@adonisjs/generic-exceptions') const message = 'Page Not Found!' const status = 404 class NotFoundException extends LogicalException { /** * Handle this exception by itself */ // handle () {} constructor() { super(message, status) } } module.exports = NotFoundException
Rhohoman/git-gainz-front
node_modules/@react-navigation/core/dist/views/withNavigationFocus.js
<gh_stars>1-10 function _extends() { _extends = Object.assign || function (target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]; for (var key in source) { if (Object.prototype.hasOwnProperty.call(source, key)) { target[key] = source[key]; } } } return target; }; return _extends.apply(this, arguments); } function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } import React from 'react'; import hoistStatics from 'hoist-non-react-statics'; import withNavigation from './withNavigation'; export default function withNavigationFocus(Component) { class ComponentWithNavigationFocus extends React.Component { constructor(props) { super(props); this.subscriptions = [props.navigation.addListener('didFocus', () => this.setState({ isFocused: true })), props.navigation.addListener('willBlur', () => this.setState({ isFocused: false }))]; this.state = { isFocused: props.navigation ? props.navigation.isFocused() : false }; } componentWillUnmount() { this.subscriptions.forEach(sub => sub.remove()); } render() { return React.createElement(Component, _extends({}, this.props, { isFocused: this.state.isFocused, ref: this.props.onRef })); } } _defineProperty(ComponentWithNavigationFocus, "displayName", `withNavigationFocus(${Component.displayName || Component.name})`); return hoistStatics(withNavigation(ComponentWithNavigationFocus, { forwardRef: false }), Component); } //# sourceMappingURL=withNavigationFocus.js.map
steelONIONknight/bolt
training/src/compiler/training/compiler/Layers.h
// Copyright (C) 2022. Huawei Technologies Co., Ltd. All rights reserved. // Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), // to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, // and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: // The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE // WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR // COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
#ifndef LAYERS_H
#define LAYERS_H

// Umbrella header: pulls in every layer, activation and loss implementation
// shipped with the training compiler so clients can include a single file.
// Include order preserved from the original; grouped by directory below.

// Basic (non-trainable) layers.
#include <training/base/layers/basic/ArgExtremumLayer.h>
#include <training/base/layers/basic/ArgMaxLayer.h>
#include <training/base/layers/basic/ArgMinLayer.h>
#include <training/base/layers/basic/AveragePoolLayer.h>
#include <training/base/layers/basic/BatchExpanderLayer.h>
#include <training/base/layers/basic/ClampLayer.h>
#include <training/base/layers/basic/ConcatenationLayer.h>
#include <training/base/layers/basic/CumSumLayer.h>
#include <training/base/layers/basic/DataLayer.h>
#include <training/base/layers/basic/DropoutLayer.h>
#include <training/base/layers/basic/DynamicDepthwiseConvolution2DLayer.h>
#include <training/base/layers/basic/ElementWiseCompareLayer.h>
#include <training/base/layers/basic/ElementWiseDivLayer.h>
#include <training/base/layers/basic/ElementWiseExtremumLayer.h>
#include <training/base/layers/basic/ElementWiseMaxLayer.h>
#include <training/base/layers/basic/ElementWiseMinLayer.h>
#include <training/base/layers/basic/ElementWiseMulLayer.h>
#include <training/base/layers/basic/ElementWiseSubLayer.h>
#include <training/base/layers/basic/ElementWiseSumLayer.h>
#include <training/base/layers/basic/ExpLayer.h>
#include <training/base/layers/basic/FakeQuantLayer.h>
#include <training/base/layers/basic/FixedBiasLayer.h>
#include <training/base/layers/basic/GlobalAveragePoolLayer.h>
#include <training/base/layers/basic/IndexFillLayer.h>
#include <training/base/layers/basic/L2NormLayer.h>
#include <training/base/layers/basic/L2SquaredNormLayer.h>
#include <training/base/layers/basic/LabelSmoothing.h>
#include <training/base/layers/basic/LogLayer.h>
#include <training/base/layers/basic/LossWrapperHelperLayer.h>
#include <training/base/layers/basic/MaskedFillLayer.h>
#include <training/base/layers/basic/MatMulLayer.h>
#include <training/base/layers/basic/MaxPoolLayer.h>
#include <training/base/layers/basic/PaddingLayer.h>
#include <training/base/layers/basic/PositionalEncoding.h>
#include <training/base/layers/basic/RSqrtLayer.h>
#include <training/base/layers/basic/RandomChoiceLayer.h>
#include <training/base/layers/basic/RandomSelectLayer.h>
#include <training/base/layers/basic/RandomTensorLayer.h>
#include <training/base/layers/basic/ReduceArithmeticLayer.h>
#include <training/base/layers/basic/ReduceBatchMeanLayer.h>
#include <training/base/layers/basic/ReduceExtremumLayer.h>
#include <training/base/layers/basic/ReduceMaxLayer.h>
#include <training/base/layers/basic/ReduceMeanLayer.h>
#include <training/base/layers/basic/ReduceMinLayer.h>
#include <training/base/layers/basic/ReduceNonZeroLayer.h>
#include <training/base/layers/basic/ReduceStdLayer.h>
#include <training/base/layers/basic/ReduceSumLayer.h>
#include <training/base/layers/basic/RepeatInterleaveLayer.h>
#include <training/base/layers/basic/ReshapeLayer.h>
#include <training/base/layers/basic/ReverseLayer.h>
#include <training/base/layers/basic/RollLayer.h>
#include <training/base/layers/basic/ScaleLayer.h>
#include <training/base/layers/basic/SelectLayer.h>
#include <training/base/layers/basic/SlicerLayer.h>
#include <training/base/layers/basic/SplitterLayer.h>
#include <training/base/layers/basic/SqrtLayer.h>
#include <training/base/layers/basic/SquareLayer.h>
#include <training/base/layers/basic/TensorLayer.h>
#include <training/base/layers/basic/TileLayer.h>
#include <training/base/layers/basic/TransposeLayer.h>

// Trainable layers (carry learnable parameters).
#include <training/base/layers/basic/trainable/Batchnorm.h>
#include <training/base/layers/basic/trainable/Convolution1DLayer.h>
#include <training/base/layers/basic/trainable/Convolution2DLayer.h>
#include <training/base/layers/basic/trainable/ConvolutionDepthwiseLayer.h>
#include <training/base/layers/basic/trainable/Embedding.h>
#include <training/base/layers/basic/trainable/LayerNorm.h>
#include <training/base/layers/basic/trainable/LayerNorm2D.h>
#include <training/base/layers/basic/trainable/LinearLayer.h>
#include <training/base/layers/basic/trainable/TransposedConvolution1DLayer.h>
#include <training/base/layers/basic/trainable/TransposedConvolution2DLayer.h>

// Composite layers assembled from simpler building blocks.
#include <training/base/layers/composite/AdditiveAttentionLayer.h>
#include <training/base/layers/composite/rnn/GRUFusedGatesCalcLayer.h>
#include <training/base/layers/composite/rnn/GRULayer.h>

// Activation functions.
#include <training/base/layers/activations/GeLUActivation.h>
#include <training/base/layers/activations/HSigmoidActivation.h>
#include <training/base/layers/activations/HSwishActivation.h>
#include <training/base/layers/activations/LeakyReLUActivation.h>
#include <training/base/layers/activations/LogSoftMaxActivation.h>
#include <training/base/layers/activations/ReLUActivation.h>
#include <training/base/layers/activations/SigmoidActivation.h>
#include <training/base/layers/activations/SoftMaxActivation.h>
#include <training/base/layers/activations/SwishActivation.h>
#include <training/base/layers/activations/TanhActivation.h>

// Loss functions.
#include <training/base/loss/BinaryCrossEntropyLoss.h>
#include <training/base/loss/CrossEntropyLoss.h>
#include <training/base/loss/KLDivLoss.h>
#include <training/base/loss/L1Loss.h>
#include <training/base/loss/MSELoss.h>
#include <training/base/loss/NegativeLogLikelihoodLoss.h>
#include <training/base/loss/SigmoidCrossEntropyLoss.h>
#include <training/base/loss/SoftmaxCrossEntropyLoss.h>

#endif // LAYERS_H
navikt/spsak
vtp-mock/autotest/src/main/java/no/nav/foreldrepenger/autotest/klienter/fpsak/behandlinger/dto/aksjonspunktbekreftelse/VurderEktefellesBarnBekreftelse.java
package no.nav.foreldrepenger.autotest.klienter.fpsak.behandlinger.dto.aksjonspunktbekreftelse; import no.nav.foreldrepenger.autotest.klienter.fpsak.behandlinger.dto.behandling.Behandling; import no.nav.foreldrepenger.autotest.klienter.fpsak.fagsak.dto.Fagsak; @BekreftelseKode(kode="5005") public class VurderEktefellesBarnBekreftelse extends AksjonspunktBekreftelse{ protected Boolean ektefellesBarn; public VurderEktefellesBarnBekreftelse(Fagsak fagsak, Behandling behandling) { super(fagsak, behandling); // TODO Auto-generated constructor stub } public VurderEktefellesBarnBekreftelse bekreftBarnErEktefellesBarn() { ektefellesBarn = true; return this; } public VurderEktefellesBarnBekreftelse bekreftBarnErIkkeEktefellesBarn() { ektefellesBarn = false; return this; } }
lucianot/dealbook
spec/acceptance/manage_users_spec.rb
# Acceptance spec: role-based access to user administration.
# Relies on helpers from acceptance_helper (login_admin, login_mod,
# login_normal) and the User blueprint (User.make!).
require File.expand_path(File.dirname(__FILE__) + '/acceptance_helper')

feature 'manage users' do
  context 'admins' do
    scenario 'edit user role' do
      user = User.make!
      admin = login_admin
      # Navigation goes through the user's name link — presumably the header
      # profile menu — then into the admin "Manage users" page; confirm path.
      click_link user.full_name
      click_link 'Manage users'
      click_button "edit_#{user.id}"
      select 'Moderator', from: 'Role'
      click_button 'Update User'
      # Both the flash message and the updated role must be visible.
      page.should have_content 'User was successfully updated.'
      page.should have_content 'moderator'
    end
  end

  context 'mods' do
    scenario 'see users, but cannot edit' do
      user = User.make!
      mod = login_mod
      click_link user.full_name
      click_link 'Manage users'
      # Moderators can list users but get no per-user edit control.
      page.should have_link user.full_name
      page.should_not have_link "edit_#{user.id}"
    end
  end

  context 'users and guests' do
    scenario 'cannot access users' do
      normal = login_normal
      # Sanity check on the fixture role, then assert no admin entry point.
      normal.role.should_not == 'admin'
      page.should_not have_link 'Admin'
    end
  end
end
paullewallencom/jboss-978-1-7821-6018-2
_/0182OS_02_code/src/main/java/org/cdibook/chapter2/qualifiermembers/Category.java
package org.cdibook.chapter2.qualifiermembers;

/**
 * Literary category of a book. Used as a qualifier member value
 * (see the enclosing {@code qualifiermembers} package) to distinguish
 * fiction from non-fiction.
 */
public enum Category {
    FICTION, NONFICTION;
}
DionysisChristopoulos/google-research
readtwice/models/narrative_qa/evaluation.py
# coding=utf-8
# Copyright 2021 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Evaluation script for NarrativeQA dataset."""

from nlgeval.pycocoevalcap.bleu.bleu import Bleu
from nlgeval.pycocoevalcap.cider.cider import Cider
from nlgeval.pycocoevalcap.meteor.meteor import Meteor
from nlgeval.pycocoevalcap.rouge.rouge import Rouge


def evaluate_narrative_qa(ground_truth, predicted_answers):
  """Scores NarrativeQA predictions against references.

  Args:
    ground_truth: dict mapping question id -> list of reference answers.
    predicted_answers: dict mapping question id -> predicted answer string.

  Returns:
    dict with a 'common' entry (number of ids present in both inputs) plus
    one entry per metric (Bleu_1..4, ROUGE_L, CIDEr), each scaled by 100.
  """

  def _clean(text):
    # Identical normalization for references and hypotheses.
    return text.lower().rstrip(' .').strip()

  shared_ids = [qid for qid in predicted_answers if qid in ground_truth]
  references = {qid: [_clean(answer) for answer in ground_truth[qid]]
                for qid in shared_ids}
  hypotheses = {qid: [_clean(predicted_answers[qid])] for qid in shared_ids}

  results = {'common': len(shared_ids)}
  metric_specs = [
      (Bleu(4), ['Bleu_1', 'Bleu_2', 'Bleu_3', 'Bleu_4']),
      (Rouge(), 'ROUGE_L'),
      (Cider(), 'CIDEr'),
  ]
  for scorer, metric_names in metric_specs:
    overall, _ = scorer.compute_score(references, hypotheses)
    if isinstance(metric_names, list):
      # BLEU returns one corpus-level score per n-gram order.
      for value, name in zip(overall, metric_names):
        results[name] = value * 100
    else:
      results[metric_names] = overall * 100
    if isinstance(scorer, Meteor):
      # Meteor spawns a Java subprocess that must be shut down explicitly.
      scorer.close()
  return results
alexey-anufriev/intellij-community
platform/vcs-impl/src/com/intellij/openapi/diff/impl/patch/ApplyPatchException.java
<filename>platform/vcs-impl/src/com/intellij/openapi/diff/impl/patch/ApplyPatchException.java // Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.intellij.openapi.diff.impl.patch; public final class ApplyPatchException extends Exception { public ApplyPatchException(String s) { super(s); } }
nawien-sharma/keyczar
java/code/src/org/keyczar/enums/KeyStatus.java
<reponame>nawien-sharma/keyczar /* * Copyright 2008 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.keyczar.enums; /** * Encodes different possible statuses of keys: * <ul> * <li>Primary: This key can verify or decrypt existing data and can sign or * encrypt new data. * <li>Active: This key can only verify or decrypt existing data. * <li>Inactive: This key can only verify or decrypt existing * data and may be revoked at any time. * </ul> * * <p>JSON Representation is one of the strings: * <ul> * <li>"PRIMARY" * <li>"ACTIVE" * <li>"INACTIVE" * </ul> * * @author <EMAIL> (<NAME>) * @author <EMAIL> (<NAME>) * */ public enum KeyStatus { PRIMARY("primary"), ACTIVE("active"), INACTIVE("inactive"); private String name; private KeyStatus(String s) { name = s; } String getName() { return name; } public static KeyStatus getStatus(int value) { switch (value) { case 0: return PRIMARY; case 1: return ACTIVE; case 2: return INACTIVE; } return null; } public static KeyStatus getStatus(String name) { if (name != null) { if (name.equalsIgnoreCase(PRIMARY.getName())) { return PRIMARY; } else if (name.equalsIgnoreCase(ACTIVE.getName())) { return ACTIVE; } else if (name.equalsIgnoreCase(INACTIVE.getName())) { return INACTIVE; } } return ACTIVE; // default status } }
autoint/fosstars-rating-core
src/main/java/com/sap/sgs/phosphor/fosstars/model/value/LgtmGradeValue.java
package com.sap.sgs.phosphor.fosstars.model.value; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonGetter; import com.fasterxml.jackson.annotation.JsonProperty; import com.sap.sgs.phosphor.fosstars.model.Feature; import java.util.Objects; public class LgtmGradeValue extends AbstractValue<LgtmGrade> { /** * The value. */ private final LgtmGrade value; /** * Initializes an {@link LgtmGradeValue} for a feature. * * @param feature The feature. * @param value The value. */ @JsonCreator public LgtmGradeValue( @JsonProperty("feature") Feature feature, @JsonProperty("value") LgtmGrade value) { super(feature); Objects.requireNonNull(value, "Value can't be null!"); this.value = value; } @Override @JsonGetter("value") public LgtmGrade get() { return value; } @Override public boolean equals(Object o) { if (this == o) { return true; } if (o instanceof LgtmGradeValue == false) { return false; } if (!super.equals(o)) { return false; } LgtmGradeValue enumValue = (LgtmGradeValue) o; return Objects.equals(value, enumValue.value); } @Override public int hashCode() { return Objects.hash(super.hashCode(), value); } @Override public String toString() { return value.toString(); } }
cyonglong/waimai
linjiashop-mobile/src/view/order/expressInfo/expressInfo.js
// Vue component: shows the shipping/express tracking info for one order.
import order from '@/api/orders'
import { Cell, CellGroup, Row, Col, Step, Steps, NavBar, Tab, Tabs, Tabbar, TabbarItem, Button } from 'vant';

// Base API URL from the build environment.
// NOTE(review): not referenced anywhere in this component — presumably kept
// for templates or future use; confirm before removing.
const baseApi = process.env.VUE_APP_BASE_API

export default {
  // Register the Vant widgets used by the template under their own names.
  components: {
    [Cell.name]: Cell,
    [CellGroup.name]: CellGroup,
    [Row.name]: Row,
    [Col.name]: Col,
    [Step.name]: Step,
    [Steps.name]: Steps,
    [NavBar.name]: NavBar,
    [Tab.name]: Tab,
    [Tabbar.name]: Tabbar,
    [Tabs.name]: Tabs,
    [TabbarItem.name]: TabbarItem,
    [Button.name]: Button
  },
  data() {
    return {
      // Index of the highlighted bottom tab bar item.
      activeFooter: 3,
      // Page title, filled as "<orderSn>(<statusName>)" once the order loads.
      title: '',
      // Minimal order shape so the template can render before data arrives.
      order: { orderSn: '', address: { name: '' } },
      // Tracking payload returned by the backend.
      expressInfo: {}
    };
  },
  mounted() {
    this.init()
  },
  methods: {
    // Reads the order number from the route and triggers the fetch.
    init() {
      this.order.orderSn = this.$route.params.orderSn
      this.getData()
    },
    // Loads the order plus its express/tracking info from the API.
    getData() {
      order.getExpressInfo(this.order.orderSn).then(response => {
        this.order = response.data.order
        this.expressInfo = response.data.expressInfo
        this.title = this.order.orderSn + '(' + this.order.statusName + ')'
      })
    },
    // Nav bar back button: return to the previous page.
    onClickLeft() {
      this.$router.go(-1)
    },
  }
}
laik/liqo
pkg/mutate/webhook_test.go
// Webhook mutation test suite: verifies toleration creation, NodeSelector
// computation/merging, and full pod mutation for each PodOffloadingStrategy.
package mutate

import (
	"fmt"
	"testing"

	. "github.com/onsi/ginkgo"
	. "github.com/onsi/ginkgo/extensions/table"
	. "github.com/onsi/gomega"
	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"

	offv1alpha1 "github.com/liqotech/liqo/apis/offloading/v1alpha1"
	liqoconst "github.com/liqotech/liqo/pkg/consts"
	testutils "github.com/liqotech/liqo/pkg/mutate/testUtils"
)

// TestWebhookManager wires the Ginkgo suite into the standard `go test` runner.
func TestWebhookManager(t *testing.T) {
	RegisterFailHandler(Fail)
	RunSpecs(t, "Webhook Suite")
}

var _ = Describe("Webhook", func() {
	var (
		// Toleration expected on pods allowed to run on Liqo virtual nodes.
		virtualNodeToleration = corev1.Toleration{
			Key:      liqoconst.VirtualNodeTolerationKey,
			Operator: corev1.TolerationOpExists,
			Effect:   corev1.TaintEffectNoExecute,
		}
	)

	Context("1 - Check the new created toleration according to the PodOffloadingStrategy", func() {
		// The Local strategy must not produce any toleration.
		emptyToleration := corev1.Toleration{}

		DescribeTable("3 Different type of PodOffloadingStrategy",
			func(strategy offv1alpha1.PodOffloadingStrategyType, expectedToleration corev1.Toleration) {
				By(fmt.Sprintf("Testing %s", strategy))
				toleration, err := createTolerationFromNamespaceOffloading(strategy)
				// Local offloading is expected to return an error.
				if strategy == offv1alpha1.LocalPodOffloadingStrategyType {
					Expect(err != nil).Should(BeTrue())
				}
				Expect(toleration.MatchToleration(&expectedToleration)).To(BeTrue())
			},
			Entry("LocalPodOffloadingStrategyType", offv1alpha1.LocalPodOffloadingStrategyType, emptyToleration),
			Entry("RemotePodOffloadingStrategyType", offv1alpha1.RemotePodOffloadingStrategyType, virtualNodeToleration),
			Entry("LocalAndRemotePodOffloadingStrategyType", offv1alpha1.LocalAndRemotePodOffloadingStrategyType, virtualNodeToleration),
		)
	})

	Context("2 - Check the NodeSelector imposed by the NamespaceOffloading", func() {
		// slice with 3 namespaceOffloading one for each PodOffloadingStrategy
		namespaceOffloadings := []offv1alpha1.NamespaceOffloading{
			testutils.GetNamespaceOffloading(offv1alpha1.LocalPodOffloadingStrategyType),
			testutils.GetNamespaceOffloading(offv1alpha1.RemotePodOffloadingStrategyType),
			testutils.GetNamespaceOffloading(offv1alpha1.LocalAndRemotePodOffloadingStrategyType),
		}
		// Expected selectors, index-aligned with namespaceOffloadings above;
		// the Local strategy yields an empty selector.
		nodeSelectors := []corev1.NodeSelector{
			{},
			testutils.GetImposedNodeSelector(offv1alpha1.RemotePodOffloadingStrategyType),
			testutils.GetImposedNodeSelector(offv1alpha1.LocalAndRemotePodOffloadingStrategyType),
		}

		DescribeTable("3 Different type of PodOffloadingStrategy",
			func(namespaceOffloading offv1alpha1.NamespaceOffloading, expectedNodeSelector corev1.NodeSelector) {
				By(fmt.Sprintf("Testing %s", namespaceOffloading.Spec.PodOffloadingStrategy))
				nodeSelector, err := createNodeSelectorFromNamespaceOffloading(&namespaceOffloading)
				// Local offloading is expected to return an error.
				if namespaceOffloading.Spec.PodOffloadingStrategy == offv1alpha1.LocalPodOffloadingStrategyType {
					Expect(err != nil).Should(BeTrue())
				}
				Expect(nodeSelector).To(Equal(expectedNodeSelector))
			},
			Entry("LocalPodOffloadingStrategyType", namespaceOffloadings[0], nodeSelectors[0]),
			Entry("RemotePodOffloadingStrategyType", namespaceOffloadings[1], nodeSelectors[1]),
			Entry("LocalAndRemotePodOffloadingStrategyType", namespaceOffloadings[2], nodeSelectors[2]),
		)
	})

	Context("3 - Check if the pod NodeSelector is correctly merged with the NamespaceOffloading NodeSelector", func() {
		It("Check the merged NodeSelector", func() {
			podNodeSelector := testutils.GetPodNodeSelector()
			imposedNodeSelector := testutils.GetImposedNodeSelector("")
			mergedNodeSelector := getMergedNodeSelector(&podNodeSelector, &imposedNodeSelector)
			expectedMergedNodeSelector := testutils.GetMergedNodeSelector("")
			Expect(mergedNodeSelector).To(Equal(expectedMergedNodeSelector))
		})
	})

	Context("4 - Check how the new NodeSelector is inserted into the pod", func() {
		// imposedNodeSelector that all Pod without NodeAffinity specified by user must have
		imposedNodeSelector := testutils.GetImposedNodeSelector("")
		// mergedNodeSelector is a merge of NodeSelector specified in NamespaceOffloading and NodeSelector
		// specified by the user
		mergedNodeSelector := testutils.GetMergedNodeSelector("")
		// A fake PodAffinity to test if it is preserved.
		podAffinity := corev1.PodAffinity{
			RequiredDuringSchedulingIgnoredDuringExecution: []corev1.PodAffinityTerm{{
				TopologyKey: "fake-value",
			}},
		}
		podNodeSelector := testutils.GetPodNodeSelector()
		// There are 6 pods:
		// 0 - Pod without Affinity.
		// 1 - Pod with Affinity but no NodeAffinity.
		// 2 - Pod with Affinity and PodAffinity, but no NodeAffinity.
		// 3 - Pod with Affinity and NodeAffinity but no RequiredDuringSchedulingIgnoredDuringExecution.
		// 4 - Pod with Affinity and NodeAffinity and RequiredDuringSchedulingIgnoredDuringExecution but with 0 NodeSelectorTerms.
		// 5 - Pod with Affinity and NodeAffinity and RequiredDuringSchedulingIgnoredDuringExecution specified by the user.
		pods := []corev1.Pod{
			{
				ObjectMeta: metav1.ObjectMeta{
					Name:      "pod",
					Namespace: "test",
				},
			},
			{
				ObjectMeta: metav1.ObjectMeta{
					Name:      "pod",
					Namespace: "test",
				},
				Spec: corev1.PodSpec{
					Affinity: &corev1.Affinity{
						NodeAffinity:    nil,
						PodAffinity:     nil,
						PodAntiAffinity: nil,
					},
				},
			},
			{
				ObjectMeta: metav1.ObjectMeta{
					Name:      "pod",
					Namespace: "test",
				},
				Spec: corev1.PodSpec{
					Affinity: &corev1.Affinity{
						NodeAffinity:    nil,
						PodAffinity:     &podAffinity,
						PodAntiAffinity: nil,
					},
				},
			},
			{
				ObjectMeta: metav1.ObjectMeta{
					Name:      "pod",
					Namespace: "test",
				},
				Spec: corev1.PodSpec{
					Affinity: &corev1.Affinity{
						NodeAffinity: &corev1.NodeAffinity{
							RequiredDuringSchedulingIgnoredDuringExecution: nil,
						},
						PodAntiAffinity: nil,
					},
				},
			},
			{
				ObjectMeta: metav1.ObjectMeta{
					Name:      "pod",
					Namespace: "test",
				},
				Spec: corev1.PodSpec{
					Affinity: &corev1.Affinity{
						NodeAffinity: &corev1.NodeAffinity{
							RequiredDuringSchedulingIgnoredDuringExecution: &corev1.NodeSelector{NodeSelectorTerms: []corev1.NodeSelectorTerm{}},
						},
						PodAntiAffinity: nil,
					},
				},
			},
			{
				ObjectMeta: metav1.ObjectMeta{
					Name:      "pod",
					Namespace: "test",
				},
				Spec: corev1.PodSpec{
					Affinity: &corev1.Affinity{
						NodeAffinity: &corev1.NodeAffinity{
							RequiredDuringSchedulingIgnoredDuringExecution: podNodeSelector.DeepCopy(),
						},
						PodAntiAffinity: nil,
					},
				},
			},
		}

		DescribeTable("6 Pods with different Affinity",
			func(imposedNodeSelector corev1.NodeSelector, pod corev1.Pod, expectedNodeSelector corev1.NodeSelector) {
				newPod := pod.DeepCopy()
				newNodeSelector := imposedNodeSelector.DeepCopy()
				fillPodWithTheNewNodeSelector(newNodeSelector, newPod)
				By("Checking the NodeSelector is not changed")
				Expect(*newNodeSelector).To(Equal(imposedNodeSelector))
				By("Checking the NodeSelector inserted in the Pod")
				Expect(*newPod.Spec.Affinity.NodeAffinity.RequiredDuringSchedulingIgnoredDuringExecution).To(Equal(expectedNodeSelector))
			},
			Entry("0 - Pod without Affinity", imposedNodeSelector, pods[0], imposedNodeSelector),
			Entry("1 - Pod with Affinity but no NodeAffinity", imposedNodeSelector, pods[1], imposedNodeSelector),
			Entry("2 - Pod with Affinity and PodAffinity, but no NodeAffinity", imposedNodeSelector, pods[2], imposedNodeSelector),
			Entry("3 - Pod with Affinity and NodeAffinity but no RequiredDuringSchedulingIgnoredDuringExecution", imposedNodeSelector, pods[3], imposedNodeSelector),
			Entry("4 - Pod with Affinity and NodeAffinity and RequiredDuringSchedulingIgnoredDuringExecution but with 0 NodeSelectorTerms", imposedNodeSelector, pods[4], imposedNodeSelector),
			Entry("5 - Pod with Affinity and NodeAffinity and RequiredDuringSchedulingIgnoredDuringExecution specified by the user", imposedNodeSelector, pods[5], mergedNodeSelector),
		)

		It("Test that the PodAffinity in the case 2 is preserved", func() {
			newPod := pods[2].DeepCopy()
			newNodeSelector := imposedNodeSelector.DeepCopy()
			fillPodWithTheNewNodeSelector(newNodeSelector, newPod)
			Expect(*newPod.Spec.Affinity.PodAffinity).To(Equal(podAffinity))
		})
	})

	Context("5 - Call the mutatePod function and observe the pod is correctly mutated", func() {
		podNodeSelector := testutils.GetPodNodeSelector()
		// Pod with one pre-existing toleration and a user-supplied NodeSelector.
		pod := corev1.Pod{
			ObjectMeta: metav1.ObjectMeta{
				Name:      "pod",
				Namespace: "test",
			},
			Spec: corev1.PodSpec{
				Tolerations: []corev1.Toleration{{
					Key:               "test",
					Operator:          "",
					Value:             "",
					TolerationSeconds: nil,
				}},
				Affinity: &corev1.Affinity{
					NodeAffinity: &corev1.NodeAffinity{
						RequiredDuringSchedulingIgnoredDuringExecution: podNodeSelector.DeepCopy(),
					},
					PodAntiAffinity: nil,
				},
			},
		}

		It("Check the toleration added and the new NodeSelector", func() {
			namespaceOffloading := testutils.GetNamespaceOffloading(offv1alpha1.LocalAndRemotePodOffloadingStrategyType)
			podTest := pod.DeepCopy()
			err := mutatePod(&namespaceOffloading, podTest)
			Expect(err == nil).To(BeTrue())
			// The virtual-node toleration must be appended after the existing one.
			Expect(len(podTest.Spec.Tolerations) == 2).To(BeTrue())
			Expect(podTest.Spec.Tolerations[1].MatchToleration(&virtualNodeToleration)).To(BeTrue())
			mergedNodeSelector := testutils.GetMergedNodeSelector(offv1alpha1.LocalAndRemotePodOffloadingStrategyType)
			Expect(*podTest.Spec.Affinity.NodeAffinity.RequiredDuringSchedulingIgnoredDuringExecution).To(Equal(mergedNodeSelector))
		})

		It("With LocalPodOffloadingStrategy check that pod is not mutated ", func() {
			namespaceOffloading := testutils.GetNamespaceOffloading(offv1alpha1.LocalPodOffloadingStrategyType)
			podTest := pod.DeepCopy()
			oldPodNodeSelector := *podTest.Spec.Affinity.NodeAffinity.RequiredDuringSchedulingIgnoredDuringExecution
			err := mutatePod(&namespaceOffloading, podTest)
			Expect(err == nil).To(BeTrue())
			// Local offloading: toleration list and NodeSelector stay untouched.
			Expect(len(podTest.Spec.Tolerations) == 1).To(BeTrue())
			Expect(*podTest.Spec.Affinity.NodeAffinity.RequiredDuringSchedulingIgnoredDuringExecution).To(Equal(oldPodNodeSelector))
		})
	})
})
fernandoBRS/akka-java-samples
samples/src/main/java/com/akka/sample/helloworld/models/greeting/WhoToGreet.java
package com.akka.sample.helloworld.models.greeting; public class WhoToGreet { public final String who; public WhoToGreet(String who) { this.who = who; } }
slyoldfox/blaze-persistence
core/impl/src/main/java/com/blazebit/persistence/impl/transform/SizeTransformationVisitor.java
<reponame>slyoldfox/blaze-persistence<filename>core/impl/src/main/java/com/blazebit/persistence/impl/transform/SizeTransformationVisitor.java
/*
 * Copyright 2014 - 2021 Blazebit.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *       http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.blazebit.persistence.impl.transform;

import com.blazebit.persistence.impl.AttributeHolder;
import com.blazebit.persistence.impl.ClauseType;
import com.blazebit.persistence.impl.JoinManager;
import com.blazebit.persistence.impl.JoinNode;
import com.blazebit.persistence.impl.JpaUtils;
import com.blazebit.persistence.impl.MainQuery;
import com.blazebit.persistence.impl.ResolvedExpression;
import com.blazebit.persistence.impl.SimplePathReference;
import com.blazebit.persistence.impl.SubqueryBuilderListenerImpl;
import com.blazebit.persistence.impl.SubqueryInitiatorFactory;
import com.blazebit.persistence.impl.function.count.AbstractCountFunction;
import com.blazebit.persistence.impl.function.subquery.SubqueryFunction;
import com.blazebit.persistence.parser.EntityMetamodel;
import com.blazebit.persistence.parser.FunctionKind;
import com.blazebit.persistence.parser.expression.AggregateExpression;
import com.blazebit.persistence.parser.expression.Expression;
import com.blazebit.persistence.parser.expression.ExpressionCopyContext;
import com.blazebit.persistence.parser.expression.ExpressionModifierCollectingResultVisitorAdapter;
import com.blazebit.persistence.parser.expression.FunctionExpression;
import com.blazebit.persistence.parser.expression.ListIndexExpression;
import com.blazebit.persistence.parser.expression.MapKeyExpression;
import com.blazebit.persistence.parser.expression.PathElementExpression;
import com.blazebit.persistence.parser.expression.PathExpression;
import com.blazebit.persistence.parser.expression.PropertyExpression;
import com.blazebit.persistence.parser.expression.StringLiteral;
import com.blazebit.persistence.parser.expression.Subquery;
import com.blazebit.persistence.parser.expression.SubqueryExpression;
import com.blazebit.persistence.parser.expression.modifier.ExpressionModifier;
import com.blazebit.persistence.parser.util.ExpressionUtils;
import com.blazebit.persistence.parser.util.JpaMetamodelUtils;
import com.blazebit.persistence.spi.JpaProvider;

import javax.persistence.metamodel.Attribute;
import javax.persistence.metamodel.EntityType;
import javax.persistence.metamodel.IdentifiableType;
import javax.persistence.metamodel.ManagedType;
import javax.persistence.metamodel.PluralAttribute;
import javax.persistence.metamodel.SingularAttribute;
import javax.persistence.metamodel.Type;
import javax.persistence.metamodel.Type.PersistenceType;

import java.util.ArrayList;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;

/**
 * Visitor that rewrites {@code SIZE(collection)} expressions. Where possible it replaces
 * them by a {@code COUNT} aggregate over an implicitly joined collection (tracked via
 * {@link LateJoinEntry} and the group-by maps); when that is not safe (multiple roots,
 * bags, embeddable ids, aggregate context, ...) it falls back to a correlated count subquery.
 *
 * @author <NAME>
 * @author <NAME>
 * @since 1.2.0
 */
public class SizeTransformationVisitor extends ExpressionModifierCollectingResultVisitorAdapter {

    // Element types whose id attributes can be appended to the count arguments.
    private static final Set<PersistenceType> IDENTIFIABLE_PERSISTENCE_TYPES = EnumSet.of(PersistenceType.ENTITY, PersistenceType.MAPPED_SUPERCLASS);

    private final MainQuery mainQuery;
    private final EntityMetamodel metamodel;
    private final SubqueryInitiatorFactory subqueryInitFactory;
    private final JoinManager joinManager;
    private final JpaProvider jpaProvider;

    // state
    private boolean countTransformationDisabled;
    private boolean orderBySelectClause;
    // true once a second collection join is seen and counts must become COUNT(DISTINCT ...)
    private boolean distinctRequired;
    // clause currently being transformed; drives clause-dependency bookkeeping
    private ClauseType clause;

    // count expressions produced so far; may be retro-fitted with DISTINCT or replaced by subqueries
    private final Set<TransformedExpressionEntry> transformedExpressions = new HashSet<TransformedExpressionEntry>();
    // maps absolute paths to late join entries
    private final Map<String, LateJoinEntry> lateJoins = new HashMap<String, LateJoinEntry>();
    // group-bys the outer query needs for the COUNT-based transformation
    private final Map<ResolvedExpression, Set<ClauseType>> requiredGroupBys = new LinkedHashMap<>();
    // group-bys collected for generated count subqueries
    private final Map<ResolvedExpression, Set<ClauseType>> subqueryGroupBys = new LinkedHashMap<>();
    // base node of the last count-transformed SIZE argument; used to detect join-depth conflicts
    private JoinNode currentJoinNode;
    // size expressions with arguments having a blacklisted base node will become subqueries
    private Set<JoinNode> joinNodeBlacklist = new HashSet<>();
    // true while visiting the arguments of an aggregate function
    private boolean aggregateFunctionContext;

    public SizeTransformationVisitor(MainQuery mainQuery, SubqueryInitiatorFactory subqueryInitFactory, JoinManager joinManager, JpaProvider jpaProvider) {
        this.mainQuery = mainQuery;
        this.metamodel = mainQuery.getMetamodel();
        this.subqueryInitFactory = subqueryInitFactory;
        this.joinManager = joinManager;
        this.jpaProvider = jpaProvider;
    }

    public ClauseType getClause() {
        return clause;
    }

    public void setClause(ClauseType clause) {
        this.clause = clause;
    }

    public boolean isCountTransformationDisabled() {
        return countTransformationDisabled;
    }

    public void setCountTransformationDisabled(boolean countTransformationDisabled) {
        this.countTransformationDisabled = countTransformationDisabled;
    }

    public void setOrderBySelectClause(boolean orderBySelectClause) {
        this.orderBySelectClause = orderBySelectClause;
    }

    public Map<String, LateJoinEntry> getLateJoins() {
        return lateJoins;
    }

    public Map<ResolvedExpression, Set<ClauseType>> getRequiredGroupBys() {
        return requiredGroupBys;
    }

    public Map<ResolvedExpression, Set<ClauseType>> getSubqueryGroupBys() {
        return subqueryGroupBys;
    }

    // The count transformation may be switched off per visitor use or globally via configuration.
    private boolean isCountTransformationEnabled() {
        return !countTransformationDisabled && mainQuery.getQueryConfiguration().isCountTransformationEnabled();
    }

    /**
     * Records ORDER BY clause dependencies for already late-joined paths and, in the SELECT
     * clause, blacklists the whole base-node chain so SIZE arguments rooted there become subqueries.
     */
    @Override
    public Boolean visit(PathExpression expression) {
        if (orderBySelectClause) {
            LateJoinEntry lateJoinEntry = lateJoins.get(getJoinLookupKey(expression));
            if (lateJoinEntry != null) {
                lateJoinEntry.getClauseDependencies().add(ClauseType.ORDER_BY);
            }
        }
        if (clause == ClauseType.SELECT) {
            // for the select clause we blacklist all the join nodes that are required by other select items
            JoinNode current = (JoinNode) expression.getBaseNode();
            while (current != null) {
                joinNodeBlacklist.add(current);
                current = current.getParent();
            }
        }
        return super.visit(expression);
    }

    /**
     * Marks SIZE invocations (outside WHERE) for transformation and tracks whether we are
     * currently inside an aggregate function's argument list.
     */
    @Override
    public Boolean visit(FunctionExpression expression) {
        if (clause != ClauseType.WHERE && ExpressionUtils.isSizeFunction(expression)) {
            return true;
        }
        if (!aggregateFunctionContext && FunctionKind.AGGREGATE == mainQuery.getCbf().getFunctions().get(expression.getFunctionName().toLowerCase())) {
            aggregateFunctionContext = true;
            Boolean result = super.visit(expression);
            aggregateFunctionContext = false;
            return result;
        } else {
            return super.visit(expression);
        }
    }

    // Replaces the marked SIZE(...) expression and re-visits its argument.
    @Override
    protected void onModifier(ExpressionModifier parentModifier) {
        PathExpression sizeArg = (PathExpression) ((FunctionExpression) parentModifier.get()).getExpressions().get(0);
        parentModifier.set(getSizeExpression(parentModifier, sizeArg));
        sizeArg.accept(this);
    }

    // A SIZE argument requires a subquery when its base node is blacklisted and the collection is already joined there.
    private boolean requiresBlacklistedNode(PathExpression sizeArg) {
        JoinNode sizeArgBaseNode = (JoinNode) sizeArg.getBaseNode();
        if (joinNodeBlacklist.contains(sizeArgBaseNode)) {
            return sizeArgBaseNode.getNodes().keySet().contains(sizeArg.getField());
        } else {
            return false;
        }
    }

    /**
     * Produces the replacement for a SIZE(collection) expression: either a correlated count
     * subquery or a COUNT aggregate backed by an implicit (late) join plus group-by on the
     * owner's id. Also retro-converts earlier transformations to subqueries when join depths
     * conflict, and upgrades counts to DISTINCT once multiple collections are joined.
     *
     * @param parentModifier modifier pointing at the SIZE expression being replaced
     * @param sizeArg the collection path that SIZE was applied to
     * @return the replacement expression
     */
    private Expression getSizeExpression(ExpressionModifier parentModifier, PathExpression sizeArg) {
        JoinNode sizeArgJoin = (JoinNode) sizeArg.getBaseNode();
        String property = sizeArg.getPathReference().getField();
        final Type<?> nodeType = ((JoinNode) sizeArg.getBaseNode()).getNodeType();

        if (!(nodeType instanceof EntityType<?>)) {
            throw new IllegalArgumentException("Size on a collection owned by a non-entity type is not supported yet: " + sizeArg);
        }
        final EntityType<?> startType = (EntityType<?>) nodeType;

        AttributeHolder result = JpaUtils.getAttributeForJoining(metamodel, sizeArg);
        PluralAttribute<?, ?, ?> targetAttribute = (PluralAttribute<?, ?, ?>) result.getAttribute();
        if (targetAttribute == null) {
            throw new RuntimeException("Attribute [" + property + "] not found on class " + startType.getJavaType().getName());
        }
        final PluralAttribute.CollectionType collectionType = targetAttribute.getCollectionType();
        final boolean isElementCollection = jpaProvider.getJpaMetamodelAccessor().isElementCollection(targetAttribute);

        boolean subqueryRequired;
        if (isElementCollection) {
            subqueryRequired = false;
        } else {
            ManagedType<?> managedTargetType = (ManagedType<?>) result.getAttributeType();
            if (managedTargetType instanceof EntityType<?>) {
                // we could also generate counts for collections with embeddable id but we do not implement this for now
                subqueryRequired = ((EntityType<?>) managedTargetType).getIdType().getPersistenceType() == PersistenceType.EMBEDDABLE;
            } else {
                throw new RuntimeException("Path [" + sizeArg.toString() + "] does not refer to a collection");
            }
        }

        // build group by id clause
        List<PathExpression> groupByExprs = new ArrayList<>();
        for (SingularAttribute<?, ?> idAttribute : JpaMetamodelUtils.getIdAttributes(startType)) {
            List<PathElementExpression> pathElementExpr = new ArrayList<>(2);
            pathElementExpr.add(new PropertyExpression(sizeArgJoin.getAlias()));
            pathElementExpr.add(new PropertyExpression(idAttribute.getName()));
            PathExpression groupByExpr = new PathExpression(pathElementExpr);
            groupByExprs.add(groupByExpr);
        }

        subqueryRequired = subqueryRequired ||
                // we could also generate counts for collections with IdClass attributes but we do not implement this for now
                !startType.hasSingleIdAttribute() ||
                joinManager.getRoots().size() > 1 ||
                clause == ClauseType.JOIN ||
                !isCountTransformationEnabled() ||
                // a subquery is required for bags when other collections are joined as well because we cannot rely on distinctness for bags
                // for now, we always generate a subquery when a bag is encountered
                jpaProvider.isBag((EntityType<?>) targetAttribute.getDeclaringType(), targetAttribute.getName()) ||
                requiresBlacklistedNode(sizeArg) ||
                aggregateFunctionContext;

        if (subqueryRequired) {
            return wrapSubqueryConditionally(generateSubquery(sizeArg), aggregateFunctionContext);
        } else {
            if (currentJoinNode != null && (!currentJoinNode.equals(sizeArgJoin))) {
                int currentJoinDepth = currentJoinNode.getJoinDepth();
                int sizeArgJoinDepth = sizeArgJoin.getJoinDepth();

                if (currentJoinDepth > sizeArgJoinDepth) {
                    return wrapSubqueryConditionally(generateSubquery(sizeArg), aggregateFunctionContext);
                } else {
                    // we have to change all transformed expressions to subqueries
                    for (TransformedExpressionEntry transformedExpressionEntry : transformedExpressions) {
                        PathExpression originalSizeArg = transformedExpressionEntry.getOriginalSizeArg();
                        Expression subquery = wrapSubqueryConditionally(generateSubquery(originalSizeArg), transformedExpressionEntry.isAggregateFunctionContext());
                        transformedExpressionEntry.getParentModifier().set(subquery);
                    }
                    transformedExpressions.clear();
                    requiredGroupBys.clear();
                    lateJoins.clear();
                    distinctRequired = false;

                    if (currentJoinDepth == sizeArgJoinDepth) {
                        return wrapSubqueryConditionally(generateSubquery(sizeArg), aggregateFunctionContext);
                    }
                }
            }

            for (PathExpression groupByExpr : groupByExprs) {
                joinManager.implicitJoin(groupByExpr, true, true, true, null, null, new HashSet<String>(), false, false, false, false);
            }

            PathExpression originalSizeArg = sizeArg.copy(ExpressionCopyContext.EMPTY);
            originalSizeArg.setPathReference(sizeArg.getPathReference());

            sizeArg.setUsedInCollectionFunction(false);

            List<Expression> countArguments = new ArrayList<>();

            String joinLookupKey = getJoinLookupKey(sizeArg);
            LateJoinEntry lateJoin = lateJoins.get(joinLookupKey);
            if (lateJoin == null) {
                lateJoin = new LateJoinEntry();
                lateJoins.put(joinLookupKey, lateJoin);
            }
            lateJoin.getExpressionsToJoin().add(sizeArg);
            lateJoin.getClauseDependencies().add(clause);

            if ((isElementCollection && collectionType != PluralAttribute.CollectionType.MAP)
                    || collectionType == PluralAttribute.CollectionType.SET) {
                if (IDENTIFIABLE_PERSISTENCE_TYPES.contains(targetAttribute.getElementType().getPersistenceType()) && targetAttribute.isCollection()) {
                    // append id attribute name of joinable size argument
                    PluralAttribute<?, ?, ?> sizeArgTargetAttribute = (PluralAttribute<?, ?, ?>) JpaMetamodelUtils.getAttribute(startType, sizeArg.getPathReference().getField());
                    for (Attribute<?, ?> idAttribute : JpaMetamodelUtils.getIdAttributes(((IdentifiableType<?>) sizeArgTargetAttribute.getElementType()))) {
                        List<PathElementExpression> pathElementExpressions = new ArrayList<>(sizeArg.getExpressions().size() + 1);
                        pathElementExpressions.addAll(sizeArg.getExpressions());
                        pathElementExpressions.add(new PropertyExpression(idAttribute.getName()));
                        PathExpression pathExpression = new PathExpression(pathElementExpressions);
                        countArguments.add(pathExpression);
                        lateJoin.getExpressionsToJoin().add(pathExpression);
                    }
                } else {
                    countArguments.add(sizeArg);
                }
            } else {
                // lists/maps are counted via their index/key, which is unique per owner
                sizeArg.setCollectionQualifiedPath(true);
                if (collectionType == PluralAttribute.CollectionType.LIST) {
                    countArguments.add(new ListIndexExpression(sizeArg));
                } else {
                    countArguments.add(new MapKeyExpression(sizeArg));
                }
            }

            AggregateExpression countExpr = createCountFunction(distinctRequired, countArguments);
            transformedExpressions.add(new TransformedExpressionEntry(countExpr, originalSizeArg, parentModifier, aggregateFunctionContext));

            currentJoinNode = (JoinNode) originalSizeArg.getBaseNode();

            if (!distinctRequired) {
                if (lateJoins.size() + joinManager.getCollectionJoins().size() > 1) {
                    distinctRequired = true;
                    /*
                     * As soon as we encounter another collection join, set previously
                     * performed transformations to distinct.
                     */
                    for (TransformedExpressionEntry transformedExpressionEntry : transformedExpressions) {
                        AggregateExpression transformedExpr = transformedExpressionEntry.getTransformedExpression();
                        if (ExpressionUtils.isCustomFunctionInvocation(transformedExpr) &&
                                AbstractCountFunction.FUNCTION_NAME.equalsIgnoreCase(((StringLiteral) transformedExpr.getExpressions().get(0)).getValue())) {
                            Expression possibleDistinct = transformedExpr.getExpressions().get(1);
                            // Add the DISTINCT qualifier only if it is not already present.
                            if (!(possibleDistinct instanceof StringLiteral) || !AbstractCountFunction.DISTINCT_QUALIFIER.equals(((StringLiteral) possibleDistinct).getValue())) {
                                transformedExpr.getExpressions().add(1, new StringLiteral(AbstractCountFunction.DISTINCT_QUALIFIER));
                            }
                        } else {
                            transformedExpr.setDistinct(true);
                        }
                    }
                }
            }

            // register the owner-id group-bys for the clauses that need them
            for (Expression groupByExpr : groupByExprs) {
                String groupByExprString = groupByExpr.toString();
                ResolvedExpression resolvedExpression = new ResolvedExpression(groupByExprString, groupByExpr);
                Set<ClauseType> clauseTypes = requiredGroupBys.get(resolvedExpression);
                if (clauseTypes == null) {
                    requiredGroupBys.put(resolvedExpression, EnumSet.of(clause));
                } else {
                    clauseTypes.add(clause);
                }
            }

            return countExpr;
        }
    }

    // Key used to share late joins between identical collection paths: "<absolute base path>.<field>".
    private String getJoinLookupKey(PathExpression pathExpression) {
        JoinNode originalNode = (JoinNode) pathExpression.getBaseNode();
        return originalNode.getAliasInfo().getAbsolutePath() + "." + pathExpression.getField();
    }

    // Builds a FUNCTION('COUNT_TUPLE'-style) aggregate, optionally with the DISTINCT qualifier prepended.
    private AggregateExpression createCountFunction(boolean distinct, List<Expression> countTupleArguments) {
        countTupleArguments.add(0, new StringLiteral(AbstractCountFunction.FUNCTION_NAME.toUpperCase()));
        if (distinct) {
            countTupleArguments.add(1, new StringLiteral(AbstractCountFunction.DISTINCT_QUALIFIER));
        }
        return new AggregateExpression(false, "FUNCTION", countTupleArguments);
    }

    /**
     * Builds a correlated {@code SELECT COUNT(*)} subquery over the collection path and
     * registers the owner's id attributes in {@link #subqueryGroupBys} for correlation.
     */
    private SubqueryExpression generateSubquery(PathExpression sizeArg) {
        JoinNode sizeArgJoin = (JoinNode) sizeArg.getBaseNode();
        final Type<?> nodeType = sizeArgJoin.getNodeType();
        if (!(nodeType instanceof EntityType<?>)) {
            throw new IllegalArgumentException("Size on a collection owned by a non-entity type is not supported yet: " + sizeArg);
        }
        final EntityType<?> startType = (EntityType<?>) nodeType;
        Subquery countSubquery = (Subquery) subqueryInitFactory.createSubqueryInitiator(null, new SubqueryBuilderListenerImpl<>(), false, getClause())
                .from(sizeArg.getPathReference().toString())
                .select("COUNT(*)");

        for (SingularAttribute<?, ?> idAttribute : JpaMetamodelUtils.getIdAttributes(startType)) {
            String groupByExprString = sizeArgJoin.getAlias() + "." + idAttribute.getName();
            ResolvedExpression groupByExpr = new ResolvedExpression(groupByExprString, null);
            Set<ClauseType> clauseTypes = subqueryGroupBys.get(groupByExpr);
            if (clauseTypes == null) {
                List<PathElementExpression> pathElementExpressions = new ArrayList<>(2);
                pathElementExpressions.add(new PropertyExpression(sizeArgJoin.getAlias()));
                pathElementExpressions.add(new PropertyExpression(idAttribute.getName()));
                PathExpression pathExpression = new PathExpression(pathElementExpressions);
                pathExpression.setPathReference(new SimplePathReference(sizeArgJoin, idAttribute.getName(), metamodel.type(JpaMetamodelUtils.resolveFieldClass(startType.getJavaType(), idAttribute))));
                groupByExpr = new ResolvedExpression(groupByExprString, pathExpression);
                subqueryGroupBys.put(groupByExpr, EnumSet.of(clause));
            } else {
                clauseTypes.add(clause);
            }
        }
        return new SubqueryExpression(countSubquery);
    }

    // Wraps subqueries occurring inside aggregate functions to work around a Hibernate parser limitation.
    private Expression wrapSubqueryConditionally(SubqueryExpression subquery, boolean wrap) {
        if (wrap) {
            // we need to wrap subqueries in aggregate functions in COALESCE to trick the Hibernate parser
            // see https://hibernate.atlassian.net/browse/HHH-9331
            List<Expression> subqueryFunctionArguments = new ArrayList<>(1);
            subqueryFunctionArguments.add(new StringLiteral(SubqueryFunction.FUNCTION_NAME));
            subqueryFunctionArguments.add(subquery);
            return new FunctionExpression("FUNCTION", subqueryFunctionArguments);
        } else {
            return subquery;
        }
    }

    /**
     * Bookkeeping record for a SIZE expression already converted to a count aggregate,
     * keeping enough context to later replace it with a subquery if required.
     *
     * @author <NAME>
     * @since 1.2.0
     */
    private static class TransformedExpressionEntry {
        private final AggregateExpression transformedExpression;
        private final PathExpression originalSizeArg;
        private final ExpressionModifier parentModifier;
        private final boolean aggregateFunctionContext;

        public TransformedExpressionEntry(AggregateExpression transformedExpression, PathExpression originalSizeArg, ExpressionModifier parentModifier, boolean aggregateFunctionContext) {
            this.transformedExpression = transformedExpression;
            this.originalSizeArg = originalSizeArg;
            this.parentModifier = parentModifier;
            this.aggregateFunctionContext = aggregateFunctionContext;
        }

        public AggregateExpression getTransformedExpression() {
            return transformedExpression;
        }

        public PathExpression getOriginalSizeArg() {
            return originalSizeArg;
        }

        public ExpressionModifier getParentModifier() {
            return parentModifier;
        }

        public boolean isAggregateFunctionContext() {
            return aggregateFunctionContext;
        }
    }

    /**
     * A join to be added late (after transformation), together with the clauses that depend
     * on it and the expressions that must be joined through it.
     *
     * @author <NAME>
     * @since 1.2.0
     */
    static class LateJoinEntry {
        private final EnumSet<ClauseType> clauseDependencies = EnumSet.noneOf(ClauseType.class);
        private final List<Expression> expressionsToJoin = new ArrayList<>();

        public EnumSet<ClauseType> getClauseDependencies() {
            return clauseDependencies;
        }

        public List<Expression> getExpressionsToJoin() {
            return expressionsToJoin;
        }
    }
}
HeRaNO/OI-ICPC-Codes
HSAHRBNUOJ/P27xx/P2711.cpp
#include <cstdio>
#define MAXN 110
using namespace std;

// A 2D lattice point read from the input.
struct point {
    int x;
    int y;
};

point a[MAXN];
int n, ans;

// Returns true when points p, q and r are NOT collinear, i.e. they span a
// proper triangle. Collinearity (including shared vertical/horizontal lines)
// is detected by a vanishing cross product of the two edge vectors.
bool judge(int p, int q, int r) {
    const int dx1 = a[q].x - a[p].x;
    const int dy1 = a[q].y - a[p].y;
    const int dx2 = a[r].x - a[q].x;
    const int dy2 = a[r].y - a[q].y;
    return dy2 * dx1 != dx2 * dy1;
}

// Reads n points and counts every unordered triple that forms a triangle.
int main() {
    scanf("%d", &n);
    for (int i = 1; i <= n; i++) {
        scanf("%d %d", &a[i].x, &a[i].y);
    }
    for (int i = 1; i <= n; i++) {
        for (int j = i + 1; j <= n; j++) {
            for (int k = j + 1; k <= n; k++) {
                if (judge(i, j, k)) {
                    ans++;
                }
            }
        }
    }
    printf("%d\n", ans);
    return 0;
}
kbore/pbis-open
lsass/server/api/rpc_server.c
<reponame>kbore/pbis-open<gh_stars>100-1000
/* Editor Settings: expandtabs and use 4 spaces for indentation
 * ex: set softtabstop=4 tabstop=8 expandtab shiftwidth=4: *
 */

/*
 * Copyright © BeyondTrust Software 2004 - 2019
 * All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * BEYONDTRUST MAKES THIS SOFTWARE AVAILABLE UNDER OTHER LICENSING TERMS AS
 * WELL. IF YOU HAVE ENTERED INTO A SEPARATE LICENSE AGREEMENT WITH
 * BEYONDTRUST, THEN YOU MAY ELECT TO USE THE SOFTWARE UNDER THE TERMS OF THAT
 * SOFTWARE LICENSE AGREEMENT INSTEAD OF THE TERMS OF THE APACHE LICENSE,
 * NOTWITHSTANDING THE ABOVE NOTICE. IF YOU HAVE QUESTIONS, OR WISH TO REQUEST
 * A COPY OF THE ALTERNATE LICENSING TERMS OFFERED BY BEYONDTRUST, PLEASE CONTACT
 * BEYONDTRUST AT beyondtrust.com/contact
 */

/*
 * Copyright (C) BeyondTrust Software. All rights reserved.
 *
 * Module Name:
 *
 *        rpc_server.c
 *
 * Abstract:
 *
 *        BeyondTrust Security and Authentication Subsystem (LSASS)
 *
 *        Remote Procedure Call (RPC) Server Interface
 *
 * Authors: <NAME> (<EMAIL>)
 */
#include "api.h"

static
DWORD
LsaStartRpcSrv(
    PLSA_RPC_SERVER pRpc
    );

static
DWORD
LsaStopRpcSrv(
    PLSA_RPC_SERVER pRpc
    );

/*
 * Validates a symbol looked up via dlsym(). Logs (including any pending
 * dlerror() text) and returns LW_ERROR_INVALID_RPC_SERVER when pSymbol is NULL.
 */
DWORD
LsaCheckInvalidRpcServer(
    PVOID pSymbol,
    PCSTR pszLibPath
    )
{
    DWORD dwError = 0;
    PCSTR pszError = NULL;

    if (pSymbol == NULL)
    {
        LSA_LOG_ERROR("Ignoring invalid rpc server at path [%s]",
                      (pszLibPath ? pszLibPath : "(unknown)"));

        pszError = dlerror();
        if (!LW_IS_NULL_OR_EMPTY_STR(pszError))
        {
            LSA_LOG_ERROR("%s", pszError);
        }

        dwError = LW_ERROR_INVALID_RPC_SERVER;
        BAIL_ON_LSA_ERROR(dwError);
    }

cleanup:
    return dwError;

error:
    goto cleanup;
}

/*
 * Loads a single RPC server plugin: dlopen()s the shared library at
 * pRpc->pszSrvLibPath, resolves its initialize/shutdown entry points, runs
 * the initializer (which fills in pRpc->pszName and pRpc->pfnTable) and
 * validates the resulting function table.
 */
DWORD
LsaSrvInitRpcServer(
    PLSA_RPC_SERVER pRpc
    )
{
    DWORD dwError = 0;
    PFNINITIALIZERPCSRV pfnInitRpc = NULL;
    PCSTR pszError = NULL;
    PCSTR pszSrvLibPath = NULL;

    if (LW_IS_NULL_OR_EMPTY_STR(pRpc->pszSrvLibPath))
    {
        dwError = ERROR_FILE_NOT_FOUND;
        BAIL_ON_LSA_ERROR(dwError);
    }

    pszSrvLibPath = pRpc->pszSrvLibPath;

    /* clear any stale dlerror() state before each dl call */
    dlerror();
    pRpc->phLib = dlopen(pszSrvLibPath, RTLD_NOW | RTLD_LOCAL);
    if (pRpc->phLib == NULL)
    {
        LSA_LOG_ERROR("Failed to open rpc server at path [%s]", pszSrvLibPath);

        pszError = dlerror();
        if (!LW_IS_NULL_OR_EMPTY_STR(pszError))
        {
            LSA_LOG_ERROR("%s", pszError);
        }

        dwError = LW_ERROR_INVALID_RPC_SERVER;
        BAIL_ON_LSA_ERROR(dwError);
    }

    dlerror();
    pfnInitRpc = (PFNINITIALIZERPCSRV)dlsym(
                            pRpc->phLib,
                            LSA_SYMBOL_NAME_INITIALIZE_RPCSRV);
    dwError = LsaCheckInvalidRpcServer(
                            pfnInitRpc,
                            pszSrvLibPath);
    BAIL_ON_LSA_ERROR(dwError);

    dlerror();
    pRpc->pfnShutdownSrv = (PFNSHUTDOWNRPCSRV)dlsym(
                            pRpc->phLib,
                            LSA_SYMBOL_NAME_SHUTDOWN_RPCSRV);
    dwError = LsaCheckInvalidRpcServer(
                            pRpc->pfnShutdownSrv,
                            pszSrvLibPath);
    BAIL_ON_LSA_ERROR(dwError);

    dwError = pfnInitRpc(
                    &pRpc->pszName,
                    &pRpc->pfnTable);
    BAIL_ON_LSA_ERROR(dwError);

    dwError = LsaValidateRpcServer(pRpc);
    BAIL_ON_LSA_ERROR(dwError);

cleanup:
    return dwError;

error:
    goto cleanup;
}

/*
 * Reads the configured RPC server list from the registry, initializes each
 * plugin (failures are logged and skipped, not fatal), swaps the successfully
 * loaded list into gpRpcServerList under the writer lock, starts all servers
 * and finally kicks off the RPC service-control worker thread.
 */
DWORD
LsaSrvInitRpcServers(
    VOID
    )
{
    DWORD dwError = 0;
    PLSA_RPC_SERVER pRpc = NULL;
    PLSA_RPC_SERVER pUninitializedRpcList = NULL;
    PLSA_RPC_SERVER pRpcList = NULL;
    /* NOTE(review): initialized TRUE before the lock is taken — presumably
       the ENTER macro sets it; confirm against the macro definition */
    BOOLEAN bLocked = TRUE;

    dwError = LsaRpcReadRegistry(&pUninitializedRpcList);
    BAIL_ON_LSA_ERROR(dwError);

    while (pUninitializedRpcList)
    {
        /* detach the head entry from the uninitialized list */
        pRpc = pUninitializedRpcList;
        pUninitializedRpcList = pUninitializedRpcList->pNext;
        pRpc->pNext = NULL;

        dwError = LsaSrvInitRpcServer(pRpc);
        if (dwError)
        {
            /* a broken plugin is dropped, but does not abort startup */
            LSA_LOG_ERROR("Failed to load rpc server [%s] at [%s] [error code:%u]",
                          (pRpc->pszName ? pRpc->pszName : "<null>"),
                          (pRpc->pszSrvLibPath ? pRpc->pszSrvLibPath : "<null>"),
                          dwError);

            LsaSrvFreeRpcServer(pRpc);
            pRpc = NULL;
            dwError = 0;
        }
        else
        {
            LsaSrvAppendRpcServerList(pRpc, &pRpcList);
            pRpc = NULL;
        }
    }

    ENTER_RPC_SERVER_LIST_WRITER_LOCK(bLocked);

    /* replace the previously active server list with the freshly loaded one */
    LsaSrvFreeRpcServerList(gpRpcServerList);
    gpRpcServerList = pRpcList;
    pRpcList = NULL;

    LsaStartRpcServers(gpRpcServerList);

    LEAVE_RPC_SERVER_LIST_WRITER_LOCK(bLocked);

    /* Start rpc service control worker thread to start
       listening for incoming rpc calls */
    dwError = RpcSvcStartWorker();
    BAIL_ON_LSA_ERROR(dwError);

cleanup:
    if (pUninitializedRpcList)
    {
        LsaSrvFreeRpcServerListWithoutStopping(pUninitializedRpcList);
    }

    return dwError;

error:
    if (pRpcList)
    {
        LsaSrvFreeRpcServerList(pRpcList);
    }

    goto cleanup;
}

/* Starts every server in the list; per-server failures are logged by LsaStartRpcSrv. */
void
LsaStartRpcServers(
    PLSA_RPC_SERVER pRpcServerList
    )
{
    PLSA_RPC_SERVER pRpc = NULL;

    while (pRpcServerList)
    {
        pRpc = pRpcServerList;
        pRpcServerList = pRpcServerList->pNext;

        LsaStartRpcSrv(pRpc);
    }
}

/* Invokes a single server's start entry point and logs the outcome. */
static
DWORD
LsaStartRpcSrv(
    PLSA_RPC_SERVER pRpc
    )
{
    DWORD dwError = 0;

    dwError = pRpc->pfnTable->pfnStart();
    if (dwError)
    {
        LSA_LOG_ERROR("Couldn't start %s rpc server (error: %u)",
                      pRpc->pszName, dwError);
    }
    else
    {
        LSA_LOG_INFO("%s rpc server successfully started", pRpc->pszName);
    }

    return dwError;
}

/* Stops every server in the list; per-server failures are logged by LsaStopRpcSrv. */
void
LsaStopRpcServers(
    PLSA_RPC_SERVER pRpcServerList
    )
{
    PLSA_RPC_SERVER pRpc = NULL;

    while (pRpcServerList)
    {
        pRpc = pRpcServerList;
        pRpcServerList = pRpcServerList->pNext;

        LsaStopRpcSrv(pRpc);
    }
}

/* Invokes a single server's stop entry point and logs the outcome. */
static
DWORD
LsaStopRpcSrv(
    PLSA_RPC_SERVER pRpc
    )
{
    DWORD dwError = 0;

    dwError = pRpc->pfnTable->pfnStop();
    if (dwError)
    {
        LSA_LOG_ERROR("Couldn't stop %s rpc server (error: %u)",
                      pRpc->pszName, dwError);
    }
    else
    {
        LSA_LOG_INFO("%s rpc server successfully stopped", pRpc->pszName);
    }

    return dwError;
}

/* A usable plugin must provide a function table with both start and stop entries. */
DWORD
LsaValidateRpcServer(
    PLSA_RPC_SERVER pRpc
    )
{
    DWORD dwError = 0;

    if (pRpc == NULL ||
        pRpc->pfnTable == NULL ||
        !pRpc->pfnTable->pfnStart ||
        !pRpc->pfnTable->pfnStop)
    {
        dwError = LW_ERROR_INVALID_RPC_SERVER;
    }

    return dwError;
}

/*
 * Reads one server's "Path" value from the registry key pszServerKey and, if
 * present, allocates an LSA_RPC_SERVER entry and appends it to *ppRpcSrvList.
 * Servers without a Path are silently skipped.
 */
static
DWORD
LsaRpcReadServer(
    PCSTR pszServerName,
    PCSTR pszServerKey,
    PLSA_RPC_SERVER *ppRpcSrvList
    )
{
    DWORD dwError = 0;
    PLSA_RPC_SERVER pRpcSrv = NULL;
    PSTR pszPath = NULL;

    LWREG_CONFIG_ITEM Config[] =
    {
        {
           "Path",
           FALSE,
           LwRegTypeString,
           0,
           MAXDWORD,
           NULL,
           &pszPath,
           NULL
        },
    };

    dwError = RegProcessConfig(
                pszServerKey,
                pszServerKey,
                Config,
                sizeof(Config)/sizeof(Config[0]));
    BAIL_ON_LSA_ERROR(dwError);

    if (!LW_IS_NULL_OR_EMPTY_STR(pszPath))
    {
        dwError = LwAllocateMemory(
                    sizeof(LSA_RPC_SERVER),
                    (PVOID*)&pRpcSrv);
        BAIL_ON_LSA_ERROR(dwError);

        /* ownership of pszPath moves into the new entry */
        pRpcSrv->pszSrvLibPath = pszPath;
        pszPath = NULL;

        dwError = LwAllocateString(pszServerName, &pRpcSrv->pszName);
        BAIL_ON_LSA_ERROR(dwError);

        LsaSrvAppendRpcServerList(pRpcSrv, ppRpcSrvList);
        pRpcSrv = NULL;
    }

cleanup:
    return dwError;

error:
    LW_SAFE_FREE_STRING(pszPath);

    if (pRpcSrv)
    {
        LsaSrvFreeRpcServer(pRpcSrv);
        pRpcSrv = NULL;
    }

    goto cleanup;
}

/*
 * Reads the multi-string "LoadOrder" value listing RPC servers and builds a
 * list of LSA_RPC_SERVER entries, one per named server subkey.
 * NOTE(review): LoadOrder is read under "...\RPCServers" but the per-server
 * keys are built under "...\RpcServers" — the registry lookup is presumably
 * case-insensitive; confirm before relying on the casing.
 */
DWORD
LsaRpcReadRegistry(
    PLSA_RPC_SERVER *ppRpcSrvList
    )
{
    DWORD dwError = 0;
    PSTR pszServers = NULL;
    PSTR pszServerKey = NULL;
    PSTR pszServer = NULL;

    LWREG_CONFIG_ITEM Config[] =
    {
        {
           "LoadOrder",
           FALSE,
           LwRegTypeMultiString,
           0,
           MAXDWORD,
           NULL,
           &pszServers,
           NULL
        },
    };

    BAIL_ON_INVALID_POINTER(ppRpcSrvList);

    dwError = RegProcessConfig(
                "Services\\lsass\\Parameters\\RPCServers",
                "Policy\\Services\\lsass\\Parameters\\RPCServers",
                Config,
                sizeof(Config)/sizeof(Config[0]));
    BAIL_ON_LSA_ERROR(dwError);

    if (LW_IS_NULL_OR_EMPTY_STR(pszServers) )
    {
        goto error;
    }

    /* iterate the REG_MULTI_SZ: entries are NUL-separated, ends at empty string */
    pszServer = pszServers;
    while (pszServer != NULL && *pszServer != '\0')
    {
        dwError = LwAllocateStringPrintf(
                    &pszServerKey,
                    "Services\\lsass\\Parameters\\RpcServers\\%s",
                    pszServer);
        BAIL_ON_LSA_ERROR(dwError);

        dwError = LsaRpcReadServer(
                    pszServer,
                    pszServerKey,
                    ppRpcSrvList);
        BAIL_ON_LSA_ERROR(dwError);

        LW_SAFE_FREE_STRING(pszServerKey);

        pszServer = pszServer + strlen(pszServer) + 1;
    }

cleanup:
    LW_SAFE_FREE_STRING(pszServers);
    LW_SAFE_FREE_STRING(pszServerKey);

    return dwError;

error:
    goto cleanup;
}

/* Appends pRpcServer at the tail of *ppRpcServerList (O(n) walk). */
VOID
LsaSrvAppendRpcServerList(
    PLSA_RPC_SERVER pRpcServer,
    PLSA_RPC_SERVER *ppRpcServerList
    )
{
    if (ppRpcServerList)
    {
        if (!*ppRpcServerList)
        {
            *ppRpcServerList = pRpcServer;
        }
        else
        {
            PLSA_RPC_SERVER pCurrent = *ppRpcServerList;
            while (pCurrent->pNext)
            {
                pCurrent = pCurrent->pNext;
            }
            pCurrent->pNext = pRpcServer;
        }
    }
}

/* Shuts down (if initialized), unloads and frees a single server entry. */
void
LsaSrvFreeRpcServer(
    PLSA_RPC_SERVER pSrv
    )
{
    if (pSrv == NULL) return;

    if (pSrv->pfnShutdownSrv)
    {
        pSrv->pfnShutdownSrv(
            pSrv->pszName,
            pSrv->pfnTable);
    }

    if (pSrv->phLib)
    {
        dlclose(pSrv->phLib);
    }

    LW_SAFE_FREE_STRING(pSrv->pszSrvLibPath);
    LW_SAFE_FREE_MEMORY(pSrv);
}

/* Stops every server in the list, then frees all entries. */
void
LsaSrvFreeRpcServerList(
    PLSA_RPC_SERVER pRpcServerList
    )
{
    PLSA_RPC_SERVER pRpc = NULL;

    LsaStopRpcServers(pRpcServerList);

    while (pRpcServerList)
    {
        pRpc = pRpcServerList;
        pRpcServerList = pRpcServerList->pNext;

        LsaSrvFreeRpcServer(pRpc);
        pRpc = NULL;
    }
}

/* Frees all entries without calling their stop entry points (for never-started lists). */
void
LsaSrvFreeRpcServerListWithoutStopping(
    PLSA_RPC_SERVER pRpcServerList
    )
{
    PLSA_RPC_SERVER pRpc = NULL;

    while (pRpcServerList)
    {
        pRpc = pRpcServerList;
        pRpcServerList = pRpcServerList->pNext;

        LsaSrvFreeRpcServer(pRpc);
        pRpc = NULL;
    }
}

/* Tears down the global server list under the writer lock. */
VOID
LsaSrvFreeRpcServers(
    VOID
    )
{
    BOOLEAN bLocked = FALSE;

    ENTER_RPC_SERVER_LIST_WRITER_LOCK(bLocked);

    LsaSrvFreeRpcServerList(gpRpcServerList);
    gpRpcServerList = NULL;

    LEAVE_RPC_SERVER_LIST_WRITER_LOCK(bLocked);
}

/*
local variables:
mode: c
c-basic-offset: 4
indent-tabs-mode: nil
tab-width: 4
end:
*/
gnestor/plotly.py
_plotly_utils/tests/validators/test_imageuri_validator.py
import base64

import pytest
from _plotly_utils.basevalidators import ImageUriValidator
import numpy as np
from PIL import Image


# Fixtures
# --------
@pytest.fixture()
def validator():
    """ImageUriValidator under test, bound to a dummy prop/parent."""
    return ImageUriValidator('prop', 'parent')


# Tests
# -----
# ### Acceptance ###
@pytest.mark.parametrize('val', [
    'http://somewhere.com/images/image12.png',
    'data:image/png;base64,iVBORw0KGgoAAAANSU',
])
def test_validator_acceptance(val, validator):
    # Plain URLs and already-encoded data URIs pass through unchanged.
    assert validator.validate_coerce(val) == val


# ### Coercion from PIL Image ###
def test_validator_coercion_PIL(validator):
    # Single pixel black png (http://png-pixel.com/)
    img_path = '_plotly_utils/tests/resources/1x1-black.png'

    # The validator should emit the same base64 data URI we build by hand.
    with open(img_path, 'rb') as f:
        encoded = base64.b64encode(f.read()).decode('ascii')
    expected_uri = 'data:image/png;base64,' + encoded

    pil_image = Image.open(img_path)
    assert validator.validate_coerce(pil_image) == expected_uri


# ### Rejection ###
@pytest.mark.parametrize('val', [23, set(), []])
def test_rejection_by_type(val, validator):
    # Non-string, non-image inputs must be rejected with a ValueError.
    with pytest.raises(ValueError) as err:
        validator.validate_coerce(val)

    assert 'Invalid value' in str(err.value)
wqking/gincu
src/allegro/gallegrotexturedata.h
<gh_stars>10-100 #ifndef GALLEGROTEXTUREDATA_H #define GALLEGROTEXTUREDATA_H #include "gincu/gtexture.h" #include "gincu/glog.h" #include "allegro5/allegro.h" #include <string> namespace gincu { class GAllegroTextureData : public GTextureData { public: GAllegroTextureData() : image(nullptr) { } ~GAllegroTextureData() { this->close(); } virtual void load(const std::string & fileName) override { this->fileName = fileName; this->reload(); } virtual void reload() override { this->close(); this->image = al_load_bitmap(this->fileName.c_str()); if(this->image == nullptr) { G_LOG_ERROR("Can't load image %s", this->fileName.c_str()); } } virtual GSize getSize() const override { if(this->image != nullptr) { return { (GCoord)al_get_bitmap_width(this->image), (GCoord)al_get_bitmap_height(this->image) }; } else { return GSize(); } } virtual bool isValid() const override { return this->image != nullptr; } void close() { if(this->image != nullptr) { al_destroy_bitmap(this->image); } } std::string fileName; ALLEGRO_BITMAP * image; }; } //namespace gincu #endif
Rafat97/nodejs-MVC
bootstrap/config/route.js
"use strict";

/**
 * Build a plain route descriptor object.
 *
 * @param {*} type           HTTP verb / handler type, exposed as `fun`.
 * @param {string} url       Route URL pattern.
 * @param {*} controllerData Controller reference, exposed as `controller`.
 * @returns {object} descriptor with url, fun, controller and typeof_parem
 *                   (NOTE(review): key name looks like a typo of "typeof_param",
 *                   but it is part of the public shape consumed elsewhere, so
 *                   it is kept as-is).
 */
const route = (type, url, controllerData) => ({
    url: url,
    fun: type,
    controller: controllerData,
    typeof_parem: typeof controllerData,
});

module.exports = route;
BIJOY-SUST/sust-medical-center
cse/migrations/0047_auto_20190612_1124.py
# Generated by Django 2.2 on 2019-06-12 11:24 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('cse', '0046_medicineinfo'), ] operations = [ migrations.AlterField( model_name='medicineinfo', name='medicineafter', field=models.IntegerField(blank=True, default=None, null=True), ), ]
VisualGMQ/Chaos_Dungeon
src/Sprite/WorldModel.cpp
<filename>src/Sprite/WorldModel.cpp #include "Sprite/WorldModel.hpp" Layer::Layer(string name){ this->name = name; } void Layer::AddGameObject(GameObject* obj){ auto it = objs.begin(); for(;it!=objs.end();it++) if((*it)->GetID()==obj->GetID()) return; objs.push_back(obj); } void Layer::RemoveObject(IDType id){ will_remove.push_back(id); } void Layer::TakeAway(IDType id){ will_takeaway.push_back(id); } void Layer::TakeAwayAll(){ objs.clear(); } void Layer::JunkRecycle(){ takeAwayRecycle(); removeRecycle(); } void Layer::EnableOrder(){ isorder = true; } void Layer::DisableOrder(){ isorder = false; } bool Layer::IsOrderDraw() const{ return isorder; } void Layer::Update(){ JunkRecycle(); if(isorder){ objs.sort([](const GameObject* o1, const GameObject* o2){ return o1->Position().y>=o2->Position().y; }); } for(GameObject* obj : objs) obj->Update(); } string Layer::GetName() const{ return name; } void Layer::SetName(string n) { name = n; } void Layer::Clear(){ while(!objs.empty()) { objs.back()->DeleteSelf(); objs.pop_back(); } } void Layer::takeAwayRecycle(){ while(!will_takeaway.empty()){ IDType id = will_takeaway.back(); will_takeaway.pop_back(); for(auto it=objs.begin();it!=objs.end();it++) if((*it)->GetID()==id){ objs.erase(it); break; } } } void Layer::removeRecycle(){ while(!will_remove.empty()){ IDType id = will_remove.back(); will_remove.pop_back(); for(auto it=objs.begin();it!=objs.end();it++) if((*it)->GetID()==id){ objs.erase(it); (*it)->DeleteSelf(); break; } } } Layer::~Layer(){ Clear(); } WorldModel* WorldModel::instance = nullptr; list<string> WorldModel::willdel_list; list<string> WorldModel::will_takeaway; bool WorldModel::clear_all = false; WorldModel* WorldModel::GetInstance(){ if(!instance){ instance = new WorldModel; } return instance; } WorldModel::WorldModel(){ CreateLayer("main"); } void WorldModel::Destroy(){ delete instance; } void WorldModel::AddGameObject(string layer, GameObject* obj){ GetLayer(layer)->AddGameObject(obj); } void 
WorldModel::Clear(){ for(auto it=layers.begin();it!=layers.end();it++) (*it)->Clear(); } void WorldModel::ClearWorldModel(){ for(Layer* layer : layers) delete layer; layers.clear(); } void WorldModel::TakeAwayAll(){ for(Layer* layer : layers) layer->TakeAwayAll(); } void WorldModel::TakeAway(string layer, IDType id){ GetLayer(layer)->TakeAway(id); } void WorldModel::TakeAway(IDType id){ for(Layer* layer : layers) layer->TakeAway(id); } void WorldModel::DeleteLayer(string name){ willdel_list.push_back(name); } void WorldModel::DeleteElem(string layer, IDType id){ GetLayer(layer)->RemoveObject(id); } void WorldModel::TakeAwayLayer(string name){ will_takeaway.push_back(name); } void WorldModel::MoveLayer(string name, int pos){ if(layers.empty()) return; Layer* layer = GetLayer(name); int idx = GetLayerIdx(name); for(auto it=layers.begin();it!=layers.end();it++) if((*it)->GetName()==name){ layers.erase(it); break; } if(pos>=layers.size()+1) layers.push_back(layer); else if(pos<=idx){ auto it = layers.begin(); std::advance(it, pos); layers.insert(it, layer); }else{ auto it = layers.begin(); std::advance(it, pos-1); layers.insert(it, layer); } } void WorldModel::DeleteElem(IDType id){ for(Layer* layer : layers) layer->RemoveObject(id); } Layer* WorldModel::GetLayer(string name){ for(auto it=layers.begin();it!=layers.end();it++) if((*it)->GetName()==name) return *it; cerr<<"layer"<<name<<" not exists"<<endl; return nullptr; } void WorldModel::Update(){ for(Layer* layer : layers) layer->Update(); } int WorldModel::GetLayerIdx(string name) const{ int i=0; for(Layer* layer : layers){ if(name==layer->GetName()) return i; i++; } return -1; } Layer* WorldModel::CreateLayer(string name, bool isorder){ if(!ExistsLayer(name)){ Layer* newlayer = new Layer(name); if(isorder) newlayer->EnableOrder(); layers.push_back(newlayer); return newlayer; }else{ cerr<<"warning: layer "<<name<<" already exists"<<endl; return nullptr; } } bool WorldModel::ExistsLayer(string name){ for(auto 
it=layers.begin();it!=layers.end();it++) if((*it)->GetName()==name) return true; return false; } void WorldModel::junkRecycle(){ while(!will_takeaway.empty()){ string name = will_takeaway.back(); will_takeaway.pop_back(); for(auto it=layers.begin();it!=layers.end();it++){ if((*it)->GetName()==name){ layers.erase(it); break; } } } if(clear_all){ while(!layers.empty()){ delete layers.back(); layers.pop_back(); } clear_all = false; }else{ while(!willdel_list.empty()){ string name = willdel_list.back(); willdel_list.pop_back(); for(auto i=layers.begin();i!=layers.end();i++){ if((*i)->GetName()==name){ delete *i; layers.erase(i); break; } } } } } WorldModel::~WorldModel(){ ClearWorldModel(); }
nottalha/Box
client/src/components/reviewcomment/index.js
<filename>client/src/components/reviewcomment/index.js import React, { Component } from 'react' import {Link} from 'react-router-dom' class Comment extends Component{ sumitHandler ( e ) { e.preventDefault() } render() { return( <div className="add-review-sec"> <h3>Add a Review</h3> <h4><span>Your email address will not be published.</span>Required fields are marked*</h4> <h5>Your Rating :</h5> <ul className="rating"> <li><Link to="/shop-single" title=""><i className="fa fa-star-o"></i></Link></li> <li><Link to="/shop-single" title=""><i className="fa fa-star-o"></i></Link></li> <li><Link to="/shop-single" title=""><i className="fa fa-star-o"></i></Link></li> <li><Link to="/shop-single" title=""><i className="fa fa-star-o"></i></Link></li> <li><Link to="/shop-single" title=""><i className="fa fa-star-o"></i></Link></li> </ul> <div className="clearfix"></div> <form onSubmit={this.sumitHandler} className="review-form"> <div className="form-field"> <input type="text" name="name" placeholder="Name"/> </div> <div className="form-field"> <input type="email" name="email" placeholder="Email"/> </div> <div className="form-field"> <textarea name="review" placeholder="Your Review*"></textarea> </div> <div className="form-submit"> <button type="submit" className="theme-btn">Submit</button> </div> </form> </div> ) } } export default Comment;
knowac/tizen-browser-30
services/SettingsUI/SettingsAFProfile.h
<reponame>knowac/tizen-browser-30<gh_stars>1-10 /* * Copyright (c) 2016 Samsung Electronics Co., Ltd. * * Licensed under the Apache License, Version 2.0 (the License); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an AS IS BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #ifndef SETTINGSAFPROFILE_MOB_H_ #define SETTINGSAFPROFILE_MOB_H_ #include "SettingsUI.h" #include <Elementary.h> #include <boost/concept_check.hpp> #include <string.h> #include <stdio.h> #include <vector> #include <Evas.h> #include "BrowserLogger.h" #include "Tools/EflTools.h" #include "Tools/SettingsEnums.h" #include <EWebKit.h> #include <EWebKit_internal.h> namespace tizen_browser{ namespace base_ui{ class SettingsAFProfile : public SettingsUI { public: SettingsAFProfile(){}; SettingsAFProfile(Evas_Object* parent); virtual ~SettingsAFProfile(); virtual bool populateList(Evas_Object* genlist); virtual void updateButtonMap(); static void _select_profile_cb(void* data, Evas_Object*, void*); protected: Ewk_Autofill_Profile* m_profile; std::shared_ptr<ItemData> m_itemData; std::string m_profileName; }; } } #endif /* SETTINGSAFPROFILE_MOB_H_ */
matty234/client
shared/signup/phone-number/phone-input.js
// @flow
// Phone-number input with a country selector. The number is formatted as the
// user types (via AsYouTypeFormatter) and reported to the parent both as an
// E.164 string and as a validity boolean.
import * as React from 'react'
import * as Kb from '../../common-adapters'
import * as Styles from '../../styles'
import {isIOS} from '../../constants/platform'
import {countryData, AsYouTypeFormatter, validateNumber} from '../../util/phone-numbers/'
import {memoize} from '../../util/memoize'

// Look up the "+NN" calling code for an ISO 3166-1 alpha-2 country code.
const getCallingCode = countryCode => countryData[countryCode].callingCode
// Example number used as the input placeholder for the selected country.
const getPlaceholder = countryCode => 'Ex: ' + countryData[countryCode].example
// Strip everything that is not a digit.
const filterNumeric = text => text.replace(/[^0-9]/g, '')
const defaultCountry = 'US'

// Mobile picker entries, alphabetized by country name; memoized because
// countryData is static.
const pickerItems = memoize(countryData =>
  Object.values(countryData)
    .sort((a: any, b: any) => a.name.localeCompare(b.name))
    .map((cd: any) => ({label: cd.pickerText, value: cd.alpha2}))
)
// Desktop floating-menu entries, same ordering.
const menuItems = memoize((countryData, onClick) =>
  Object.values(countryData)
    .sort((a: any, b: any) => a.name.localeCompare(b.name))
    .map((cd: any) => ({
      onClick: () => onClick(cd.alpha2),
      title: cd.pickerText,
      view: <MenuItem text={cd.pickerText} />,
    }))
)

const MenuItem = props => (
  <Kb.Box2 direction="horizontal" centerChildren={true} style={styles.menuItem}>
    <Kb.Text type="BodySemibold" center={true}>
      {props.text}
    </Kb.Text>
  </Kb.Box2>
)

type Props = {
  defaultCountry?: string, // TODO get this from core. ISO 3166-1 alpha-2 format (e.g. 'US')
  error: string,
  onChangeNumber: (number: string) => void, // E.164 format (e.g. '+18002667883').
  onChangeValidity: boolean => void,
  style?: Styles.StylesCrossPlatform,
}

type State = {
  country: string,
  formatted: string,
}

class _PhoneInput extends React.Component<Kb.PropsWithOverlay<Props>, State> {
  state = {country: this.props.defaultCountry || defaultCountry, formatted: ''}
  _formatter = new AsYouTypeFormatter(this.props.defaultCountry || defaultCountry)

  // Store the formatted number (skipping setState when unchanged), then
  // notify the parent via the setState callback.
  _setFormatted = formatted =>
    this.setState(s => {
      if (s.formatted === formatted) {
        return null
      }
      return {formatted}
    }, this._updateParent)

  // AsYouTypeFormatter doesn't support backspace
  // To get around this, on every text change:
  // 1. Clear the formatter
  // 2. Remove any non-numerics from the text
  // 3. Feed the new text into the formatter char by char
  // 4. Set the value of the input to the new formatted
  _reformat = _newText => {
    this._formatter.clear()
    const newText = filterNumeric(_newText)
    if (newText.trim().length === 0) {
      this._setFormatted('')
      return
    }
    for (let i = 0; i < newText.length - 1; i++) {
      this._formatter.inputDigit(newText[i])
    }
    const formatted = this._formatter.inputDigit(newText[newText.length - 1])
    this._setFormatted(formatted)
  }

  // Push the E.164 number and its validity up to the parent.
  _updateParent = () => {
    const validation = validateNumber(this.state.formatted, this.state.country)
    this.props.onChangeNumber(validation.e164)
    this.props.onChangeValidity(validation.valid)
  }

  // Switching country resets the formatter and wipes the current number.
  _setCountry = country => {
    if (this.state.country !== country) {
      this.setState({country})
      this._formatter = new AsYouTypeFormatter(country)
      this._reformat('')
    }
  }

  render() {
    return (
      <>
        <Kb.Box2
          alignItems="center"
          direction="horizontal"
          style={Styles.collapseStyles([styles.container, this.props.style])}
        >
          {/* Calling-code button; tapping it toggles the country selector */}
          <Kb.ClickableBox onClick={this.props.toggleShowingMenu} style={styles.fullHeight}>
            <Kb.Box2
              direction="horizontal"
              style={styles.callingCodeContainer}
              alignItems="center"
              fullHeight={true}
              gap="small"
              ref={this.props.setAttachmentRef}
            >
              <Kb.Text type="BodySemibold">{getCallingCode(this.state.country)}</Kb.Text>
              <Kb.Icon type="iconfont-caret-down" sizeType="Small" />
            </Kb.Box2>
          </Kb.ClickableBox>
          <Kb.PlainInput
            autoFocus={true}
            style={styles.input}
            flexable={true}
            keyboardType={isIOS ? 'number-pad' : 'numeric'}
            placeholder={getPlaceholder(this.state.country)}
            onChangeText={this._reformat}
            value={this.state.formatted}
          />
        </Kb.Box2>
        <CountrySelector
          attachTo={this.props.getAttachmentRef}
          onSelect={this._setCountry}
          onHidden={this.props.toggleShowingMenu}
          selected={this.state.country}
          visible={this.props.showingMenu}
        />
      </>
    )
  }
}
const PhoneInput = Kb.OverlayParentHOC(_PhoneInput)

type CountrySelectorProps = {|
  attachTo: () => ?React.Component<any>,
  onSelect: string => void,
  onHidden: () => void,
  selected: string,
  visible: boolean,
|}

type CountrySelectorState = {|
  selected: string,
|}

// Country chooser. Desktop renders a floating menu (selection is immediate);
// mobile renders a picker with cancel/done semantics, so the provisional
// choice lives in local state until "Done".
class CountrySelector extends React.Component<CountrySelectorProps, CountrySelectorState> {
  state = {selected: this.props.selected}

  componentDidUpdate(prevProps: CountrySelectorProps) {
    // Keep the provisional selection in sync when the parent changes it.
    if (this.props.selected !== prevProps.selected) {
      this._onSelect(this.props.selected)
    }
  }

  _onSelect = selected => this.setState(s => (s.selected === selected ? null : {selected}))

  // Mobile cancel: revert to the last committed selection.
  _onCancel = () => {
    this._onSelect(this.props.selected)
    this.props.onHidden()
  }

  // Mobile done: commit the provisional selection to the parent.
  _onDone = () => {
    this.props.onSelect(this.state.selected)
    this.props.onHidden()
  }

  // Desktop menu: commit immediately on click.
  _onSelectMenu = selected => {
    this.props.onSelect(selected)
  }

  render() {
    if (!Styles.isMobile) {
      return (
        <Kb.FloatingMenu
          closeOnSelect={true}
          containerStyle={{maxHeight: 160, width: 240}}
          items={menuItems(countryData, this._onSelectMenu)}
          onHidden={this.props.onHidden}
          visible={this.props.visible}
          attachTo={this.props.attachTo}
        />
      )
    }
    return (
      <Kb.FloatingPicker
        items={pickerItems(countryData)}
        onSelect={this._onSelect}
        onHidden={this._onCancel}
        onCancel={this._onCancel}
        onDone={this._onDone}
        selectedValue={this.state.selected}
        visible={this.props.visible}
      />
    )
  }
}

const styles = Styles.styleSheetCreate({
  callingCodeContainer: {
    ...Styles.padding(0, Styles.globalMargins.xsmall),
    borderRightColor: Styles.globalColors.black_10,
    borderRightWidth: 1,
    borderStyle: 'solid',
  },
  container: {
    backgroundColor: Styles.globalColors.white,
    borderColor: Styles.globalColors.black_10,
    borderRadius: Styles.borderRadius,
    borderStyle: 'solid',
    borderWidth: 1,
  },
  fullHeight: {height: '100%'},
  input: Styles.platformStyles({
    isElectron: {
      ...Styles.padding(0, Styles.globalMargins.xsmall),
    },
    isMobile: {
      ...Styles.padding(0, Styles.globalMargins.small),
    },
  }),
  menuItem: {
    ...Styles.padding(Styles.globalMargins.tiny, Styles.globalMargins.medium),
  },
})

export default PhoneInput
andela/ah-codeblooded-frontend
src/containers/RatingStats/state/action.test.js
<reponame>andela/ah-codeblooded-frontend<filename>src/containers/RatingStats/state/action.test.js import thunk from 'redux-thunk'; import configureMockStore from 'redux-mock-store'; import MockAdapter from 'axios-mock-adapter'; import api from '../../../utils/api'; import config from '../../../utils/config'; import { fetchingRatings, fetchingRatingSuccess, fetchingRatingsFailure, fetchRatingsAction, } from './actions'; import { FETCH_RATINGS, FETCH_RATINGS_SUCCESS, FETCH_RATINGS_FAILURE } from './types'; const rating = { rating: { rating: 5 } }; const middleware = [thunk]; const mockStore = configureMockStore(middleware); let store; describe('Rating actions', () => { const mock = new MockAdapter(api); const slug = 'fake-slug-md23'; const url = `${config.BASE_URL}articles/${slug}/ratings/`; store = mockStore({}); afterEach(() => { store.clearActions(); mock.reset(); }); it('should dispatch FETCH_RATINGS', () => { store.dispatch(fetchingRatings()); expect(store.getActions()).toEqual([{ type: FETCH_RATINGS }]); }); it('should dispatch FETCH_RATINGS_SUCCESS', () => { store.dispatch(fetchingRatingSuccess()); expect(store.getActions()).toEqual([{ payload: undefined, type: FETCH_RATINGS_SUCCESS }]); }); it('should dispatch FETCH_RAITINGS_FAILURE', () => { store.dispatch(fetchingRatingsFailure()); expect(store.getActions()).toContainEqual({ type: FETCH_RATINGS_FAILURE }); }); it('should dispatch FETCH_RATINGS_SUCCESS when getting current rating', () => { mock.onGet(url).reply(201, rating); store .dispatch(fetchRatingsAction(slug)) .then(() => { expect(store.getActions()).toContainEqual({ type: FETCH_RATINGS_SUCCESS, payload: rating, }); }) .catch(() => {}); }); });
leboff/CumulusCI
cumulusci/tasks/metadata_etl/base.py
from abc import ABCMeta, abstractmethod
import enum
from pathlib import Path
import tempfile
from urllib.parse import quote, unquote

from cumulusci.core.exceptions import CumulusCIException, TaskOptionsError
from cumulusci.core.tasks import BaseSalesforceTask
from cumulusci.salesforce_api.metadata import ApiRetrieveUnpackaged
from cumulusci.tasks.metadata.package import PackageXmlGenerator
from cumulusci.core.utils import process_bool_arg, process_list_arg
from cumulusci.utils import inject_namespace
from cumulusci.core.config import TaskConfig
from cumulusci.utils.xml import metadata_tree
from cumulusci.utils.xml.metadata_tree import MetadataElement


class MetadataOperation(enum.Enum):
    # Direction of a Metadata API interaction; used to pick which
    # package.xml is generated.
    DEPLOY = "deploy"
    RETRIEVE = "retrieve"


class BaseMetadataETLTask(BaseSalesforceTask, metaclass=ABCMeta):
    """Abstract base class for all Metadata ETL tasks.

    Concrete tasks should generally subclass BaseMetadataSynthesisTask,
    BaseMetadataTransformTask, or MetadataSingleEntityTransformTask."""

    # Subclasses toggle these to control which phases _run_task performs.
    deploy = False
    retrieve = False

    task_options = {
        "managed": {
            "description": "If False, changes namespace_inject to replace tokens with a blank string"
        },
        "namespace_inject": {
            "description": "If set, the namespace tokens in files and filenames are replaced with the namespace's prefix"
        },
        "api_version": {
            "description": "Metadata API version to use, if not project__package__api_version."
        },
    }

    def _init_options(self, kwargs):
        # Validate api_version and resolve namespace_inject from the project
        # config when not supplied explicitly.
        super()._init_options(kwargs)
        self.api_version = (
            self.options.get("api_version")
            or self.project_config.project__package__api_version
        )
        try:
            float(self.api_version)
        except ValueError:
            raise TaskOptionsError(f"Invalid API version {self.api_version}")
        self.options["namespace_inject"] = (
            self.options.get("namespace_inject")
            or self.project_config.project__package__namespace
        )
        # org_config might be None if we're freezing steps for metadeploy.
        # We can only autodetect the context for namespace injection if we have the org.
        if self.org_config:
            self._init_namespace_injection()

    def _init_namespace_injection(self):
        # Derive the managed / namespaced_org flags from the org when the
        # user did not set them explicitly.
        namespace = (
            self.options.get("namespace_inject")
            or self.project_config.project__package__namespace
        )
        if "managed" in self.options:
            self.options["managed"] = process_bool_arg(self.options["managed"] or False)
        else:
            # Managed context: the package namespace is installed in the org.
            self.options["managed"] = (
                bool(namespace) and namespace in self.org_config.installed_packages
            )
        if "namespaced_org" in self.options:
            self.options["namespaced_org"] = process_bool_arg(
                self.options["namespaced_org"] or False
            )
        else:
            # Namespaced-org context: the org itself owns the namespace.
            self.options["namespaced_org"] = (
                bool(namespace) and namespace == self.org_config.namespace
            )

    def _inject_namespace(self, text):
        """Inject the namespace into the given text if running in managed mode."""
        # We might not have an org yet if this is called from _init_options
        # while freezing steps for metadeploy.
        if self.org_config is None:
            return text
        # inject_namespace returns (filename, content); only content is needed.
        return inject_namespace(
            "",
            text,
            namespace=self.options["namespace_inject"],
            managed=self.options.get("managed") or False,
            namespaced_org=self.options.get("namespaced_org"),
        )[1]

    @abstractmethod
    def _get_package_xml_content(self, operation):
        """Return the textual content of a package.xml for the given operation."""
        pass

    def _generate_package_xml(self, operation):
        """Call _get_package_xml_content() and perform namespace injection if needed"""
        return self._inject_namespace(self._get_package_xml_content(operation))

    def _create_directories(self, tempdir):
        """Create self.retrieve_dir and self.deploy_dir, if required"""
        if self.retrieve:
            self.retrieve_dir = Path(tempdir, "retrieve")
            self.retrieve_dir.mkdir()
        if self.deploy:
            self.deploy_dir = Path(tempdir, "deploy")
            self.deploy_dir.mkdir()

    def _retrieve(self):
        """Retrieve metadata into self.retrieve_dir"""
        self.logger.info("Extracting existing metadata...")
        api_retrieve = ApiRetrieveUnpackaged(
            self,
            self._generate_package_xml(MetadataOperation.RETRIEVE),
            self.api_version,
        )
        # The API returns a zipfile; unpack it into the working directory.
        unpackaged = api_retrieve()
        unpackaged.extractall(self.retrieve_dir)

    @abstractmethod
    def _transform(self):
        """Transform the metadata in self.retrieve_dir into self.deploy_dir."""
        pass

    def _deploy(self):
        """Deploy metadata from self.deploy_dir"""
        self.logger.info("Loading transformed metadata...")
        # Write the deploy-side package.xml alongside the transformed files.
        target_profile_xml = Path(self.deploy_dir, "package.xml")
        target_profile_xml.write_text(
            self._generate_package_xml(MetadataOperation.DEPLOY), encoding="utf-8"
        )
        # import is here to avoid an import cycle
        from cumulusci.tasks.salesforce import Deploy

        api = Deploy(
            self.project_config,
            TaskConfig(
                {
                    "options": {
                        "path": self.deploy_dir,
                        "namespace_inject": self.options.get("namespace_inject"),
                        "unmanaged": not self.options["managed"],
                        "namespaced_org": self.options["namespaced_org"],
                    }
                }
            ),
            self.org_config,
        )
        result = api()
        return result

    def _post_deploy(self, result):
        """Run any post-deploy logic required, such as waiting for asynchronous
        operations to complete in the target org."""
        pass

    def _run_task(self):
        # Orchestrate the ETL pipeline inside a throwaway working directory:
        # retrieve (optional) -> transform -> deploy (optional) -> post-deploy.
        with tempfile.TemporaryDirectory() as tempdir:
            self._create_directories(tempdir)
            if self.retrieve:
                self._retrieve()
            self._transform()
            if self.deploy:
                result = self._deploy()
                self._post_deploy(result)


class BaseMetadataSynthesisTask(BaseMetadataETLTask, metaclass=ABCMeta):
    """Base class for Metadata ETL tasks that generate new metadata
    and deploy it into the org, but do not retrieve."""

    deploy = True

    def _generate_package_xml(self, deploy):
        """Synthesize a package.xml for generated metadata."""
        # Scan the synthesized files rather than asking the subclass for a
        # manifest.
        generator = PackageXmlGenerator(str(self.deploy_dir), self.api_version)
        return generator()

    def _transform(self):
        self._synthesize()

    @abstractmethod
    def _synthesize(self):
        """Create new metadata in self.deploy_dir."""
        pass


class BaseMetadataTransformTask(BaseMetadataETLTask, metaclass=ABCMeta):
    """Base class for Metadata ETL tasks that extract metadata, transform it,
    and deploy it back into the org."""

    retrieve = True
    deploy = True

    @abstractmethod
    def _get_entities(self):
        """Return a dict of Metadata API entities and API names to be transformed."""
        pass

    def _get_types_package_xml(self):
        """Generate package.xml content based on the return value of _get_entities()."""
        base = """    <types>
{members}
        <name>{name}</name>
    </types>
"""
        types = ""
        for entity, api_names in self._get_entities().items():
            members = "\n".join(
                f"        <members>{api_name}</members>"
                for api_name in sorted(api_names)
            )
            types += base.format(members=members, name=entity)

        return types

    def _get_package_xml_content(self, operation):
        # Same manifest for both retrieve and deploy.
        return f"""<?xml version="1.0" encoding="UTF-8"?>
<Package xmlns="http://soap.sforce.com/2006/04/metadata">
{self._get_types_package_xml()}
    <version>{self.api_version}</version>
</Package>
"""

    @abstractmethod
    def _transform(self):
        pass


class MetadataSingleEntityTransformTask(BaseMetadataTransformTask, metaclass=ABCMeta):
    """Base class for a Metadata ETL task that affects one or more
    instances of a specific metadata entity.

    Concrete subclasses must set `entity` to the Metadata API entity
    transformed, and implement _transform_entity()."""

    entity = None

    task_options = {
        "api_names": {"description": "List of API names of entities to affect"},
        **BaseMetadataETLTask.task_options,
    }

    def _init_options(self, kwargs):
        super()._init_options(kwargs)
        # Namespace-inject each requested API name ("*" means all retrieved).
        self.api_names = {
            self._inject_namespace(arg)
            for arg in process_list_arg(self.options.get("api_names", ["*"]))
        }
        # URL-quote the names (spaces excepted) so they match the filenames
        # the Metadata API uses on disk; "*" is left as the wildcard.
        self.api_names = {
            quote(arg, safe=" ") if arg != "*" else arg for arg in self.api_names
        }

    def _get_entities(self):
        return {self.entity: self.api_names}

    @abstractmethod
    def _transform_entity(self, metadata, api_name):
        """Accept an XML element corresponding to the metadata entity with the
        given api_name. Transform the XML and return the version which should
        be deployed, or None to suppress deployment of this entity."""
        pass

    def _transform(self):
        # call _transform_entity once per retrieved entity
        # if the entity is an XML file, provide a parsed version
        # and write the returned metadata into the deploy directory
        parser = PackageXmlGenerator(
            None, self.api_version
        )  # We'll use it for its metadata_map
        # Find the directory entry whose parser handles our entity type.
        entity_configurations = [
            entry
            for entry in parser.metadata_map
            if any(
                [
                    subentry["type"] == self.entity
                    for subentry in parser.metadata_map[entry]
                ]
            )
        ]
        if not entity_configurations:
            raise CumulusCIException(
                f"Unable to locate configuration for entity {self.entity}"
            )

        configuration = parser.metadata_map[entity_configurations[0]][0]

        if configuration["class"] not in [
            "MetadataFilenameParser",
            "CustomObjectParser",
        ]:
            raise CumulusCIException(
                f"MetadataSingleEntityTransformTask only supports manipulating complete, file-based XML entities (not {self.entity})"
            )

        extension = configuration["extension"]
        directory = entity_configurations[0]

        source_metadata_dir = self.retrieve_dir / directory

        if "*" in self.api_names:
            # Walk the retrieved directory to get the actual suite
            # of API names retrieved and rebuild our api_names list.
            self.api_names.remove("*")
            self.api_names = self.api_names.union(
                metadata_file.stem
                for metadata_file in source_metadata_dir.iterdir()
                if metadata_file.suffix == f".{extension}"
            )

        removed_api_names = set()
        for api_name in self.api_names:
            # Page Layout names can contain spaces, but parentheses and other
            # characters like ' and < are quoted.
            # We quote user-specified API names so we can locate the corresponding
            # metadata files, but present them un-quoted in messages to the user.
            unquoted_api_name = unquote(api_name)
            path = source_metadata_dir / f"{api_name}.{extension}"
            if not path.exists():
                raise CumulusCIException(f"Cannot find metadata file {path}")

            try:
                tree = metadata_tree.parse(str(path))
            except SyntaxError as err:
                # Attach the offending filename so the error is actionable.
                err.filename = path
                raise err
            transformed_xml = self._transform_entity(tree, unquoted_api_name)
            if transformed_xml:
                parent_dir = self.deploy_dir / directory
                if not parent_dir.exists():
                    parent_dir.mkdir()
                destination_path = parent_dir / f"{api_name}.{extension}"
                with destination_path.open(mode="w", encoding="utf-8") as f:
                    f.write(transformed_xml.tostring(xml_declaration=True))
            else:
                # Make sure to remove from our package.xml
                removed_api_names.add(api_name)

        # Suppressed entities must not appear in the deploy manifest.
        self.api_names = self.api_names - removed_api_names


class UpdateMetadataFirstChildTextTask(MetadataSingleEntityTransformTask):
    task_docs = """
    Metadata ETL task to update a single child element's text within metadata XML.

    If the child doesn't exist, the child is created and appended to the Metadata.   Furthermore, the ``value`` option is namespaced injected if the task is properly configured.

    Example: Assign a Custom Object's Compact Layout
    ------------------------------------------------

    Researching `CustomObject <https://developer.salesforce.com/docs/atlas.en-us.api_meta.meta/api_meta/customobject.htm>`_ in the Metadata API documentation or even retrieving the CustomObject's Metadata for inspection, we see the ``compactLayoutAssignment`` Field.  We want to assign a specific Compact Layout for our Custom Object, so we write the following CumulusCI task in our project's ``cumulusci.yml``.

    .. code-block::  yaml

        tasks:
            assign_compact_layout:
                class_path: cumulusci.tasks.metadata_etl.UpdateMetadataFirstChildTextTask
                options:
                    managed: False
                    namespace_inject: $project_config.project__package__namespace
                    entity: CustomObject
                    api_names: OurCustomObject__c
                    tag: compactLayoutAssignment
                    value: "%%%NAMESPACE%%%DifferentCompactLayout"
                    # We include a namespace token so it's easy to use this task in a managed context.

    Suppose the original CustomObject metadata XML looks like:

    .. code-block:: xml

        <?xml version="1.0" encoding="UTF-8"?>
        <CustomObject xmlns="http://soap.sforce.com/2006/04/metadata">
            ...
            <label>Our Custom Object</label>
            <compactLayoutAssignment>OriginalCompactLayout</compactLayoutAssignment>
            ...
        </CustomObject>

    After running ``cci task run assign_compact_layout``, the CustomObject metadata XML is deployed as:

    .. code-block:: xml

        <?xml version="1.0" encoding="UTF-8"?>
        <CustomObject xmlns="http://soap.sforce.com/2006/04/metadata">
            ...
            <label>Our Custom Object</label>
            <compactLayoutAssignment>DifferentCompactLayout</compactLayoutAssignment>
            ...
        </CustomObject>
    """

    task_options = {
        "metadata_type": {"description": "Metadata Type", "required": True},
        "tag": {
            "description": "Targeted tag. The text of the first instance of this tag within the metadata entity will be updated.",
            "required": True,
        },
        "value": {
            "description": "Desired value to set for the targeted tag's text. This value is namespace-injected.",
            "required": True,
        },
        **MetadataSingleEntityTransformTask.task_options,
    }

    def _init_options(self, kwargs):
        super()._init_options(kwargs)
        # The entity is user-configurable for this generic task.
        self.entity = self.options.get("metadata_type")
        self.options["value"] = self._inject_namespace(self.options.get("value"))

    def _transform_entity(
        self, metadata: MetadataElement, api_name: str
    ) -> MetadataElement:
        """Finds metadata's first child with tag.  If no child is found,
        appends a new child with tag.  Then updates child's text as the
        value option."""
        tag = self.options["tag"]
        child = metadata.find(tag)
        if child is None:
            child = metadata.append(tag)
        child.text = self.options["value"]

        self.logger.info(f'Updating {self.entity} "{api_name}":')
        self.logger.info(f'    {tag} as "{child.text}"')

        return metadata
Sajaki/intellij-community
plugins/ui-designer/src/com/intellij/uiDesigner/propertyInspector/properties/AbstractDimensionProperty.java
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.uiDesigner.propertyInspector.properties;

import com.intellij.uiDesigner.FormEditingUtil;
import com.intellij.uiDesigner.core.GridConstraints;
import com.intellij.uiDesigner.propertyInspector.Property;
import com.intellij.uiDesigner.propertyInspector.PropertyEditor;
import com.intellij.uiDesigner.propertyInspector.PropertyRenderer;
import com.intellij.uiDesigner.propertyInspector.editors.IntRegexEditor;
import com.intellij.uiDesigner.propertyInspector.renderers.DimensionRenderer;
import com.intellij.uiDesigner.radComponents.RadComponent;
import com.intellij.util.ui.JBUI;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;

import java.awt.*;

/**
 * This class is a base for implementing such properties
 * as "minimum size", "preferred size" and "maximum size".
 *
 * @author <NAME>
 * @author <NAME>
 */
public abstract class AbstractDimensionProperty<T extends RadComponent> extends Property<T, Dimension> {
  // Child "width"/"height" sub-properties shown under this property.
  private final Property[] myChildren;
  private final DimensionRenderer myRenderer;
  // Editor accepting integers >= -1 (-1 meaning "default") for both fields.
  private final IntRegexEditor<Dimension> myEditor;

  public AbstractDimensionProperty(@NonNls final String name){
    super(null, name);
    myChildren=new Property[]{
      new IntFieldProperty(this, "width", -1, JBUI.emptySize()),
      new IntFieldProperty(this, "height", -1, JBUI.emptySize()),
    };
    myRenderer = new DimensionRenderer();
    myEditor = new IntRegexEditor<>(Dimension.class, myRenderer, new int[]{-1, -1});
  }

  /** Returns the width/height sub-properties (shared across components). */
  @Override
  public final Property @NotNull [] getChildren(final RadComponent component){
    return myChildren;
  }

  @Override
  @NotNull
  public final PropertyRenderer<Dimension> getRenderer() {
    return myRenderer;
  }

  @Override
  public final PropertyEditor<Dimension> getEditor() {
    return myEditor;
  }

  /** Reads the value from the component's grid constraints. */
  @Override
  public Dimension getValue(T component) {
    return getValueImpl(component.getConstraints());
  }

  /** Extracts the concrete dimension (min/preferred/max) from the constraints. */
  protected abstract Dimension getValueImpl(final GridConstraints constraints);

  /** A property is "modified" when it differs from the component's default constraints. */
  @Override
  public boolean isModified(final T component) {
    final Dimension defaultValue = getValueImpl(FormEditingUtil.getDefaultConstraints(component));
    return !getValueImpl(component.getConstraints()).equals(defaultValue);
  }

  /** Restores the default-constraints value for this dimension. */
  @Override
  public void resetValue(T component) throws Exception {
    setValueImpl(component, getValueImpl(FormEditingUtil.getDefaultConstraints(component)));
  }
}
AugustinMascarelli/survol
survol/merge_scripts.py
#!/usr/bin/env python

"""
Merge data from several sources
"""

import os
import sys
import time
import cgi
import lib_util
import lib_common

# This CGI script is called as a CGI script,
# and its parameters are input URLs in Base64UrlSafe format.
# It merges the input urls into a single RDF document,
# then transformed into DOT, then SVG by Graphviz, then displayed.

# NOTE(review): DEBUG and WARNING are used below as global logging functions;
# they are not imported here, so they are presumably injected by the Survol
# framework at startup - confirm.

def Main():
    """Load each URL passed as a CGI parameter, merge their RDF content into
    lib_common.globalGraph, then render the merged graph in the chosen mode."""
    origReqUri = lib_util.RequestUri()

    # It initialises an implicit global object similar.
    # When in the mode of global merging, the method "cgiEnv.OutCgiRdf()" does not generate anything,
    # but simply stores the new cgiEnv in a global list..
    # The script loops on the URLs passed as CGI parameters.
    # The URLs are loaded and their content merged into the container lib_common.globalGraph
    lib_common.CgiEnvMergeMode()

    arguments = cgi.FieldStorage()

    # The display mode is read now, otherwise the CGI arguments are later destroyed, in this script.
    theMode = lib_util.GuessDisplayMode()
    DEBUG("theMode=%s",theMode)

    # Concatenation of error messages of each script.
    cumulatedError = ""

    # This logic might be needed in lib_client.py
    for urlfil in arguments.getlist("url"):
        # The parameters are coded in base64, although we leave the possibility not to encode them,
        # for compatibility with test scripts.
        complete_url = lib_util.Base64Decode(urlfil)

        DEBUG("complete_url=%s",complete_url)

        # Only the URL without the arguments.
        urlSplit = complete_url.split("?")
        urlNoArgs = urlSplit[0]
        if len(urlSplit) > 1:
            cgiQueryString = urlSplit[1]
        else:
            cgiQueryString = ""

        # The URL might be absolute or relative. Example:
        # "survol/sources_types/enumerate_CIM_Process.py?xid=."
        idxHtbin = urlNoArgs.find("sources_types/")
        if idxHtbin == -1:
            # This may be the main presentation page of a Survol, WMI or WBEM object. Example:
            # "http://127.0.0.1:80/Survol/survol/entity.py?xid=CIM_Process.Handle=640"
            survolPrefix = "survol/"
            idxSurvol = urlNoArgs.find(survolPrefix)
            if idxSurvol == -1:
                # TODO: This happens if the URL is a main presentation page of an object,
                # instead of a script: Something like "survol/entity.py/entity.py?xid=..."
                # This should be fixed but is not an issue.
                WARNING("merge: SHOULD NOT HAPPEN url=%s",complete_url)
                urlPathShort = "INVALID_MERGED_URL"
            else:
                # Just starts at the beginning of the script name: "entity.py", "entity_wmi.py", "entity_wbem.py".
                urlPathShort = urlNoArgs[idxSurvol + len(survolPrefix):]
        else:
            urlPathShort = urlNoArgs[idxHtbin:]

        # urlPathShort is the actual script to load.
        urlDirNam = os.path.dirname(urlPathShort)

        # The directory of the script is used to build a Python module name.
        moduNam = urlDirNam.replace("/",".")

        urlFilNam = os.path.basename(urlPathShort)

        DEBUG("urlPathShort=%s urlDirNam=%s moduNam=%s urlFilNam=%s",urlPathShort,urlDirNam,moduNam,urlFilNam)
        try:
            # argDir="sources_types.win32" urlFileNam="enumerate_top_level_windows.py"
            importedMod = lib_util.GetScriptModule(moduNam, urlFilNam)
        except Exception:
            errorMsg = sys.exc_info()[1]
            WARNING("Caught %s when loading moduNam=%s urlFilNam=%s",errorMsg,moduNam,urlFilNam)
            continue

        if not importedMod:
            cumulatedError = "merge_scripts.py Cannot import complete_url=%s" % (complete_url)
            continue

        try:
            # The entire URL must be "injected" so the parameters will be properly parsed,
            # when Main() call lib_util.RequestUri().
            # The script passed as CGI parameter, believes that it is loaded as a plain URL.
            urlUnquote = lib_util.urllib_unquote(complete_url)
            os.environ["REQUEST_URI"] = urlUnquote

            os.environ['SCRIPT_NAME'] = urlFilNam
            # "xid=EURO%5CLONL00111310@process:16580"
            os.environ['QUERY_STRING'] = cgiQueryString

            # Silence per-script error output while merging; re-enabled below.
            lib_common.enable_error_message(False)
            # This executes the script: The new nodes and links are merged in a global RDF container.
            importedMod.Main()
        except Exception:
            errorMsg = sys.exc_info()[1]
            WARNING("Caught %s when executing Main in moduNam=%s urlFilNam=%s",errorMsg,moduNam,urlFilNam)
            if cumulatedError != "":
                cumulatedError += " ; "
            cumulatedError += " url=" + urlNoArgs + " / "+urlFilNam + ":" + str(errorMsg)
            continue
        lib_common.enable_error_message(True)

        # Restore our own request URI for the next iteration / final output.
        os.environ["REQUEST_URI"] = origReqUri

    # OutCgiRdf has been called by each script without writing anything,
    # but the specific parameters per script are stored inside.
    # Here, all the RDF nodes and links, loaded from each URL, and then merged in lib_common.globalGraph,
    # are then transformed into the chosen output format.
    lib_common.MergeOutCgiRdf(theMode,cumulatedError)

if __name__ == '__main__':
    Main()
fotisl/zlint
v3/ext/asn1c/TorServiceDescriptorSyntax.c
/*
 * Generated by asn1c-0.9.29 (http://lionet.info/asn1c)
 * From ASN.1 module "CABFSelectedAttributeTypes"
 * 	found in "asn1/cabfev.asn1"
 * 	`asn1c -S /home/fotisl/Projects/revlintsynt/asn1c/skeletons -pdu=all -fwide-types -fcompound-names`
 */

/* NOTE: this file is machine-generated by asn1c. Do not edit by hand;
 * regenerate from the ASN.1 module instead. It defines the BER/XER/PER
 * type descriptor for TorServiceDescriptorSyntax, a SEQUENCE OF
 * TorServiceDescriptorHash constrained to SIZE(1..MAX). */

#include "TorServiceDescriptorSyntax.h"

/* OER constraint: lower bound only (SIZE(1..MAX)). */
static asn_oer_constraints_t asn_OER_type_TorServiceDescriptorSyntax_constr_1 CC_NOTUSED = {
	{ 0, 0 },
	-1	/* (SIZE(1..MAX)) */};
/* PER constraint: semi-constrained size with minimum 1. */
static asn_per_constraints_t asn_PER_type_TorServiceDescriptorSyntax_constr_1 CC_NOTUSED = {
	{ APC_UNCONSTRAINED,	-1, -1,  0,  0 },
	{ APC_SEMI_CONSTRAINED,	-1, -1,  1,  0 } /* (SIZE(1..MAX)) */,
	0, 0	/* No PER value map */
};
static asn_TYPE_member_t asn_MBR_TorServiceDescriptorSyntax_1[] = {
	{ ATF_POINTER, 0, 0,
		(ASN_TAG_CLASS_UNIVERSAL | (16 << 2)),
		0,
		&asn_DEF_TorServiceDescriptorHash,
		0,
		{ 0, 0, 0 },
		0, 0, /* No default value */
		""
		},
};
static const ber_tlv_tag_t asn_DEF_TorServiceDescriptorSyntax_tags_1[] = {
	(ASN_TAG_CLASS_UNIVERSAL | (16 << 2))
};
static asn_SET_OF_specifics_t asn_SPC_TorServiceDescriptorSyntax_specs_1 = {
	sizeof(struct TorServiceDescriptorSyntax),
	offsetof(struct TorServiceDescriptorSyntax, _asn_ctx),
	0,	/* XER encoding is XMLDelimitedItemList */
};
asn_TYPE_descriptor_t asn_DEF_TorServiceDescriptorSyntax = {
	"TorServiceDescriptorSyntax",
	"TorServiceDescriptorSyntax",
	&asn_OP_SEQUENCE_OF,
	asn_DEF_TorServiceDescriptorSyntax_tags_1,
	sizeof(asn_DEF_TorServiceDescriptorSyntax_tags_1)
		/sizeof(asn_DEF_TorServiceDescriptorSyntax_tags_1[0]), /* 1 */
	asn_DEF_TorServiceDescriptorSyntax_tags_1,	/* Same as above */
	sizeof(asn_DEF_TorServiceDescriptorSyntax_tags_1)
		/sizeof(asn_DEF_TorServiceDescriptorSyntax_tags_1[0]), /* 1 */
	{ &asn_OER_type_TorServiceDescriptorSyntax_constr_1, &asn_PER_type_TorServiceDescriptorSyntax_constr_1, SEQUENCE_OF_constraint },
	asn_MBR_TorServiceDescriptorSyntax_1,
	1,	/* Single element */
	&asn_SPC_TorServiceDescriptorSyntax_specs_1	/* Additional specs */
};
akiryk/stories-with-firebase
src/styles/index.js
import styled from 'styled-components';

// Shared styled-components for form layout.
// All components are named exports; consumers import them individually.

// Constrain forms so inputs do not stretch across very wide viewports.
export const StyledForm = styled.form`
  max-width: 600px;
`;

// Block-level, bold label with breathing room above each field.
export const Label = styled.label`
  display: block;
  font-family: 'Open Sans', sans-serif;
  font-weight: bold;
  margin: 20px 0 5px;
`;

// Full-width text input matching the label typeface.
export const Input = styled.input`
  font-family: 'Open Sans', sans-serif;
  width: 100%;
`;

// Wrapper that vertically separates consecutive form fields.
export const FormField = styled.div`
  margin-bottom: 20px;
`;
ra-ens/abdelhakim-rafik-jee
ra-spring/src/main/java/Exceptions/ConfigFileInvalideException.java
<gh_stars>0 package Exceptions; public class ConfigFileInvalideException extends Exception { public ConfigFileInvalideException() { super("Config file has non valide formate"); } }
Jonyker/APGeneric
model_ucenter/component-ucenter/src/main/java/com/wukj/business/ucenter/app/UCenterApplication.java
package com.wukj.business.ucenter.app;

import com.wukj.business.net.utils.VolleyUtils;
import com.wukj.lib.CommonApplication;
import com.wukj.lib.delegate.ApplicationDelegate;

/**
 * Application delegate for the user-center (ucenter) component module.
 * <p>
 * Declares itself at {@code LEVEL_BIZ}, chains {@link CommonApplication}
 * as a sub-delegate, and initializes the Volley networking helper when
 * the delegate is created.
 * <p>
 * Original author: Jonyker (https://github.com/Jonyker), 2018/8/7,
 * version V.1.0. (Header translated from Chinese.)
 */
public class UCenterApplication extends ApplicationDelegate {

    /** This module participates at the business (BIZ) delegate level. */
    @Override
    public int getLevel() {
        return LEVEL_BIZ;
    }

    /** Delegate classes whose lifecycle callbacks are chained after this one. */
    @Override
    public Class[] subDelegates() {
        return new Class[] {CommonApplication.class};
    }

    /** One-time setup: initialize Volley with the application context. */
    @Override
    public void onCreateDelegate() {
        VolleyUtils.init(getApplicationContext());
    }
}
AndreySoKuR/SPbCT_BulyninMA
Laba23/expl1/expl1/main.cpp
//
//  main.cpp
//  expl1
//
//  Created by Мишаня on 12.11.2020.
//  Copyright © 2020 m1xxos. All rights reserved.
//

#include <unistd.h>
#include <stdlib.h>
#include <stdio.h>

#define A_MEGABYTE (1024 * 1024)

// Allocates one megabyte on the heap, writes a greeting into it and
// prints it.  Exits with EXIT_SUCCESS only if the allocation succeeded.
int main()
{
    char *some_memory;
    int megabyte = A_MEGABYTE;
    int exit_code = EXIT_FAILURE;

    some_memory = (char *)malloc(megabyte);
    if (some_memory != NULL) {
        // Fixed: original wrote "Hello Worldn\n" (stray 'n' typo).
        sprintf(some_memory, "Hello World\n");
        printf("%s", some_memory);
        free(some_memory);  // release the buffer instead of leaking it
        exit_code = EXIT_SUCCESS;
    }
    exit(exit_code);
}
gxw1/review_the_national_post-graduate_entrance_examination
books_and_notes/professional_courses/Security/sources/extra_books/Hacking:The Art of Exploitation (Second Edition)源代码/booksrc/ppm_gen.c
/*********************************************************\
*              Password Probability Matrix                *
*                  File: ppm_gen.c                        *
***********************************************************
*                                                         *
*        Author: <NAME> <<EMAIL>>                         *
*        Organization: Phiral Research Laboratories       *
*                                                         *
* Generator half of the PPM proof of concept.  Writes     *
* "4char.ppm", a lossy statistical map of every possible  *
* 4-character password hashed with crypt() and the salt   *
* 'je'.  The companion ppm_crack.c program uses that      *
* file to rapidly recover passwords in this keyspace.     *
*                                                         *
\*********************************************************/

#define _XOPEN_SOURCE   /* expose crypt() via <unistd.h> */
#include <unistd.h>
#include <stdio.h>
#include <stdlib.h>

#define HEIGHT 16384
#define WIDTH  1129
#define DEPTH  8
/* Parenthesized so the macro is safe in any surrounding expression. */
#define SIZE (HEIGHT * WIDTH * DEPTH)

/* Map a single crypt() hash byte ('.', '/', 0-9, A-Z, a-z) to a value in
 * [0, 63].  Bytes outside that alphabet now map to 0 instead of returning
 * an uninitialized value (the original left j unset in that case). */
int enum_hashbyte(char a) {
   int i, j = 0;
   i = (int)a;
   if((i >= 46) && (i <= 57))
      j = i - 46;
   else if((i >= 65) && (i <= 90))
      j = i - 53;
   else if((i >= 97) && (i <= 122))
      j = i - 59;
   return j;
}

/* Fold 3 hash bytes into one enumerated value in [0, HEIGHT). */
int enum_hashtriplet(char a, char b, char c) {
   return (((enum_hashbyte(c)%4)*4096)+(enum_hashbyte(a)*64)+enum_hashbyte(b));
}

/* Print a message (which may contain a %s consumed by extra) and exit. */
void barf(char *message, char *extra) {
   printf(message, extra);
   exit(1);
}

/* Generate a 4char.ppm file with all possible 4 char passwords (salted w/ je) */
int main() {
   char plain[5];
   char *code, *data;
   int i, j, k, l;
   unsigned int charval, val;
   FILE *handle;

   if(!(handle = fopen("4char.ppm", "w")))
      barf("Error: Couldn't open file '4char.ppm' for writing.\n", NULL);
   /* calloc, not malloc: the loops below only OR bits into this buffer,
    * so it must start zeroed or the matrix is built on garbage. */
   data = (char *) calloc(SIZE, 1);
   if (!(data))
      barf("Error: Couldn't allocate memory.\n", NULL);
   for(i=32; i<127; i++) {
      for(j=32; j<127; j++) {
         printf("Adding %c%c** to 4char.ppm..\n", i, j);
         for(k=32; k<127; k++) {
            for(l=32; l<127; l++) {
               plain[0] = (char)i;   // build every
               plain[1] = (char)j;   // possible 4 byte
               plain[2] = (char)k;   // password.
               plain[3] = (char)l;
               plain[4] = '\0';
               code = crypt((const char *)plain, (const char *)"je"); // hash it
               if(!code)
                  barf("Error: crypt() failed.\n", NULL);
               /* Lossfully store statistical info about the pairings.
                * Four overlapping hash-byte triplets are recorded; for each,
                * one bit marks the first plaintext pair and (HEIGHT*4 rows
                * later) one bit marks the last plaintext pair. */
               val = enum_hashtriplet(code[2], code[3], code[4]); // bytes 2-4
               charval = (i-32)*95 + (j-32);  // first 2 plaintext bytes
               data[(val*WIDTH)+(charval/8)] |= (1<<(charval%8));
               val += (HEIGHT * 4);
               charval = (k-32)*95 + (l-32);  // last 2 plaintext bytes
               data[(val*WIDTH)+(charval/8)] |= (1<<(charval%8));

               val = HEIGHT + enum_hashtriplet(code[4], code[5], code[6]); // bytes 4-6
               charval = (i-32)*95 + (j-32);  // first 2 plaintext bytes
               data[(val*WIDTH)+(charval/8)] |= (1<<(charval%8));
               val += (HEIGHT * 4);
               charval = (k-32)*95 + (l-32);  // last 2 plaintext bytes
               data[(val*WIDTH)+(charval/8)] |= (1<<(charval%8));

               val = (2 * HEIGHT) + enum_hashtriplet(code[6], code[7], code[8]); // bytes 6-8
               charval = (i-32)*95 + (j-32);  // first 2 plaintext bytes
               data[(val*WIDTH)+(charval/8)] |= (1<<(charval%8));
               val += (HEIGHT * 4);
               charval = (k-32)*95 + (l-32);  // last 2 plaintext bytes
               data[(val*WIDTH)+(charval/8)] |= (1<<(charval%8));

               val = (3 * HEIGHT) + enum_hashtriplet(code[8], code[9], code[10]); // bytes 8-10
               charval = (i-32)*95 + (j-32);  // first 2 plaintext chars
               data[(val*WIDTH)+(charval/8)] |= (1<<(charval%8));
               val += (HEIGHT * 4);
               charval = (k-32)*95 + (l-32);  // last 2 plaintext bytes
               data[(val*WIDTH)+(charval/8)] |= (1<<(charval%8));
            }
         }
      }
   }
   printf("finished.. saving..\n");
   if(fwrite(data, SIZE, 1, handle) != 1)   // report short/failed writes
      barf("Error: Couldn't write to '4char.ppm'.\n", NULL);
   free(data);
   fclose(handle);
   return 0;
}
techsparksguru/python_ci_automation
Class7/testdata_json/create_parse_json.py
import json

# Raw JSON document used as demo input.
json_string = """{
    "desc":{
        "someKey":"someValue",
        "anotherKey":"value"
    },
    "main_item":{
        "stats":{
            "a":8,
            "b":12,
            "c":10
        }
    }
}"""

# Parse the raw text into a nested dict.
json_dict = json.loads(json_string)

print("Printing the json")

# Walk the top-level keys and show each key alongside its value.
print("Printing the keys and values")
for key, value in json_dict.items():
    print(key, value)

# Demonstrate direct access into the nested structure.
print("Accessing some values...")
print(json_dict["desc"]["someKey"])
print(json_dict["main_item"]["stats"]["a"], "\n")
intellimate/Server
src/main/java/org/intellimate/server/mail/MailHandlerSendGrid.java
<filename>src/main/java/org/intellimate/server/mail/MailHandlerSendGrid.java package org.intellimate.server.mail; import com.sendgrid.SendGrid; import com.sendgrid.SendGridException; import org.intellimate.server.jwt.JWTHelper; import java.io.IOException; /** * @author LeanderK * @version 1.0 */ public class MailHandlerSendGrid extends MailHandler { private final SendGrid sendgrid; public MailHandlerSendGrid(JWTHelper jwtHelper, boolean disabled, String sendGridAPIKey, String deliveryEmailAddress) { super(deliveryEmailAddress, jwtHelper, disabled); this.sendgrid = new SendGrid(sendGridAPIKey); } @Override protected void sendMail(String receiver, String subject, String html) throws IOException { SendGrid.Email email = new SendGrid.Email(); email.addTo(receiver); email.setFrom(deliveryEmailAddress); email.setSubject(subject); email.setHtml(html); try { SendGrid.Response response = sendgrid.send(email); } catch (SendGridException e) { throw new IOException(e); } } }
ejctechnology/tad-boilerplate
tad-master/src/Pages/Components/Card/index.js
import React, { useEffect, useState } from "react"; import Amplify, { API, graphqlOperation } from "aws-amplify"; import { listCards } from "../../../graphql/queries"; import awsExports from "../../../aws-exports"; Amplify.configure(awsExports); function Card() { const [cards, setCards] = useState([]); useEffect(() => { fetchCards(); }, []); async function fetchCards() { try { const cardData = await API.graphql(graphqlOperation(listCards)); const cards = cardData.data.listCards.items; setCards(cards); } catch (err) { console.log("error fetching cards"); } } return ( <div> <div className="CardHero"> <h1>How does take any doubt works?</h1> {cards.map((cards, index) => ( <div className="CardGroup" key={cards.id ? cards.id : index}> <h2>{cards.title}</h2> <p>{cards.description}</p> </div> ))} </div> </div> ); } export default Card;
stopiccot/OpenSubdiv
opensubdiv/hbr/vertex.h
// // Copyright 2013 Pixar // // Licensed under the Apache License, Version 2.0 (the "Apache License") // with the following modification; you may not use this file except in // compliance with the Apache License and the following modification to it: // Section 6. Trademarks. is deleted and replaced with: // // 6. Trademarks. This License does not grant permission to use the trade // names, trademarks, service marks, or product names of the Licensor // and its affiliates, except as required to comply with Section 4(c) of // the License and to reproduce the content of the NOTICE file. // // You may obtain a copy of the Apache License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the Apache License with the above modification is // distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the Apache License for the specific // language governing permissions and limitations under the Apache License. 
// #ifndef HBRVERTEX_H #define HBRVERTEX_H #include <assert.h> #include <iostream> #include <iterator> #include <vector> #include "../hbr/fvarData.h" #include "../hbr/face.h" #include "../version.h" namespace OpenSubdiv { namespace OPENSUBDIV_VERSION { template <class T> class HbrHalfedge; template <class T> class HbrHalfedgeCompare; template <class T> class HbrVertex; template <class T> class HbrVertexOperator; template <class T> class HbrFaceOperator; template <class T> class HbrHalfedgeOperator; template <class T> class HbrVertex { public: HbrVertex(); HbrVertex(int vid, const T &data, int fvarwidth) { Initialize(vid, data, fvarwidth); } void Initialize(int vid, const T &data, int fvarwidth); ~HbrVertex(); void Destroy(HbrMesh<T> *mesh = 0); // Registers an incident edge with the vertex void AddIncidentEdge(HbrHalfedge<T>* edge); // Unregister an incident edge with the vertex void RemoveIncidentEdge(HbrHalfedge<T>* edge); // Checks if removal of the indicated incident edge will result // in a singular vertex bool EdgeRemovalWillMakeSingular(HbrHalfedge<T>* edge) const; // Sets up vertex flags after the vertex has been bound to a mesh void Finish(); // Compute the valence of this vertex int GetValence() const; // Compute the valence of this vertex including only edges which // are "coarse" (highest level edges) int GetCoarseValence() const; // Return vertex ID int GetID() const { return id; } // Return vertex data T& GetData() { return data; } // Return vertex data const T& GetData() const { return data; } // Returns the facevarying data which is matched to the face. // This may either be the "generic" facevarying item (fvardata, so // data.GetFace() == face) or one specifically registered to the // face (in the middle of morefvardata, so data.GetFace() == // face). If we require storage for a facevarying data designed to // store discontinuous values for this face, we must have called // NewFVarData before GetFVarData will give it to us. 
HbrFVarData<T>& GetFVarData(const HbrFace<T>* face); // Returns new facevarying data matched to the face HbrFVarData<T>& NewFVarData(const HbrFace<T>* face); // Return any incident face attached to the vertex HbrFace<T>* GetFace() const; // Return the mesh to which this vertex belongs HbrMesh<T>* GetMesh() const; // Return an edge connected to dest HbrHalfedge<T>* GetEdge(const HbrVertex<T>* dest) const; // Return an edge connected to vertex with id dest HbrHalfedge<T>* GetEdge(int dest) const; // Given an edge, returns the next edge in counterclockwise order // around this vertex. Note well: this is only the next halfedge, // which means that all edges returned by this function are // guaranteed to have the same origin vertex (ie this vertex). In // boundary cases if you are interested in all edges you will not // get the last edge with this function. For that reason, // GetSurroundingEdges is preferred. HbrHalfedge<T>* GetNextEdge(const HbrHalfedge<T>* edge) const; // Given an edge, returns the previous edge (ie going clockwise) // around this vertex HbrHalfedge<T>* GetPreviousEdge(const HbrHalfedge<T>* edge) const; // Quadedge-like algebra subset. Since we are dealing with // halfedges and not symmetric edges, these functions accept a // destination vertex which indicates the (possibly imaginary) // halfedge we are considering, and return the destination vertex // of the desired (also possibly imaginary) halfedge. Also, // currently they are potentially very inefficient and should be // avoided. HbrVertex<T>* GetQEONext(const HbrVertex<T>* dest) const; HbrVertex<T>* GetQEONext(const HbrHalfedge<T>* edge) const; HbrVertex<T>* GetQEOPrev(const HbrHalfedge<T>* edge) const; HbrVertex<T>* GetQEOPrev(const HbrVertex<T>* dest) const; HbrVertex<T>* GetQELNext(const HbrVertex<T>* dest) const; // Returns true if the vertex is on a boundary edge bool OnBoundary() const; // Returns true if the vertex has a facevarying mask which is // smooth (0). 
bool IsFVarSmooth(int datum); // Returns true if all facevarying data has facevarying mask which // is smooth bool IsFVarAllSmooth(); // Returns true if the vertex has a facevarying mask which is dart // (1). bool IsFVarDart(int datum); // Returns true if the vertex is a facevarying corner for any // incident face, where "cornerness" is defined as having two // incident edges that make up a face both being facevarying // edges. bool IsFVarCorner(int datum); // Returns the sharpness of the vertex float GetSharpness() const { return sharpness; } // Sets the sharpness of the vertex void SetSharpness(float sharp) { sharpness = sharp; ClearMask(); } // Returns whether the corner is sharp at the current level of // subdivision (next = false) or at the next level of subdivision // (next = true). bool IsSharp(bool next) const { return (next ? (sharpness > 0.0f) : (sharpness >= 1.0f)); } // Sets the vertex mask if the vertex is sharp to reflect that // it's a corner void ClearMask() { mask0 = mask1 = 0; validmask = 0; volatil = 0; } // Returns the integer mask of the vertex at the current level of // subdivision (next = false) or at the next level of subdivision // (next = true) unsigned char GetMask(bool next); // Returns the facevarying integer mask of the vertex unsigned char GetFVarMask(int datum); // Computes the "fractional mask" of the vertex at the current // subdivision level, based on the fractional sharpnesses of any // adjacent sharp edges. The fractional mask is a value between 0 // and 1 float GetFractionalMask() const; // Returns whether the vertex is singular (has two separate // incident halfedge cycles) bool IsSingular() const { return nIncidentEdges > 1; } // Collect the ring of edges around this vertex. Note well: not // all edges in this list will have an orientation where the // origin of the edge is this vertex! 
This function requires an // output iterator; to get the edges into a std::vector, use // GetSurroundingEdges(std::back_inserter(myvector)) template <typename OutputIterator> void GetSurroundingEdges(OutputIterator edges) const; // Apply an edge operator to each edge in the ring of edges // around this vertex void ApplyOperatorSurroundingEdges(HbrHalfedgeOperator<T> &op) const; // Collect the ring of vertices around this vertex (the ones that // share an edge with this vertex). This function requires an // output iterator; to get the vertices into a std::vector, use // GetSurroundingVertices(std::back_inserter(myvector)) template <typename OutputIterator> void GetSurroundingVertices(OutputIterator vertices) const; // Apply a vertex operator to each vertex in the ring of vertices // around this vertex void ApplyOperatorSurroundingVertices(HbrVertexOperator<T> &op) const; // Applys an operator to the ring of faces around this vertex void ApplyOperatorSurroundingFaces(HbrFaceOperator<T> &op) const; // Returns the parent, which can be a edge, face, or vertex HbrHalfedge<T>* GetParentEdge() const { return (parentType == k_ParentEdge ? parent.edge : 0); } HbrFace<T>* GetParentFace() const { return (parentType == k_ParentFace ? parent.face : 0); } HbrVertex<T>* GetParentVertex() const { return (parentType == k_ParentVertex ? 
parent.vertex : 0); } // Set the parent pointer void SetParent(HbrHalfedge<T>* edge) { assert(!edge || !parent.vertex); parentType = k_ParentEdge; parent.edge = edge; } void SetParent(HbrFace<T>* face) { assert(!face || !parent.vertex); parentType = k_ParentFace; parent.face = face; } void SetParent(HbrVertex<T>* vertex) { assert(!vertex || !parent.vertex); parentType = k_ParentVertex; parent.vertex = vertex; } // Subdivides the vertex and returns the child vertex HbrVertex<T>* Subdivide(); // Refines the ring of faces around this vertex void Refine(); // Make sure the vertex has all faces in the ring around it void GuaranteeNeighbors(); // Indicates that the vertex may have a missing face neighbor and // may need to guarantee its neighbors in the future void UnGuaranteeNeighbors() { neighborsguaranteed = 0; // Its mask is also invalidated validmask = 0; } // True if the edge has a subdivided child vertex bool HasChild() const { return vchild!=-1; } // Remove the reference to subdivided vertex void RemoveChild() { vchild = -1; } // Returns true if the vertex still has an incident edge (in other // words, it belongs to a face) bool IsReferenced() const { return references != 0; } // Returns true if the vertex is extraordinary bool IsExtraordinary() const { return extraordinary; } // Tag the vertex as being extraordinary void SetExtraordinary() { extraordinary = 1; } // Returns whether the vertex is volatile (incident to a semisharp // edge or semisharp corner) bool IsVolatile() { if (!validmask) GetMask(false); return volatil; } // Simple bookkeeping needed for garbage collection by HbrMesh bool IsCollected() const { return collected; } void SetCollected() { collected = 1; } void ClearCollected() { collected = 0; } // Bookkeeping to see if a vertex edit exists for this vertex bool HasVertexEdit() const { return hasvertexedit; } void SetVertexEdit() { hasvertexedit = 1; } void ClearVertexEdit() { hasvertexedit = 0; } // Returns memory statistics unsigned long 
GetMemStats() const; // Returns true if the vertex is connected. This means that it has // an incident edge bool IsConnected() const { return nIncidentEdges > 0; } // Return an incident edge to this vertex, which happens to be the // first halfedge of the cycles. HbrHalfedge<T>* GetIncidentEdge() const { if (nIncidentEdges > 1) { return incident.edges[0]; } else if (nIncidentEdges == 1) { return incident.edge; } else { return 0; } } // Sharpness and mask constants enum Mask { k_Smooth = 0, k_Dart = 1, k_Crease = 2, k_Corner = 3, k_InfinitelySharp = 10 }; // Increment the usage counter on the vertex void IncrementUsage() { used++; } // Decrement the usage counter on the vertex void DecrementUsage() { used--; } // Check the usage counter on the vertex bool IsUsed() const { return used || (vchild != -1); } // Used by block allocator HbrVertex<T>*& GetNext() { return parent.vertex; } // Returns the blind pointer to client data void *GetClientData(HbrMesh<T>* mesh) const { return mesh->GetVertexClientData(id); } // Sets the blind pointer to client data void SetClientData(HbrMesh<T> *mesh, void *data) { mesh->SetVertexClientData(id, data); } enum ParentType { k_ParentNone, k_ParentFace, k_ParentEdge, k_ParentVertex }; private: // Splits a singular vertex into multiple nonsingular vertices void splitSingular(); // Data T data; // Pointer to extra facevarying data. Space for this is allocated // by NewFVarData. This struct is actually overpadded. struct morefvardata { int count; } *morefvar; // Unique ID of this vertex int id; // The number of halfedges which have this vertex as the incident // edge. When references == 0, the vertex is safe to delete int references; // The number of faces marked "used" which share this vertex. May // not be the same as references! 
int used; // Sharpness float sharpness; // Index of child vertex int vchild; // Size of incident array unsigned short nIncidentEdges; // Vertex masks, at this level of subdivision and at the next // level of subdivision. Valid only when validmask = 1. unsigned short mask0:3; unsigned short mask1:3; // Extraordinary bit unsigned short extraordinary:1; // Whether the current mask value is correct or should be recalculated unsigned short validmask:1; // Whether the vertex is "volatile" (is incident to a semisharp edge, // or is a semisharp corner) unsigned short volatil:1; // Whether we can guarantee the existence of neighboring faces on // this vertex unsigned short neighborsguaranteed:1; // Bookkeeping for HbrMesh unsigned short collected:1; // Whether the vertex has an edit. The edit is owned by a face // so this is just a tag that indicates we need to search the // vertex's neighboring faces for an edit unsigned short hasvertexedit:1; // Whether the vertex edit (if any) has been applied unsigned short editsapplied:1; // Whether Destroy() has been called unsigned short destroyed:1; // Parent type - can be face, edge, or vertex unsigned short parentType:2; // List of edge cycles. For "singular" vertices, the corresponding // set of adjacent halfedges may consist of several cycles, and we // need to account for all of them here. In cases where // nIncidentEdges is 1, the edge field of the union points // directly at the edge which starts the only incident cycle. If // nIncidnetEdges is 2 or more, the edges field of the union is a // separate allocated array and edge member of the array points at // separate cycles. 
union { HbrHalfedge<T>* edge; HbrHalfedge<T>** edges; } incident; union { HbrFace<T>* face; HbrHalfedge<T>* edge; HbrVertex<T>* vertex; } parent; #ifdef HBR_ADAPTIVE public: struct adaptiveFlags { unsigned isTagged:1; unsigned wasTagged:1; adaptiveFlags() : isTagged(0), wasTagged(0) { } }; adaptiveFlags _adaptiveFlags; #endif }; template <class T> HbrVertex<T>::HbrVertex() : morefvar(0), id(-1), references(0), used(0), sharpness(0.0f), vchild(-1), nIncidentEdges(0), extraordinary(0), validmask(0), volatil(0), neighborsguaranteed(0), collected(0), hasvertexedit(0), editsapplied(0), destroyed(0), parentType(k_ParentNone) { ClearMask(); parent.vertex = 0; incident.edge = 0; } template <class T> void HbrVertex<T>::Initialize(int vid, const T &vdata, int fvarwidth) { data = vdata; morefvar = 0 ; id = vid; references = 0; used = 0; extraordinary = 0; ClearMask(); neighborsguaranteed = 0; collected = 0; hasvertexedit = 0; editsapplied = 0; destroyed = 0; sharpness = 0.0f; nIncidentEdges = 0; vchild = -1; assert(!parent.vertex); parentType = k_ParentVertex; parent.vertex = 0; if (fvarwidth) { // Upstream allocator ensured the class was padded by the // appropriate size. GetFVarData will return a pointer to this // memory, but it needs to be properly initialized. // Run placement new to initialize datum char *buffer = ((char*) this + sizeof(*this)); new (buffer) HbrFVarData<T>(); } } template <class T> HbrVertex<T>::~HbrVertex() { Destroy(); } template <class T> void HbrVertex<T>::Destroy(HbrMesh<T> *mesh) { if (!destroyed) { // Vertices are only safe for deletion if the number of incident // edges is exactly zero. 
assert(references == 0); // Delete parent reference to self if (parentType == k_ParentEdge && parent.edge) { parent.edge->RemoveChild(); parent.edge = 0; } else if (parentType == k_ParentFace && parent.face) { parent.face->RemoveChild(); parent.face = 0; } else if (parentType == k_ParentVertex && parent.vertex) { parent.vertex->RemoveChild(); parent.vertex = 0; } // Orphan the child vertex if (vchild != -1) { if (mesh) { HbrVertex<T> *vchildVert = mesh->GetVertex(vchild); vchildVert->SetParent(static_cast<HbrVertex*>(0)); } vchild = -1; } // We're skipping the placement destructors here, in the // assumption that HbrFVarData's destructor doesn't actually do // anything much if (morefvar) { free(morefvar); } destroyed = 1; } } template <class T> void HbrVertex<T>::AddIncidentEdge(HbrHalfedge<T>* edge) { assert(edge->GetOrgVertex() == this); // First, maintain the property that all of the incident edges // will always be a boundary edge if possible. If any of the // incident edges are no longer boundaries at this point then they // can be immediately removed. int i; unsigned short newEdgeCount = 0; bool edgeFound = false; HbrHalfedge<T>** incidentEdges = (nIncidentEdges > 1) ? incident.edges : &incident.edge; for (i = 0; i < nIncidentEdges; ++i) { if (incidentEdges[i] == edge) { edgeFound = true; } if (incidentEdges[i]->IsBoundary()) { incidentEdges[newEdgeCount++] = incidentEdges[i]; } else { // Did this edge suddenly stop being a boundary because // the newly introduced edge (or something close to it) // closed a cycle? If so, we don't want to lose a pointer // to this edge cycle! So check to see if this cycle is // complete, and if so, keep it. 
HbrHalfedge<T>* start = incidentEdges[i]; HbrHalfedge<T>* edge = start; bool prevmatch = false; do { edge = GetNextEdge(edge); // Check all previous incident edges, if already // encountered then we have an edge to this cycle and // don't need to proceed further with this check for (int j = 0; j < i; ++j) { if (incidentEdges[j] == edge) { prevmatch = true; break; } } } while (!prevmatch && edge && edge != start); if (!prevmatch && edge && edge == start) { incidentEdges[newEdgeCount++] = incidentEdges[i]; } } } // If we are now left with no incident edges, then this edge // becomes the sole incident edge (since we always need somewhere // to start, even if it's a uninterrupted cycle [ie it doesn't // matter whether the edge is a boundary]). Restore incidentEdges // array to point to the end of the object. if (newEdgeCount == 0) { if (!(edgeFound && nIncidentEdges == 1)) { if (nIncidentEdges > 1) { delete [] incidentEdges; } incidentEdges = &incident.edge; incidentEdges[0] = edge; nIncidentEdges = 1; } } // Otherwise, we already have a set of incident edges - we only // add this edge if it's a boundary edge, which would begin a new // cycle. else if (edge->IsBoundary()) { if (!edgeFound) { // Must add the new edge. May need to reallocate here. if (newEdgeCount + 1 != nIncidentEdges) { HbrHalfedge<T>** newIncidentEdges = 0; if (newEdgeCount + 1 > 1) { newIncidentEdges = new HbrHalfedge<T>*[newEdgeCount + 1]; } else { newIncidentEdges = &incident.edge; } for (i = 0; i < newEdgeCount; ++i) { newIncidentEdges[i] = incidentEdges[i]; } if (nIncidentEdges > 1) { delete[] incidentEdges; } nIncidentEdges = newEdgeCount + 1; incidentEdges = newIncidentEdges; if (nIncidentEdges > 1) { incident.edges = newIncidentEdges; } } incidentEdges[newEdgeCount] = edge; } else { // Edge is already in our list, so we don't need to add it // again. 
However, we may need to reallocate due to above // cleaning of nonboundary edges if (newEdgeCount != nIncidentEdges) { HbrHalfedge<T>** newIncidentEdges = 0; if (newEdgeCount > 1) { newIncidentEdges = new HbrHalfedge<T>*[newEdgeCount]; } else { newIncidentEdges = &incident.edge; } for (i = 0; i < newEdgeCount; ++i) { newIncidentEdges[i] = incidentEdges[i]; } if (nIncidentEdges > 1) { delete[] incidentEdges; } nIncidentEdges = newEdgeCount; incidentEdges = newIncidentEdges; if (nIncidentEdges > 1) { incident.edges = newIncidentEdges; } } } } else { // Again, we may need to reallocate due to above cleaning of // nonboundary edges if (newEdgeCount != nIncidentEdges) { HbrHalfedge<T>** newIncidentEdges = 0; if (newEdgeCount > 1) { newIncidentEdges = new HbrHalfedge<T>*[newEdgeCount]; } else { newIncidentEdges = &incident.edge; } for (i = 0; i < newEdgeCount; ++i) { newIncidentEdges[i] = incidentEdges[i]; } if (nIncidentEdges > 1) { delete[] incidentEdges; } nIncidentEdges = newEdgeCount; incidentEdges = newIncidentEdges; if (nIncidentEdges > 1) { incident.edges = newIncidentEdges; } } } // For non-boundary edges, ensure that the incident edge starting // the cycle is the lowest possible edge. By doing this, // operations like GetSurroundingEdges will be guaranteed to // return the same order of edges/faces through multi-threading. 
if (!incidentEdges[0]->IsBoundary()) {
        HbrHalfedge<T>* start = GetIncidentEdge();
        incidentEdges[0] = start;
        HbrFacePath incidentEdgePath = incidentEdges[0]->GetFace()->GetPath();
        HbrHalfedge<T>* e = GetNextEdge(start);
        while (e) {
            if (e == start)
                break;
            HbrFacePath ePath = e->GetFace()->GetPath();
            if (ePath < incidentEdgePath) {
                incidentEdges[0] = e;
                incidentEdgePath = ePath;
            }
            HbrHalfedge<T>* next = GetNextEdge(e);
            if (!next) {
                // Boundary reached: also consider the last edge of the cycle.
                e = e->GetPrev();
                if (e->GetFace()->GetPath() < incidentEdges[0]->GetFace()->GetPath()) {
                    incidentEdges[0] = e;
                }
                break;
            } else {
                e = next;
            }
        }
    }
    references++;
}

// Unregisters 'edge' from the incident-edge cycle bookkeeping and
// decrements the reference count, reshuffling or splitting the stored
// cycles as needed.
template <class T>
void
HbrVertex<T>::RemoveIncidentEdge(HbrHalfedge<T>* edge) {
    int i, j;
    HbrHalfedge<T>** incidentEdges =
        (nIncidentEdges > 1) ? incident.edges : &incident.edge;
    references--;
    if (references) {
        HbrHalfedge<T>* next;
        // We may need to shuffle our halfedge cycles. First we check
        // whether the edge being erased begins any edge cycles
        bool edgeFound = false;
        next = GetNextEdge(edge);
        for (i = 0; i < nIncidentEdges; ++i) {
            if (incidentEdges[i] == edge) {
                // Edge cycle found. Replace the edge with the next edge
                // in the cycle if possible.
                if (next) {
                    incidentEdges[i] = next;
                    // We are done.
                    return;
                }
                // If no next edge is found it means the entire cycle
                // has gone away.
                edgeFound = true;
                break;
            }
        }
        // The edge cycle needs to disappear
        if (edgeFound) {
            assert(nIncidentEdges > 1);
            HbrHalfedge<T>** newIncidentEdges = 0;
            if (nIncidentEdges - 1 > 1) {
                newIncidentEdges = new HbrHalfedge<T>*[nIncidentEdges - 1];
            } else {
                newIncidentEdges = &incident.edge;
            }
            j = 0;
            for (i = 0; i < nIncidentEdges; ++i) {
                if (incidentEdges[i] != edge) {
                    newIncidentEdges[j++] = incidentEdges[i];
                }
            }
            assert(j == nIncidentEdges - 1);
            if (nIncidentEdges > 1) {
                delete[] incidentEdges;
            }
            nIncidentEdges--;
            if (nIncidentEdges > 1) {
                incident.edges = newIncidentEdges;
            }
            return;
        }
        // Now deal with the case where we remove an edge
        // which did not begin a boundary edge cycle.
If this // happens then the resulting unbroken cycle does // get broken; in that case we replace the incident // edge with the next one after this. else if (nIncidentEdges == 1 && !incidentEdges[0]->IsBoundary()) { if (next) { incidentEdges[0] = next; } else { // hm, what does this mean for us? Not sure at the // moment. std::cout << "Could not split cycle!\n"; assert(0); } } // (Is this another case or a specialization of the above?) // When an edge in the middle of a boundary cycle goes away we // need to mark a new cycle. // // If there is no next edge, it means that we didn't // actually split the cycle, we just deleted the last edge // in the cycle. As such nothing needs to occur because // the "split" is already present. else if (!edge->IsBoundary() && next) { HbrHalfedge<T>** newIncidentEdges = 0; if (nIncidentEdges + 1 > 1) { newIncidentEdges = new HbrHalfedge<T>*[nIncidentEdges + 1]; } else { newIncidentEdges = &incident.edge; } for (i = 0; i < nIncidentEdges; ++i) { newIncidentEdges[i] = incidentEdges[i]; } newIncidentEdges[nIncidentEdges] = next; if (nIncidentEdges > 1) { delete[] incidentEdges; } nIncidentEdges++; if (nIncidentEdges > 1) { incident.edges = newIncidentEdges; } } } else { // No references left, we can just clear all the cycles if (nIncidentEdges > 1) { delete[] incidentEdges; } nIncidentEdges = 0; } } template <class T> bool HbrVertex<T>::EdgeRemovalWillMakeSingular(HbrHalfedge<T>* edge) const { // Only edge left, or no incident edges at all (how?) if (references <= 1 || nIncidentEdges <= 0) { return false; } // There are at least two existing cycles. We could maybe consider // the case where removal of this edge will actually make one of // the edge cycles go away, possibly leaving behind just one, but // we'll ignore that possibility for now else if (nIncidentEdges > 1) { return true; } // This is the incident edge starting a single cycle. 
Removal of // the edge will replace the start of the cycle with the next // edge, and we keep a single cycle. else if (nIncidentEdges == 1 && incident.edge == edge) { return false; } // Check the single cycle: was it interrupted? (i.e. a // boundary). If not interrupted, then deletion of any edge still // leaves a single cycle. Otherwise: if the edge is the *last* // edge in the cycle, we still don't need to split the any further // cycle. Otherwise we must split the cycle, which would result in // a singular vertex else if (!GetIncidentEdge()->IsBoundary()) { return false; } else if (GetNextEdge(edge)) { return true; } else { return false; } } template <class T> void HbrVertex<T>::Finish() { extraordinary = false; if (HbrMesh<T>* mesh = GetMesh()) { if (IsSingular()) splitSingular(); assert(!IsSingular()); if (mesh->GetSubdivision()) { extraordinary = mesh->GetSubdivision()->VertexIsExtraordinary(mesh, this); } } } template <class T> int HbrVertex<T>::GetValence() const { int valence = 0; assert(!IsSingular()); HbrHalfedge<T>* start = (nIncidentEdges > 1) ? incident.edges[0] : incident.edge; HbrHalfedge<T>* edge = start; if (edge) do { valence++; edge = GetNextEdge(edge); } while (edge && edge != start); // In boundary cases, we increment the valence count by // one more if (!edge) valence++; return valence; } template <class T> int HbrVertex<T>::GetCoarseValence() const { int valence = 0; assert(!IsSingular()); HbrHalfedge<T>* start = (nIncidentEdges > 1) ? incident.edges[0] : incident.edge; HbrHalfedge<T>* edge = start; if (edge) do { if (edge->IsCoarse()) { valence++; } edge = GetNextEdge(edge); } while (edge && edge != start); // In boundary cases, we increment the valence count by one more // (this assumes the last edge is coarse, which it had better be // in the boundary case!) 
if (!edge) valence++; return valence; } template <class T> HbrFVarData<T>& HbrVertex<T>::GetFVarData(const HbrFace<T>* face) { // See if there are any extra facevarying datum associated with // this vertex, and whether any of them match the face. if (morefvar) { size_t fvtsize = sizeof(HbrFVarData<T>) + sizeof(float) * (GetMesh()->GetTotalFVarWidth() - 1); HbrFVarData<T> *fvt = (HbrFVarData<T> *)((char *) morefvar + sizeof(int)); for (int i = 0; i < morefvar->count; ++i) { if (fvt->GetFaceID() == face->GetID()) { return *fvt; } fvt = (HbrFVarData<T>*)((char*) fvt + fvtsize); } } // Otherwise, return the default facevarying datum, which lives // in the overallocated space after the end of this object return *((HbrFVarData<T>*) ((char*) this + sizeof(*this))); } template <class T> HbrFVarData<T>& HbrVertex<T>::NewFVarData(const HbrFace<T>* face) { const int fvarwidth = GetMesh()->GetTotalFVarWidth(); size_t fvtsize = sizeof(HbrFVarData<T>) + (fvarwidth - 1) * sizeof(float); if (morefvar) { struct morefvardata *newmorefvar = (struct morefvardata *) malloc(sizeof(int) + (morefvar->count + 1) * fvtsize); HbrFVarData<T> *newfvt = (HbrFVarData<T> *)((char *) newmorefvar + sizeof(int)); HbrFVarData<T> *oldfvt = (HbrFVarData<T> *)((char *) morefvar + sizeof(int)); for (int i = 0; i < morefvar->count; ++i) { new (newfvt) HbrFVarData<T>(); newfvt->SetAllData(fvarwidth, oldfvt->GetData(0)); newfvt->SetFaceID(oldfvt->GetFaceID()); oldfvt = (HbrFVarData<T>*)((char*) oldfvt + fvtsize); newfvt = (HbrFVarData<T>*)((char*) newfvt + fvtsize); } new (newfvt) HbrFVarData<T>(); newfvt->SetFaceID(face->GetID()); newmorefvar->count = morefvar->count + 1; free(morefvar); morefvar = newmorefvar; return *newfvt; } else { morefvar = (struct morefvardata *) malloc(sizeof(int) + fvtsize); HbrFVarData<T> *newfvt = (HbrFVarData<T> *)((char *) morefvar + sizeof(int)); new (newfvt) HbrFVarData<T>(); newfvt->SetFaceID(face->GetID()); morefvar->count = 1; return *newfvt; } } template <class T> 
HbrFace<T>* HbrVertex<T>::GetFace() const { return GetIncidentEdge()->GetFace(); } template <class T> HbrMesh<T>* HbrVertex<T>::GetMesh() const { return GetFace()->GetMesh(); } template <class T> HbrHalfedge<T>* HbrVertex<T>::GetEdge(const HbrVertex<T>* dest) const { // Here, we generally want to go through all halfedge cycles for (int i = 0; i < nIncidentEdges; ++i) { HbrHalfedge<T>* cycle = (nIncidentEdges > 1) ? incident.edges[i] : incident.edge; HbrHalfedge<T>* edge = cycle; if (edge) do { if (edge->GetDestVertex() == dest) { return edge; } edge = GetNextEdge(edge); } while (edge && edge != cycle); } return 0; } template <class T> HbrHalfedge<T>* HbrVertex<T>::GetEdge(int dest) const { // Here, we generally want to go through all halfedge cycles for (int i = 0; i < nIncidentEdges; ++i) { HbrHalfedge<T>* cycle = (nIncidentEdges > 1) ? incident.edges[i] : incident.edge; HbrHalfedge<T>* edge = cycle; if (edge) do { if (edge->GetDestVertexID() == dest) { return edge; } edge = GetNextEdge(edge); } while (edge && edge != cycle); } return 0; } template <class T> HbrHalfedge<T>* HbrVertex<T>::GetNextEdge(const HbrHalfedge<T>* edge) const { // Paranoia: // if (edge->GetOrgVertex() != this) return 0; return edge->GetPrev()->GetOpposite(); } template <class T> HbrHalfedge<T>* HbrVertex<T>::GetPreviousEdge(const HbrHalfedge<T>* edge) const { // Paranoia: // if (edge->GetOrgVertex() != this) return 0; return edge->GetOpposite()->GetNext(); } template <class T> HbrVertex<T>* HbrVertex<T>::GetQEONext(const HbrVertex<T>* dest) const { HbrHalfedge<T>* edge = GetEdge(dest); if (edge) { return edge->GetPrev()->GetOrgVertex(); } HbrHalfedge<T>* start = GetIncidentEdge(), *next; edge = start; while (edge) { next = GetNextEdge(edge); if (edge->GetDestVertex() == dest) { if (!next) { return edge->GetPrev()->GetOrgVertex(); } else { return next->GetDestVertex(); } } if (next == start) { return 0; } else if (!next) { if (edge->GetPrev()->GetOrgVertex() == dest) { return 
start->GetDestVertex(); } else { return 0; } } else { edge = next; } } // Shouldn't get here return 0; } template <class T> HbrVertex<T>* HbrVertex<T>::GetQEONext(const HbrHalfedge<T>* edge) const { assert(edge->GetOrgVertex() == this); return edge->GetPrev()->GetOrgVertex(); } template <class T> HbrVertex<T>* HbrVertex<T>::GetQEOPrev(const HbrVertex<T>* dest) const { HbrHalfedge<T>* edge = GetEdge(dest); if (edge) { if (edge->GetOpposite()) { return edge->GetOpposite()->GetNext()->GetDestVertex(); } else { HbrHalfedge<T>* start = GetIncidentEdge(), *next; edge = start; while (edge) { next = GetNextEdge(edge); if (next == start) { if (next->GetDestVertex() == dest) { return edge->GetDestVertex(); } else { return 0; } } else if (!next) { if (edge->GetPrev()->GetOrgVertex() == dest) { return edge->GetDestVertex(); } else if (start->GetDestVertex() == dest) { return edge->GetPrev()->GetOrgVertex(); } else { return 0; } } else if (next->GetDestVertex() == dest) { return edge->GetDestVertex(); } else { edge = next; } } return 0; } } edge = dest->GetEdge(this); if (edge) { return edge->GetNext()->GetDestVertex(); } return 0; } template <class T> HbrVertex<T>* HbrVertex<T>::GetQEOPrev(const HbrHalfedge<T>* edge) const { assert(edge->GetOrgVertex() == this); if (edge->GetOpposite()) { return edge->GetOpposite()->GetNext()->GetDestVertex(); } else { return GetQEOPrev(edge->GetDestVertex()); } } template <class T> HbrVertex<T>* HbrVertex<T>::GetQELNext(const HbrVertex<T>* dest) const { HbrHalfedge<T>* edge = GetEdge(dest); if (edge) { return edge->GetNext()->GetDestVertex(); } edge = dest->GetEdge(this); if (edge) { return edge->GetPrev()->GetOrgVertex(); } return 0; } template <class T> bool HbrVertex<T>::OnBoundary() const { // We really only need to check the first incident edge, since // singular vertices by definition are on the boundary return GetIncidentEdge()->IsBoundary(); } template <class T> bool HbrVertex<T>::IsFVarSmooth(int datum) { return (GetFVarMask(datum) 
== k_Smooth); } template <class T> bool HbrVertex<T>::IsFVarAllSmooth() { for (int i = 0; i < GetMesh()->GetFVarCount(); ++i) { if (!IsFVarSmooth(i)) return false; } return true; } template <class T> bool HbrVertex<T>::IsFVarDart(int datum) { return (GetFVarMask(datum) == k_Dart); } template <class T> bool HbrVertex<T>::IsFVarCorner(int datum) { // If it's a dart, it's a corner if (IsFVarDart(datum)) return true; // Run through surrounding edges, looking for two adjacent // facevarying boundary edges HbrHalfedge<T>* start = GetIncidentEdge(), *edge, *nextedge; edge = start; bool lastedgewassharp = false; while (edge) { if (edge->GetFVarSharpness(datum)) { if (lastedgewassharp) { return true; } else { lastedgewassharp = true; } } else { lastedgewassharp = false; } nextedge = GetNextEdge(edge); if (nextedge == start) { return start->GetFVarSharpness(datum) && lastedgewassharp; } else if (!nextedge) { // Special case for the last edge in a cycle. edge = edge->GetPrev(); return edge->GetFVarSharpness(datum) && lastedgewassharp; } else { edge = nextedge; } } return false; } template <class T> unsigned char HbrVertex<T>::GetMask(bool next) { if (validmask) { return (unsigned char)(next ? 
mask1 : mask0); } mask0 = mask1 = 0; // Mark volatility if (sharpness > k_Smooth && sharpness < k_InfinitelySharp) volatil = 1; // If the vertex is tagged as sharp immediately promote its mask // to corner if (IsSharp(false)) { mask0 += k_Corner; } if (IsSharp(true)) { mask1 += k_Corner; } // Count the number of surrounding sharp edges HbrHalfedge<T>* start = GetIncidentEdge(), *edge, *nextedge; edge = start; while (edge) { float esharp = edge->GetSharpness(); if (edge->IsSharp(false)) { if (mask0 < k_Corner) { mask0++; } } if (edge->IsSharp(true)) { if (mask1 < k_Corner) { mask1++; } } // If any incident edge is semisharp, mark the vertex as volatile if (esharp > HbrHalfedge<T>::k_Smooth && esharp < HbrHalfedge<T>::k_InfinitelySharp) { volatil = 1; } nextedge = GetNextEdge(edge); if (nextedge == start) { break; } else if (!nextedge) { // Special case for the last edge in a cycle. edge = edge->GetPrev(); esharp = edge->GetSharpness(); if (edge->IsSharp(false)) { if (mask0 < k_Corner) { mask0++; } } if (edge->IsSharp(true)) { if (mask1 < k_Corner) { mask1++; } } if (esharp > HbrHalfedge<T>::k_Smooth && esharp < HbrHalfedge<T>::k_InfinitelySharp) { volatil = 1; } break; } else { edge = nextedge; } } validmask = 1; return (unsigned char)(next ? mask1 : mask0); } template <class T> unsigned char HbrVertex<T>::GetFVarMask(int datum) { unsigned char mask = 0; // If the vertex is tagged as sharp immediately promote its mask // to corner if (IsSharp(false)) { mask += k_Corner; } // Count the number of surrounding facevarying boundary edges HbrHalfedge<T>* start = GetIncidentEdge(), *edge, *nextedge; edge = start; while (edge) { if (edge->GetFVarSharpness(datum)) { if (mask < k_Corner) { mask++; } else { // Can't get any sharper, so give up early break; } } nextedge = GetNextEdge(edge); if (nextedge == start) { break; } else if (!nextedge) { // Special case for the last edge in a cycle. 
edge = edge->GetPrev(); if (edge->GetFVarSharpness(datum)) { if (mask < k_Corner) { mask++; } } break; } else { edge = nextedge; } } return mask; } template <class T> float HbrVertex<T>::GetFractionalMask() const { float mask = 0; float n = 0; if (sharpness > k_Smooth && sharpness < k_Dart) { mask += sharpness; ++n; } // Add up the strengths of surrounding fractional sharp edges HbrHalfedge<T>* start = GetIncidentEdge(), *edge, *next; edge = start; while (edge) { float esharp = edge->GetSharpness(); if (esharp > HbrHalfedge<T>::k_Smooth && esharp < HbrHalfedge<T>::k_Sharp) { mask += esharp; ++n; } next = GetNextEdge(edge); if (next == start) { break; } else if (!next) { // Special case for the last edge in a cycle. esharp = edge->GetPrev()->GetSharpness(); if (esharp > HbrHalfedge<T>::k_Smooth && esharp < HbrHalfedge<T>::k_Sharp) { mask += esharp; ++n; } break; } else { edge = next; } } assert (n > 0.0f && mask < n); return (mask / n); } template <class T> template <typename OutputIterator> void HbrVertex<T>::GetSurroundingEdges(OutputIterator edges) const { HbrHalfedge<T>* start = GetIncidentEdge(), *edge, *next; edge = start; while (edge) { *edges++ = edge; next = GetNextEdge(edge); if (next == start) { break; } else if (!next) { // Special case for the last edge in a cycle. 
*edges++ = edge->GetPrev(); break; } else { edge = next; } } } template <class T> void HbrVertex<T>::ApplyOperatorSurroundingEdges(HbrHalfedgeOperator<T> &op) const { HbrHalfedge<T>* start = GetIncidentEdge(), *edge, *next; edge = start; while (edge) { op(*edge); next = GetNextEdge(edge); if (next == start) { break; } else if (!next) { op(*edge->GetPrev()); break; } else { edge = next; } } } template <class T> template <typename OutputIterator> void HbrVertex<T>::GetSurroundingVertices(OutputIterator vertices) const { HbrMesh<T>* mesh = GetMesh(); HbrHalfedge<T>* start = GetIncidentEdge(), *edge, *next; edge = start; while (edge) { *vertices++ = edge->GetDestVertex(mesh); next = GetNextEdge(edge); if (next == start) { break; } else if (!next) { // Special case for the last edge in a cycle: the last // vertex on that cycle is not the destination of an // outgoing halfedge *vertices++ = edge->GetPrev()->GetOrgVertex(mesh); break; } else { edge = next; } } } template <class T> void HbrVertex<T>::ApplyOperatorSurroundingVertices(HbrVertexOperator<T> &op) const { HbrMesh<T>* mesh = GetMesh(); HbrHalfedge<T>* start = GetIncidentEdge(), *edge, *next; edge = start; while (edge) { op(*edge->GetDestVertex(mesh)); next = GetNextEdge(edge); if (next == start) return; else if (!next) { op(*edge->GetPrev()->GetOrgVertex(mesh)); return; } else { edge = next; } } } template <class T> void HbrVertex<T>::ApplyOperatorSurroundingFaces(HbrFaceOperator<T> &op) const { HbrHalfedge<T>* start = GetIncidentEdge(), *edge; edge = start; while (edge) { op(*edge->GetLeftFace()); edge = GetNextEdge(edge); if (edge == start) break; } } template <class T> HbrVertex<T>* HbrVertex<T>::Subdivide() { HbrMesh<T>* mesh = GetMesh(); if (vchild != -1) return mesh->GetVertex(vchild); HbrVertex<T>* vchildVert = mesh->GetSubdivision()->Subdivide(mesh, this); vchild = vchildVert->GetID(); vchildVert->SetParent(this); return vchildVert; } template <class T> void HbrVertex<T>::Refine() { HbrMesh<T>* mesh = 
GetMesh(); mesh->GetSubdivision()->RefineAtVertex(mesh, this); } template <class T> void HbrVertex<T>::GuaranteeNeighbors() { if (!neighborsguaranteed) { HbrMesh<T>* mesh = GetMesh(); mesh->GetSubdivision()->GuaranteeNeighbors(mesh, this); neighborsguaranteed = 1; // At this point we can apply vertex edits because we have all // surrounding faces, and know whether any of them has // necessary edit information (they would have set our // hasvertexedit bit) if (hasvertexedit && !editsapplied) { HbrHalfedge<T>* start = GetIncidentEdge(), *edge; edge = start; while (edge) { HbrFace<T>* face = edge->GetLeftFace(); if (HbrHierarchicalEdit<T>** edits = face->GetHierarchicalEdits()) { while (HbrHierarchicalEdit<T>* edit = *edits) { if (!edit->IsRelevantToFace(face)) break; edit->ApplyEditToVertex(face, this); edits++; } } edge = GetNextEdge(edge); if (edge == start) break; } editsapplied = 1; } } } template <class T> unsigned long HbrVertex<T>::GetMemStats() const { return sizeof(HbrVertex<T>); } template <class T> void HbrVertex<T>::splitSingular() { HbrMesh<T>* mesh = GetMesh(); HbrHalfedge<T>* e; HbrHalfedge<T>** incidentEdges = (nIncidentEdges > 1) ? 
incident.edges : &incident.edge; // Go through each edge cycle after the first std::vector<HbrHalfedge<T>*> edges; for (int i = 1; i < nIncidentEdges; ++i) { // Create duplicate vertex HbrVertex<T>* w = mesh->NewVertex(); w->GetData().AddWithWeight(GetData(), 1.0); w->SetSharpness(GetSharpness()); // Walk all edges in this cycle and reattach them to duplicate // vertex HbrHalfedge<T>* start = incidentEdges[i]; e = start; edges.clear(); do { edges.push_back(e); e = GetNextEdge(e); } while (e && e != start); for (typename std::vector<HbrHalfedge<T>*>::iterator ei = edges.begin(); ei != edges.end(); ++ei) { e = *ei; if (e->GetOpposite()) { HbrHalfedge<T>* next = e->GetOpposite()->GetNext(); if (next->GetOrgVertex() == this) { references--; next->SetOrgVertex(w); w->AddIncidentEdge(next); } } // Check again, because sometimes it's been relinked by // previous clause already if (e->GetOrgVertex() == this) { references--; e->SetOrgVertex(w); w->AddIncidentEdge(e); } } w->Finish(); #ifdef HBR_ADAPTIVE mesh->addSplitVertex(w->GetID(), this->GetID()); #endif } e = incidentEdges[0]; if (nIncidentEdges > 1) { delete[] incidentEdges; } nIncidentEdges = 1; incident.edge = e; } template <class T> std::ostream& operator<<(std::ostream& out, const HbrVertex<T>& vertex) { return out << "vertex " << vertex.GetID(); } template <class T> class HbrVertexOperator { public: virtual void operator() (HbrVertex<T> &vertex) = 0; virtual ~HbrVertexOperator() {} }; } // end namespace OPENSUBDIV_VERSION using namespace OPENSUBDIV_VERSION; } // end namespace OpenSubdiv #endif /* HBRVERTEX_H */
iecedge/xos
xos/synchronizers/new_base/mock_modelaccessor_build.py
<reponame>iecedge/xos<gh_stars>0 # Copyright 2017-present Open Networking Foundation # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import os import cPickle import subprocess """ Support for autogenerating mock_modelaccessor. Each unit test might have its own requirements for the set of xprotos that make up its model testing framework. These should always include the core, and optionally include one or more services. """ def build_mock_modelaccessor( xos_dir, services_dir, service_xprotos, target="mock_classes.xtarget" ): dest_fn = os.path.join( xos_dir, "synchronizers", "new_base", "mock_modelaccessor.py" ) args = ["xosgenx", "--target", target] args.append(os.path.join(xos_dir, "core/models/core.xproto")) for xproto in service_xprotos: args.append(os.path.join(services_dir, xproto)) # Check to see if we've already run xosgenx. If so, don't run it again. 
context_fn = dest_fn + ".context" this_context = (xos_dir, services_dir, service_xprotos, target) need_xosgenx = True if os.path.exists(context_fn): try: context = cPickle.loads(open(context_fn).read()) if context == this_context: return except (cPickle.UnpicklingError, EOFError): # Something went wrong with the file read or depickling pass if os.path.exists(context_fn): os.remove(context_fn) if os.path.exists(dest_fn): os.remove(dest_fn) p = subprocess.Popen( " ".join(args) + " > " + dest_fn, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, ) (stdoutdata, stderrdata) = p.communicate() if (p.returncode != 0) or (not os.path.exists(dest_fn)): raise Exception( "Failed to create mock model accessor, returncode=%d, stdout=%s" % (p.returncode, stdoutdata) ) # Save the context of this invocation of xosgenx open(context_fn, "w").write(cPickle.dumps(this_context))
gridgo/gridgo-commons
gridgo-bean/src/main/java/io/gridgo/bean/BJsonSupport.java
<filename>gridgo-bean/src/main/java/io/gridgo/bean/BJsonSupport.java
package io.gridgo.bean;

import java.io.ByteArrayOutputStream;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;

import io.gridgo.bean.exceptions.InvalidTypeException;
import io.gridgo.bean.serialization.BSerializerRegistryAware;

/**
 * Mixin interface adding JSON serialization support to bean elements.
 */
public interface BJsonSupport extends BSerializerRegistryAware {

    /**
     * Converts this object to a JSON-friendly element representation.
     *
     * @param <T> expected type of the resulting element
     * @return the JSON element
     */
    <T> T toJsonElement();

    /**
     * Writes this element as JSON to the given stream using the registered
     * "json" serializer.
     *
     * @param out destination stream
     * @throws InvalidTypeException if this instance is not a {@link BElement}
     */
    default void writeJson(OutputStream out) {
        if (this instanceof BElement)
            lookupSerializer("json").serialize((BElement) this, out);
        else
            throw new InvalidTypeException("writeJson by default only support BElement");
    }

    /**
     * Serializes this element to a JSON string.
     *
     * <p>JSON text is UTF-8 by specification, so the serialized bytes are
     * decoded explicitly with UTF-8 rather than the platform default
     * charset (which would corrupt non-ASCII content on some platforms).
     *
     * @return the JSON string
     */
    default String toJson() {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        writeJson(out);
        return new String(out.toByteArray(), StandardCharsets.UTF_8);
    }
}
ant0ine/phantomjs
src/qt/src/3rdparty/webkit/Source/WebCore/platform/graphics/qt/ContextShadowQt.cpp
<filename>src/qt/src/3rdparty/webkit/Source/WebCore/platform/graphics/qt/ContextShadowQt.cpp /* * Copyright (C) 2010 Sencha, Inc. * * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY APPLE COMPUTER, INC. ``AS IS'' AND ANY * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE COMPUTER, INC. OR * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ #include "config.h" #include "ContextShadow.h" #include "AffineTransform.h" #include "GraphicsContext.h" #include <QPainter> #include <QTimerEvent> namespace WebCore { // ContextShadow needs a scratch image as the buffer for the blur filter. // Instead of creating and destroying the buffer for every operation, // we create a buffer which will be automatically purged via a timer. 
// Scratch buffer for the shadow blur filter. A single global instance is
// reused across shadow layers and emptied shortly after use via a QObject
// timer so the pixels are not kept allocated indefinitely.
class ShadowBuffer: public QObject {
public:
    ShadowBuffer(QObject* parent = 0);

    // Returns a zero-filled ARGB32 image at least `size` big (may be larger).
    QImage* scratchImage(const QSize& size);

    // Arms (or re-arms) the purge timer; the buffer is released when it fires.
    void schedulePurge();

protected:
    void timerEvent(QTimerEvent* event);

private:
    QImage image;      // lazily (re)allocated scratch pixels
    int timerId;       // -1 when no purge timer is pending
};

ShadowBuffer::ShadowBuffer(QObject* parent)
    : QObject(parent)
    , timerId(-1)
{
}

QImage* ShadowBuffer::scratchImage(const QSize& size)
{
    int width = size.width();
    int height = size.height();

    // We do not need to recreate the buffer if the buffer is reasonably
    // larger than the requested size. However, if the requested size is
    // much smaller than our buffer, reduce our buffer so that we will not
    // keep too many allocated pixels for too long.
    // (Reuse only when the existing image is bigger than the request but
    // less than twice as big in each dimension.)
    if (!image.isNull() && (image.width() > width) && (image.height() > height))
        if (((2 * width) > image.width()) && ((2 * height) > image.height())) {
            image.fill(0);
            return &image;
        }

    // Round to the nearest 32 pixels so we do not grow the buffer everytime
    // there is larger request by 1 pixel.
    width = (1 + (width >> 5)) << 5;
    height = (1 + (height >> 5)) << 5;

    image = QImage(width, height, QImage::Format_ARGB32_Premultiplied);
    image.fill(0);
    return &image;
}

void ShadowBuffer::schedulePurge()
{
    static const double BufferPurgeDelay = 2; // seconds
    // Restart the countdown on every use so a busy page keeps the buffer alive.
    if (timerId >= 0)
        killTimer(timerId);
    timerId = startTimer(BufferPurgeDelay * 1000);
}

void ShadowBuffer::timerEvent(QTimerEvent* event)
{
    if (event->timerId() == timerId) {
        killTimer(timerId);
        // Drop the scratch pixels; the next scratchImage() call reallocates.
        image = QImage();
    }
    QObject::timerEvent(event);
}

Q_GLOBAL_STATIC(ShadowBuffer, scratchShadowBuffer)

// Begins a shadow layer: computes the layer bounds from the clip, copies a
// scratch image into m_layerImage and returns a painter targeting it.
// Returns 0 when the layer is fully clipped out — callers presumably skip the
// shadow pass in that case (confirm at call sites).
PlatformContext ContextShadow::beginShadowLayer(GraphicsContext* context, const FloatRect& layerArea)
{
    // Set m_blurDistance.
    adjustBlurDistance(context);

    PlatformContext p = context->platformContext();
    QRect clipRect;
    if (p->hasClipping())
#if QT_VERSION >= QT_VERSION_CHECK(4, 8, 0)
        clipRect = p->clipBoundingRect().toAlignedRect();
#else
        clipRect = p->clipRegion().boundingRect();
#endif
    else
        clipRect = p->transform().inverted().mapRect(p->window());

    // Set m_layerOrigin, m_layerContextTranslation, m_sourceRect.
    IntRect clip(clipRect.x(), clipRect.y(), clipRect.width(), clipRect.height());
    IntRect layerRect = calculateLayerBoundingRect(context, layerArea, clip);

    // Don't paint if we are totally outside the clip region.
    if (layerRect.isEmpty())
        return 0;

    ShadowBuffer* shadowBuffer = scratchShadowBuffer();
    QImage* shadowImage = shadowBuffer->scratchImage(layerRect.size());
    m_layerImage = QImage(*shadowImage);

    // m_layerContext is owned by this object until endShadowLayer() deletes it.
    m_layerContext = new QPainter;
    m_layerContext->begin(&m_layerImage);
    m_layerContext->setFont(p->font());
    m_layerContext->translate(m_layerContextTranslation);
    return m_layerContext;
}

// Ends the layer started by beginShadowLayer(): blurs (if requested),
// tints the alpha mask with the shadow color, draws the result into the
// real context, and schedules the scratch buffer purge.
void ContextShadow::endShadowLayer(GraphicsContext* context)
{
    m_layerContext->end();
    delete m_layerContext;
    m_layerContext = 0;

    if (m_type == BlurShadow) {
        blurLayerImage(m_layerImage.bits(), IntSize(m_layerImage.width(), m_layerImage.height()),
                       m_layerImage.bytesPerLine());
    }

    if (m_type != NoShadow) {
        // "Colorize" with the right shadow color.
        // SourceIn keeps the layer's alpha but replaces its RGB with m_color.
        QPainter p(&m_layerImage);
        p.setCompositionMode(QPainter::CompositionMode_SourceIn);
        p.fillRect(m_layerImage.rect(), m_color.rgb());
        p.end();
    }

    context->platformContext()->drawImage(m_layerOrigin, m_layerImage, m_sourceRect);

    scratchShadowBuffer()->schedulePurge();
}

}
KyLeoHC/project-startup
src/utils/dom.js
<reponame>KyLeoHC/project-startup let passiveSupported = false; try { let options = { get passive() { // This function will be called when the browser // attempts to access the passive property. passiveSupported = true; } }; window.addEventListener('test', options, options); window.removeEventListener('test', options, options); } catch (err) { passiveSupported = false; } /** * 事件分发代理函数 * @param event * @private */ const _processor = function (event) { const namespaceObj = event.currentTarget.__event[event.type]; Object.keys(namespaceObj) .forEach(key => { namespaceObj[key].forEach(handler => { handler(event); }); }); }; /** * 绑定事件,支持命名空间绑定,比如 'click.show'(注意,不支持多级命名空间) * 提倡绑定事件的时候指定命名空间,也方便于移除事件绑定 * @param el * @param event 比如'click.show'的形式 * @param handler */ const on = (el, event, handler) => { if (!el || !event || !handler) return; const [name, namespace = 'default'] = event.split('.'); if (!el.__event) { el.__event = {}; el.addEventListener(name, _processor, passiveSupported ? { passive: true } : false); } el.__event[name] = el.__event[name] || {}; el.__event[name][namespace] = el.__event[name][namespace] || []; el.__event[name][namespace].push(handler); }; /** * 移除事件绑定 * @param el * @param event 比如'click.show'的形式 */ const off = (el, event) => { if (!el || !event) return; const [name, namespace = 'default'] = event.split('.'); const namespaceObj = el.__event[name]; namespaceObj && delete namespaceObj[namespace]; if (!Object.keys(namespaceObj).length) { delete el.__event[name]; el.removeEventListener(name, _processor, passiveSupported ? 
{ passive: true } : false); } }; /** * 绑定事件,执行一次之后就移除绑定 * @param el * @param event * @param handler */ const once = (el, event, handler) => { const listener = function () { handler && handler.apply(this, arguments); off(el, event); }; on(el, event, listener); }; /** * 获取偏移量(可以理解为相对文档页面顶部和左侧的偏移量) * @param el * @returns {number} */ const computeOffset = el => { let offsetTop = 0; let offsetLeft = 0; let parentEl = el.offsetParent; while (parentEl) { offsetTop += parentEl.offsetTop; offsetLeft += parentEl.offsetLeft; parentEl = parentEl.offsetParent; } offsetTop += el.offsetTop; offsetLeft += el.offsetLeft; return { offsetTop, offsetLeft }; }; export { on, off, once, passiveSupported, computeOffset };
LukasHeinz/shopsys
packages/framework/assets/js/admin/jquery.js
<reponame>LukasHeinz/shopsys
/*
 * Why?
 *
 * We temporarily use the jsFormValidateBundle, which is loaded before the webpack build
 * output and needs jQuery to be globally available in order to register itself.
 *
 * We create a dedicated "jquery" webpack entry from this file and load it before
 * jsFormValidateBundle.
 *
 * This shim will be deleted once jsFormValidateBundle is updated.
 */
import $ from 'jquery';

// Expose jQuery on the global object under both conventional names.
global.$ = global.jQuery = $;
baoxingsong/dCNS
model/Node.cpp
<filename>model/Node.cpp // // Created by <NAME> on 2019-01-08. // #include "Node.h" Node::Node(int _v, int _w){ v = _v; weight = _w; } int Node::getV(){ return v; } int Node::getWeight() { return weight; }
Ryanhindman6654/agentbot-master
functions/utils.js
const random = require('random-number-csprng'); const { MessageEmbed } = require('discord.js'); const axios = require('axios'); const jimp = require('jimp'); module.exports = { getMember: function(message, toFind = '') { toFind = toFind.toLowerCase(); let target = message.guild.members.cache.get(toFind); if (!target && message.mentions.members) target = message.mentions.members.first(); if (!target && toFind) { target = message.guild.members.cache.find(member => { return member.displayName.toLowerCase().includes(toFind) || member.user.tag.toLowerCase().includes(toFind); }); } if (!target) target = message.member; return target; }, formatDate: function(date) { return new Intl.DateTimeFormat('en-US').format(date); }, promptMessage: async function(message, author, time, validReactions) { // We put in the time as seconds, with this it's being transfered to MS time *= 1000; // For every emoji in the function parameters, react in the good order. for (const reaction of validReactions) await message.react(reaction); // Only allow reactions from the author, // and the emoji must be in the array we provided. 
const filter = (reaction, user) => validReactions.includes(reaction.emoji.name) && user.id === author.id; // And ofcourse, await the reactions return message .awaitReactions(filter, { max: 1, time: time }) .then(collected => collected.first() && collected.first().emoji.name); }, pages: function(arr, itemsPerPage, page = 1) { const maxPages = Math.ceil(arr.length / itemsPerPage); if (page < 1 || page > maxPages) return null; return arr.slice((page - 1) * itemsPerPage, page * itemsPerPage); }, sleep: function(miliseconds) { const start = new Date().getTime(); for (let i = 0; i < 1e7; i++) { if ((new Date().getTime() - start) > miliseconds) { break; } } }, randomcard: async function(listofcard) { if (!Array.isArray(listofcard)) return null; const num = await random(0, listofcard.length - 1); const cards = listofcard[num]; return cards; }, checkautowin: function(list) { let aces = 0; let jqk = 0; if (list.length !== 2) return { check: false, data: { aces: aces, jqk: jqk } }; for (let i = 0; i < list.length; i++) { if (!isNaN(list[i].slice(2, 3)) && list[i].slice(2, 3) !== '1') continue; else if (list[i].slice(2, 3).toLowerCase() == 'a') aces++; else if (list[i].slice(2, 3).toLowerCase() == 'j' || list[i].slice(2, 3).toLowerCase() == 'q' || list[i].slice(2, 3).toLowerCase() == 'k' || list[i].slice(2, 3) == '1') jqk++; } if (aces == 1 && jqk == 1) return { check: true, loaiwin: "xidach", data: { aces: aces, jqk: jqk } }; else if (aces == 2) return { check: true, loaiwin: "xibang", data: { aces: aces, jqk: jqk } }; else return { check: false, data: { aces: aces, jqk: jqk } }; }, getcardvalue: function(list) { let point = 0; let aces = 0; for (let i = 0; i < list.length; i++) { const cardname = list[i].slice(2, 3); if (!isNaN(cardname)) { switch(parseInt(cardname)) { case 1: point = point + 10; break; default: point = point + parseInt(cardname); break; } } else { switch(cardname) { case "a": aces++; break; default: point = point + 10; break; } } } if (aces == 0) return 
point.toString(); else { for (let y = 0; y < aces; y++) { if (point > 10) point++; else point = point + 11; } return `${point}*`; } }, createembed: function(nguoichoi, bet, deck_user, deck_bot, nguoichoi_val, bot_val, hidden_deck, end) { const embed = new MessageEmbed() .setColor("#00FFFF") .setFooter('Game đang diễn ra') .setAuthor(`${nguoichoi.tag}, bạn đã cược ${bet} để chơi xì dách!`, nguoichoi.displayAvatarURL()) .setFooter("Đang chơi!"); if (end == 'thang') { embed.setColor("#90EE90"); embed.footer.text = `Bạn thắng ${bet} tiền!`; embed.addFields( { name: `Bot: [${bot_val}]`, value: deck_bot }, { name: `User: [${nguoichoi_val}]`, value: deck_user }, ); } else if (end == 'thua') { embed.setColor("#FF0000"); embed.footer.text = `Bạn thua ${bet} tiền!`; embed.addFields( { name: `Bot: [${bot_val}]`, value: deck_bot }, { name: `User: [${nguoichoi_val}]`, value: deck_user }, ); } else if (end == 'hoa') { embed.setColor("#D3D3D3"); embed.footer.text = `Bạn không mất tiền cho trận đấu này`; embed.addFields( { name: `Bot: [${bot_val}]`, value: deck_bot }, { name: `User: [${nguoichoi_val}]`, value: deck_user }, ); } else if (end == 'thangx2') { embed.setColor("#90EE90"); embed.footer.text = `Bạn thắng ${parseInt(bet.replace(',', '')) * 2} tiền!`; embed.addFields( { name: `Bot: [${bot_val}]`, value: deck_bot }, { name: `User: [${nguoichoi_val}]`, value: deck_user }, ); } else if (end == 'not') { embed.addFields( { name: `Bot: [?]`, value: hidden_deck }, { name: `User: [${nguoichoi_val}]`, value: deck_user }, ); } return embed; }, laysodep: function(num) { const pattern = /\B(?=(\d{3})+(?!\d))/g; return num.toString().replace(pattern, ','); }, createembedfield: function(deck) { if (!Array.isArray(deck)) return null; let line = ""; deck.forEach(card => { line += card; }); return line; }, locbai : function(listOfCard, deck) { if (!Array.isArray(listOfCard) || !Array.isArray(deck)) return null; return listOfCard.filter(item => !deck.includes(item)); }, checkemptyobject: 
function(obj) { if (!obj) return true; for (const key in obj) { if (obj.hasOwnProperty(key)) return false; } return true; }, trimArray: function(arr, maxLen) { if (arr.length > maxLen) { const len = arr.length - maxLen; arr = arr.slice(0, maxLen); arr.push(`${len} more....`); } return arr; }, formatBytes: function(bytes) { if (bytes === 0) return '0 Bytes'; const sizes = ['Bytes', 'KB', 'MB', 'GB', 'TB', 'PB']; const i = Math.floor(Math.log(bytes) / Math.log(1024)); return `${parseFloat((bytes / Math.pow(1024, i)).toFixed(2))} ${sizes[i]}`; }, getIDs: function(cache) { let result = ""; cache.forEach(function(ele, key, map) { result += "money_" + key + ","; }); return result.slice(0, -1); }, getunplash: async function(query) { if (!query) throw new Error('Query is empty!'); const unsplashapikey = process.env.UNSPLASH; try { const response = await axios.get(`https://api.unsplash.com/photos/random/`, { headers: { "Authorization": `Client-ID ${unsplashapikey}` }, params: { query: query, count: 1 }, }); const json = response.data[0]; const embed = new MessageEmbed() .setTitle('Click vào để download') .setURL(json.links.download) .setImage(json.urls.small) .setFooter(`Photo by ${json.user.name} at unsplash.com`); return embed; } catch(e) { return null; } }, capitalizeWords: function(string) { return string.replace(/(?!^[0-9])(^|[^a-zA-Z\u00C0-\u017F\u0400-\u04FF'])([a-zA-Z\u00C0-\u017F\u0400-\u04FF])/g, function(m) { return m.toUpperCase(); }); }, };
windystrife/UnrealEngine_NVIDIAGameWork
Engine/Source/Editor/MeshPaint/Private/MeshPaintSkeletalMeshAdapter.cpp
// Copyright 1998-2017 Epic Games, Inc. All Rights Reserved.

#include "MeshPaintSkeletalMeshAdapter.h"
#include "Engine/SkeletalMesh.h"
#include "PhysicsEngine/BodySetup.h"
#include "MeshPaintHelpers.h"
#include "MeshPaintTypes.h"
#include "ComponentReregisterContext.h"

//////////////////////////////////////////////////////////////////////////
// FMeshPaintGeometryAdapterForSkeletalMeshes

// Global map tracking, per skeletal mesh, every component currently being painted
// plus the original body setup to restore when the last painter detaches.
FMeshPaintGeometryAdapterForSkeletalMeshes::FMeshToComponentMap FMeshPaintGeometryAdapterForSkeletalMeshes::MeshToComponentMap;

// Binds this adapter to a skeletal mesh component and initializes vertex data.
// Returns false when the component is not a skeletal mesh or has no mesh asset.
bool FMeshPaintGeometryAdapterForSkeletalMeshes::Construct(UMeshComponent* InComponent, int32 InMeshLODIndex)
{
	SkeletalMeshComponent = Cast<USkeletalMeshComponent>(InComponent);
	if (SkeletalMeshComponent != nullptr)
	{
		// Re-initialize automatically if the component's mesh asset is swapped.
		SkeletalMeshChangedHandle = SkeletalMeshComponent->RegisterOnSkeletalMeshPropertyChanged(USkeletalMeshComponent::FOnSkeletalMeshPropertyChanged::CreateRaw(this, &FMeshPaintGeometryAdapterForSkeletalMeshes::OnSkeletalMeshChanged));

		if (SkeletalMeshComponent->SkeletalMesh != nullptr)
		{
			ReferencedSkeletalMesh = SkeletalMeshComponent->SkeletalMesh;
			MeshLODIndex = InMeshLODIndex;
			const bool bSuccess = Initialize();
			return bSuccess;
		}
	}
	return false;
}

FMeshPaintGeometryAdapterForSkeletalMeshes::~FMeshPaintGeometryAdapterForSkeletalMeshes()
{
	if (SkeletalMeshComponent != nullptr)
	{
		SkeletalMeshComponent->UnregisterOnSkeletalMeshPropertyChanged(SkeletalMeshChangedHandle);
	}
}

// Tears down state for the old mesh and rebuilds it for the newly assigned one.
void FMeshPaintGeometryAdapterForSkeletalMeshes::OnSkeletalMeshChanged()
{
	OnRemoved();
	ReferencedSkeletalMesh = SkeletalMeshComponent->SkeletalMesh;
	if (SkeletalMeshComponent->SkeletalMesh != nullptr)
	{
		Initialize();
		OnAdded();
	}
}

// Caches the LOD model for MeshLODIndex and runs the base-class initialization.
bool FMeshPaintGeometryAdapterForSkeletalMeshes::Initialize()
{
	check(ReferencedSkeletalMesh == SkeletalMeshComponent->SkeletalMesh);

	bool bInitializationResult = false;

	MeshResource = ReferencedSkeletalMesh->GetImportedResource();
	if (MeshResource != nullptr)
	{
		LODModel = &MeshResource->LODModels[MeshLODIndex];
		bInitializationResult = FBaseMeshPaintGeometryAdapter::Initialize();
	}

	return bInitializationResult;
}

// Copies vertex positions and triangle indices out of the LOD model into the
// adapter's MeshVertices/MeshIndices arrays used for painting and tracing.
bool FMeshPaintGeometryAdapterForSkeletalMeshes::InitializeVertexData()
{
	// Retrieve mesh vertex and index data
	const int32 NumVertices = LODModel->NumVertices;
	MeshVertices.Reset();
	MeshVertices.AddDefaulted(NumVertices);
	for (int32 Index = 0; Index < NumVertices; Index++)
	{
		const FVector& Position = LODModel->VertexBufferGPUSkin.GetVertexPositionSlow(Index);
		MeshVertices[Index] = Position;
	}

	MeshIndices.Reserve(LODModel->MultiSizeIndexContainer.GetIndexBuffer()->Num());
	LODModel->MultiSizeIndexContainer.GetIndexBuffer(MeshIndices);

	// NOTE(review): `MeshVertices.Num() >= 0` is always true — this was
	// presumably meant to be `> 0`; confirm before changing.
	return (MeshVertices.Num() >= 0 && MeshIndices.Num() > 0);
}

// One-time reset of the shared mesh-to-component map.
void FMeshPaintGeometryAdapterForSkeletalMeshes::InitializeAdapterGlobals()
{
	static bool bInitialized = false;
	if (!bInitialized)
	{
		bInitialized = true;
		MeshToComponentMap.Empty();
	}
}

// Registers this component as a painter of the mesh. The first painter swaps
// in a temporary complex-collision body setup so line traces hit all geometry.
void FMeshPaintGeometryAdapterForSkeletalMeshes::OnAdded()
{
	checkf(SkeletalMeshComponent, TEXT("Invalid SkeletalMesh Component"));
	checkf(ReferencedSkeletalMesh, TEXT("Invalid reference to Skeletal Mesh"));
	checkf(ReferencedSkeletalMesh == SkeletalMeshComponent->SkeletalMesh, TEXT("Referenced Skeletal Mesh does not match one in Component"));

	FSkeletalMeshReferencers& SkeletalMeshReferencers = MeshToComponentMap.FindOrAdd(ReferencedSkeletalMesh);
	checkf(!SkeletalMeshReferencers.Referencers.ContainsByPredicate(
		[=](const FSkeletalMeshReferencers::FReferencersInfo& Info)
		{
			return Info.SkeletalMeshComponent == this->SkeletalMeshComponent;
		}), TEXT("This Skeletal Mesh Component has already been Added"));

	// If this is the first attempt to add a temporary body setup to the mesh, do it
	if (SkeletalMeshReferencers.Referencers.Num() == 0)
	{
		// Remember the old body setup (this will be added as a GC reference so that it doesn't get destroyed)
		SkeletalMeshReferencers.RestoreBodySetup = ReferencedSkeletalMesh->BodySetup;
		if (SkeletalMeshReferencers.RestoreBodySetup)
		{
			// Create a new body setup from the mesh's main body setup. This has to have the skeletal mesh as its outer,
			// otherwise the body instance will not be created correctly.
			UBodySetup* TempBodySetupRaw = DuplicateObject<UBodySetup>(ReferencedSkeletalMesh->BodySetup, ReferencedSkeletalMesh);
			TempBodySetupRaw->ClearFlags(RF_Transactional);

			// Set collide all flag so that the body creates physics meshes using ALL elements from the mesh not just the collision mesh.
			TempBodySetupRaw->bMeshCollideAll = true;

			// This forces it to recreate the physics mesh.
			TempBodySetupRaw->InvalidatePhysicsData();
			// Force it to use high detail tri-mesh for collisions.
			TempBodySetupRaw->CollisionTraceFlag = CTF_UseComplexAsSimple;
			TempBodySetupRaw->AggGeom.ConvexElems.Empty();

			// Set as new body setup
			ReferencedSkeletalMesh->BodySetup = TempBodySetupRaw;
		}
	}

	// Pose the component at reference pose while painting.
	SkeletalMeshComponent->bUseRefPoseOnInitAnim = true;
	SkeletalMeshComponent->InitAnim(true);

	// Remember the original collision mode so OnRemoved() can restore it.
	ECollisionEnabled::Type CachedCollisionType = SkeletalMeshComponent->BodyInstance.GetCollisionEnabled();
	SkeletalMeshReferencers.Referencers.Emplace(SkeletalMeshComponent, CachedCollisionType);

	// Force the collision type to not be 'NoCollision' without it the line trace will always fail.
	if (CachedCollisionType == ECollisionEnabled::NoCollision)
	{
		SkeletalMeshComponent->BodyInstance.SetCollisionEnabled(ECollisionEnabled::QueryOnly, false);
	}

	// Set new physics state for the component
	SkeletalMeshComponent->RecreatePhysicsState();
}

// Unregisters this component. The last painter restores the original body
// setup and removes the mesh's entry from the global map.
void FMeshPaintGeometryAdapterForSkeletalMeshes::OnRemoved()
{
	checkf(SkeletalMeshComponent, TEXT("Invalid SkeletalMesh Component"));

	// If the referenced skeletal mesh has been destroyed (and nulled by GC), don't try to do anything more.
	// It should be in the process of removing all global geometry adapters if it gets here in this situation.
	if (!ReferencedSkeletalMesh)
	{
		return;
	}

	// Remove a reference from the skeletal mesh map
	FSkeletalMeshReferencers* SkeletalMeshReferencers = MeshToComponentMap.Find(ReferencedSkeletalMesh);
	checkf(SkeletalMeshReferencers, TEXT("Could not find Reference to Skeletal Mesh"));
	checkf(SkeletalMeshReferencers->Referencers.Num() > 0, TEXT("Skeletal Mesh does not have any referencers"));

	const int32 Index = SkeletalMeshReferencers->Referencers.IndexOfByPredicate(
		[=](const FSkeletalMeshReferencers::FReferencersInfo& Info)
		{
			return Info.SkeletalMeshComponent == this->SkeletalMeshComponent;
		}
	);
	check(Index != INDEX_NONE);

	// Restore normal animation and the cached collision mode.
	SkeletalMeshComponent->bUseRefPoseOnInitAnim = false;
	SkeletalMeshComponent->InitAnim(true);

	SkeletalMeshComponent->BodyInstance.SetCollisionEnabled(SkeletalMeshReferencers->Referencers[Index].CachedCollisionType, false);
	SkeletalMeshComponent->RecreatePhysicsState();

	SkeletalMeshReferencers->Referencers.RemoveAtSwap(Index);

	// If the last reference was removed, restore the body setup for the static mesh
	if (SkeletalMeshReferencers->Referencers.Num() == 0)
	{
		if (SkeletalMeshReferencers->RestoreBodySetup != nullptr)
		{
			ReferencedSkeletalMesh->BodySetup = SkeletalMeshReferencers->RestoreBodySetup;
		}

		verify(MeshToComponentMap.Remove(ReferencedSkeletalMesh) == 1);
	}
}

// Traces a segment against the component. Tries the physics bodies first;
// when they miss but the segment touches the component bounds, falls back to
// a brute-force triangle intersection over the cached mesh data.
bool FMeshPaintGeometryAdapterForSkeletalMeshes::LineTraceComponent(struct FHitResult& OutHit, const FVector Start, const FVector End, const struct FCollisionQueryParams& Params) const
{
	const bool bHitBounds = FMath::LineSphereIntersection(Start, End.GetSafeNormal(), (End - Start).SizeSquared(), SkeletalMeshComponent->Bounds.Origin, SkeletalMeshComponent->Bounds.SphereRadius);
	const float SqrRadius = FMath::Square(SkeletalMeshComponent->Bounds.SphereRadius);
	const bool bInsideBounds = (SkeletalMeshComponent->Bounds.ComputeSquaredDistanceFromBoxToPoint(Start) <= SqrRadius) || (SkeletalMeshComponent->Bounds.ComputeSquaredDistanceFromBoxToPoint(End) <= SqrRadius);
	const bool bHitPhysicsBodies = SkeletalMeshComponent->LineTraceComponent(OutHit, Start, End, Params);

	bool bHitTriangle = false;
	if ((bHitBounds || bInsideBounds) && !bHitPhysicsBodies)
	{
		const int32 NumTriangles = MeshIndices.Num() / 3;
		const FTransform& ComponentTransform = SkeletalMeshComponent->GetComponentTransform();
		const FTransform InverseComponentTransform = ComponentTransform.Inverse();
		// Trace in component-local space so vertices need no per-triangle transform.
		const FVector LocalStart = InverseComponentTransform.TransformPosition(Start);
		const FVector LocalEnd = InverseComponentTransform.TransformPosition(End);

		float MinDistance = FLT_MAX;
		FVector Intersect;
		FVector Normal;

		for (int32 TriangleIndex = 0; TriangleIndex < NumTriangles; ++TriangleIndex)
		{
			// Compute the normal of the triangle
			const FVector& P0 = MeshVertices[MeshIndices[(TriangleIndex * 3) + 0]];
			const FVector& P1 = MeshVertices[MeshIndices[(TriangleIndex * 3) + 1]];
			const FVector& P2 = MeshVertices[MeshIndices[(TriangleIndex * 3) + 2]];
			const FVector TriNorm = (P1 - P0) ^ (P2 - P0);

			//check collinearity of A,B,C
			if (TriNorm.SizeSquared() > SMALL_NUMBER)
			{
				FVector IntersectPoint;
				FVector HitNormal;
				bool bHit = FMath::SegmentTriangleIntersection(LocalStart, LocalEnd, P0, P1, P2, IntersectPoint, HitNormal);

				if (bHit)
				{
					// Keep the intersection closest to the trace start.
					const float Distance = (LocalStart - IntersectPoint).SizeSquared();
					if (Distance < MinDistance)
					{
						MinDistance = Distance;
						Intersect = IntersectPoint;
						Normal = HitNormal;
					}
				}
			}
		}

		if (MinDistance != FLT_MAX)
		{
			OutHit.Component = SkeletalMeshComponent;
			OutHit.Normal = Normal.GetSafeNormal();
			OutHit.Location = ComponentTransform.TransformPosition(Intersect);
			OutHit.bBlockingHit = true;
			bHitTriangle = true;
		}
	}

	return bHitPhysicsBodies || bHitTriangle;
}

// Delegates paintable-texture discovery to the shared helper.
void FMeshPaintGeometryAdapterForSkeletalMeshes::QueryPaintableTextures(int32 MaterialIndex, int32& OutDefaultIndex, TArray<struct FPaintableTexture>& InOutTextureList)
{
	DefaultQueryPaintableTextures(MaterialIndex, SkeletalMeshComponent, OutDefaultIndex, InOutTextureList);
}

// Delegates texture-override handling to the shared helper.
void FMeshPaintGeometryAdapterForSkeletalMeshes::ApplyOrRemoveTextureOverride(UTexture* SourceTexture, UTexture* OverrideTexture) const
{
	DefaultApplyOrRemoveTextureOverride(SkeletalMeshComponent, SourceTexture, OverrideTexture);
}

// Keeps the restore body setup and all painting components alive through GC.
void FMeshPaintGeometryAdapterForSkeletalMeshes::AddReferencedObjects(FReferenceCollector& Collector)
{
	if (!ReferencedSkeletalMesh)
	{
		return;
	}

	FSkeletalMeshReferencers* SkeletalMeshReferencers = MeshToComponentMap.Find(ReferencedSkeletalMesh);
	checkf(SkeletalMeshReferencers, TEXT("No references found for Skeletal Mesh"));
	if (SkeletalMeshReferencers->RestoreBodySetup != nullptr)
	{
		Collector.AddReferencedObject(SkeletalMeshReferencers->RestoreBodySetup);
	}

	for (auto& Info : SkeletalMeshReferencers->Referencers)
	{
		Collector.AddReferencedObject(Info.SkeletalMeshComponent);
	}
}

// Reads the requested UV channel for a vertex from the GPU skin vertex buffer.
void FMeshPaintGeometryAdapterForSkeletalMeshes::GetTextureCoordinate(int32 VertexIndex, int32 ChannelIndex, FVector2D& OutTextureCoordinate) const
{
	OutTextureCoordinate = LODModel->VertexBufferGPUSkin.GetVertexUVFast(VertexIndex, ChannelIndex);
}

// Prepares the mesh for editing: records undo state, releases render
// resources, and lazily creates a color vertex buffer when none exists.
void FMeshPaintGeometryAdapterForSkeletalMeshes::PreEdit()
{
	FlushRenderingCommands();

	SkeletalMeshComponent->Modify();

	ReferencedSkeletalMesh->SetFlags(RF_Transactional);
	ReferencedSkeletalMesh->Modify();

	ReferencedSkeletalMesh->bHasVertexColors = true;

	// Release the static mesh's resources.
	ReferencedSkeletalMesh->ReleaseResources();

	// Flush the resource release commands to the rendering thread to ensure that the build doesn't occur while a resource is still
	// allocated, and potentially accessing the UStaticMesh.
	ReferencedSkeletalMesh->ReleaseResourcesFence.Wait();

	if (LODModel->ColorVertexBuffer.GetNumVertices() == 0)
	{
		// Mesh doesn't have a color vertex buffer yet! We'll create one now.
		LODModel->ColorVertexBuffer.InitFromSingleColor(FColor(255, 255, 255, 255), LODModel->NumVertices);
		ReferencedSkeletalMesh->bHasVertexColors = true;
		BeginInitResource(&LODModel->ColorVertexBuffer);
	}
}

// Re-initializes render resources after an edit; the render state context
// recreates the component's render state when it goes out of scope.
void FMeshPaintGeometryAdapterForSkeletalMeshes::PostEdit()
{
	TUniquePtr< FSkeletalMeshComponentRecreateRenderStateContext > RecreateRenderStateContext = MakeUnique<FSkeletalMeshComponentRecreateRenderStateContext>(ReferencedSkeletalMesh);
	ReferencedSkeletalMesh->InitResources();
}

// Reads one vertex color; no-op when the mesh has no color buffer.
void FMeshPaintGeometryAdapterForSkeletalMeshes::GetVertexColor(int32 VertexIndex, FColor& OutColor, bool bInstance /*= true*/) const
{
	if (LODModel->ColorVertexBuffer.GetNumVertices() > 0)
	{
		check((int32)LODModel->ColorVertexBuffer.GetNumVertices() > VertexIndex);
		OutColor = LODModel->ColorVertexBuffer.VertexColor(VertexIndex);
	}
}

// Writes one vertex color; no-op when the mesh has no color buffer.
void FMeshPaintGeometryAdapterForSkeletalMeshes::SetVertexColor(int32 VertexIndex, FColor Color, bool bInstance /*= true*/)
{
	if (LODModel->ColorVertexBuffer.GetNumVertices() > 0)
	{
		LODModel->ColorVertexBuffer.VertexColor(VertexIndex) = Color;
		// NOTE(review): this assigns true only when the flag is ALREADY true,
		// making it a no-op — the intent was presumably an unconditional
		// `= true` (or a `!` in the condition); confirm before changing.
		if (ReferencedSkeletalMesh->LODInfo[MeshLODIndex].bHasPerLODVertexColors)
		{
			ReferencedSkeletalMesh->LODInfo[MeshLODIndex].bHasPerLODVertexColors = true;
		}
	}
}

// Component-to-world transform including scale, as a matrix.
FMatrix FMeshPaintGeometryAdapterForSkeletalMeshes::GetComponentToWorldMatrix() const
{
	return SkeletalMeshComponent->GetComponentToWorld().ToMatrixWithScale();
}

//////////////////////////////////////////////////////////////////////////
// FMeshPaintGeometryAdapterForSkeletalMeshesFactory

// Creates an adapter for skeletal mesh components; returns null for any other
// component type or when construction fails.
TSharedPtr<IMeshPaintGeometryAdapter> FMeshPaintGeometryAdapterForSkeletalMeshesFactory::Construct(class UMeshComponent* InComponent, int32 InMeshLODIndex) const
{
	if (USkeletalMeshComponent* SkeletalMeshComponent = Cast<USkeletalMeshComponent>(InComponent))
	{
		if (SkeletalMeshComponent->SkeletalMesh != nullptr)
		{
			TSharedRef<FMeshPaintGeometryAdapterForSkeletalMeshes> Result = MakeShareable(new FMeshPaintGeometryAdapterForSkeletalMeshes());
			if (Result->Construct(InComponent, InMeshLODIndex))
			{
				return Result;
			}
		}
	}

	return nullptr;
}
karminea/gamstertv
node_modules/twitch/lib/API/Unsupported/UnsupportedAPI.js
<reponame>karminea/gamstertv<gh_stars>0 "use strict"; Object.defineProperty(exports, "__esModule", { value: true }); var tslib_1 = require("tslib"); var Cache_1 = require("../../Toolkit/Decorators/Cache"); var BaseAPI_1 = require("../BaseAPI"); var ChattersList_1 = require("./ChattersList"); var UserTools_1 = require("../../Toolkit/UserTools"); var ChannelEvent_1 = require("./ChannelEvent"); var TwitchClient_1 = require("../../TwitchClient"); /** * Different API methods that are not officially supported by Twitch. * * Can be accessed using `client.unsupported` on a {@TwitchClient} instance. * * ## Example * ```ts * const client = await TwitchClient.withCredentials(clientId, accessToken); * const events = await client.unsupported.getEvents('125328655'); * ``` */ var UnsupportedAPI = /** @class */ (function (_super) { tslib_1.__extends(UnsupportedAPI, _super); function UnsupportedAPI() { return _super !== null && _super.apply(this, arguments) || this; } /** * Retrieves a list of chatters in the Twitch chat of the given channel. * * **WARNING**: In contrast to most other methods, this takes a channel *name*, not a user ID. * * @param channel The channel to retrieve the chatters for. */ UnsupportedAPI.prototype.getChatters = function (channel) { return tslib_1.__awaiter(this, void 0, void 0, function () { var channelName, data; return tslib_1.__generator(this, function (_a) { switch (_a.label) { case 0: channelName = UserTools_1.extractUserName(channel); return [4 /*yield*/, this._client.callAPI({ url: "https://tmi.twitch.tv/group/user/" + channelName + "/chatters", type: TwitchClient_1.TwitchAPICallType.Custom })]; case 1: data = _a.sent(); return [2 /*return*/, new ChattersList_1.default(data)]; } }); }); }; /** * Retrieves a list of event planned for the given channel. * * @param channel The channel to retrieve the events for. 
*/ UnsupportedAPI.prototype.getEvents = function (channel) { return tslib_1.__awaiter(this, void 0, void 0, function () { var channelId, data; var _this = this; return tslib_1.__generator(this, function (_a) { switch (_a.label) { case 0: channelId = UserTools_1.extractUserId(channel); return [4 /*yield*/, this._client.callAPI({ url: "channels/" + channelId + "/events" })]; case 1: data = _a.sent(); return [2 /*return*/, data.events.map(function (event) { return new ChannelEvent_1.default(event, _this._client); })]; } }); }); }; tslib_1.__decorate([ Cache_1.Cached(60) ], UnsupportedAPI.prototype, "getChatters", null); tslib_1.__decorate([ Cache_1.Cached(60) ], UnsupportedAPI.prototype, "getEvents", null); UnsupportedAPI = tslib_1.__decorate([ Cache_1.Cacheable ], UnsupportedAPI); return UnsupportedAPI; }(BaseAPI_1.default)); exports.default = UnsupportedAPI; //# sourceMappingURL=UnsupportedAPI.js.map
isabellaliu77/kaspad
protocol/blocklogger/log.go
<gh_stars>0
// Copyright (c) 2017 The btcsuite developers
// Use of this source code is governed by an ISC
// license that can be found in the LICENSE file.

package blocklogger

import (
	"github.com/kaspanet/kaspad/logger"
)

// log is the package-level logger for block-logging messages, tagged with the
// protocol (PROT) subsystem. The error returned by logger.Get is deliberately
// discarded — presumably Get cannot fail for a known subsystem tag; confirm
// against the logger package before relying on that.
var log, _ = logger.Get(logger.SubsystemTags.PROT)
gadge/foba
packages/tabulars/crostab/gulp/gulpfile.AoEII.crostab.save.js
<gh_stars>0
// NOTE: this file uses the Babel pipeline operator (`|>`); it must be built
// with the corresponding Babel plugin enabled.
import { toTable } from '@analys/convert'
import { MUT } from '@analys/enum-mutabilities'
import { AVERAGE } from '@analys/enum-pivot-mode'
import { NUM_ASC, NUM_DESC } from '@aryth/comparer'
import { roundD1 } from '@aryth/math'
import { esvar } from '@flua/utils'
import { Vinylize } from '@flua/vinylize'
import { AoEIIUnits } from '@foba/table/resources/real/AoEIIUnits'
import { decoCrostab, says } from '@spare/logger'
import { Verse } from '@spare/verse'
import { isNumeric } from '@typen/num-strict'
import gulp from 'gulp'

// Sort orders for the cross-tab axes.
const RANKED_AGES = ['Dark', 'Feudal', 'Castle', 'Imperial']
const RANKED_BUILDINGS = ['Barracks', 'Archery Range', 'Stable', 'Siege Workshop']
// Output directory for the generated crostab resource modules.
const DEST = 'packages/tabulars/crostab/resources'
const crosTabCollection = {}

// Gulp task: builds age-by-building cross-tabs (average attack, average hp)
// from the AoEII unit table and writes each one out as an ES-module resource.
export const saveAoEIICrostab = async () => {
  /** @type {Table} */const table = AoEIIUnits|> toTable
  // Add numeric rank columns so ages/buildings sort in game order, then
  // sort in place (MUT = mutate the table rather than copy).
  table
    .proliferateColumn(
      [
        { key: 'age', to: x => RANKED_AGES.indexOf(x), as: 'ageIndex' },
        { key: 'building', to: x => RANKED_BUILDINGS.indexOf(x), as: 'buildingIndex' },
      ],
      MUT
    )
    .sort('ageIndex', NUM_DESC, MUT)
    .sort('buildingIndex', NUM_ASC, MUT)
  // Pivot: rows = age, columns = building, cell = averaged stat rounded to 1dp.
  crosTabCollection['AoEIIUnitsAttackByStages'] = table
    .crosTab({ side: 'age', banner: 'building', field: { attack: AVERAGE } })
    .map(({ value }) => isNumeric(value) ? roundD1(value) : value, MUT)
  crosTabCollection['AoEIIUnitsHpByStages'] = table
    .crosTab({ side: 'age', banner: 'building', field: { hp: AVERAGE } })
    .map(({ value }) => isNumeric(value) ? roundD1(value) : value, MUT)
  // Log each crostab and write it to DEST as `export const <key> = ...`.
  for (let [key, crosTab] of Object.entries(crosTabCollection)) {
    crosTab |> decoCrostab |> says[key]
    await Vinylize(key + '.js')
      .p(esvar(key))
      .p(Verse.crostab(crosTab))
      .asyncPipe(gulp.dest(DEST))
  }
}
larsw/rya
extras/indexing/src/main/java/org/apache/rya/api/client/mongo/MongoCreatePCJ.java
<reponame>larsw/rya
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.rya.api.client.mongo;

import static java.util.Objects.requireNonNull;

import java.util.Set;

import org.apache.rya.api.client.CreatePCJ;
import org.apache.rya.api.client.InstanceDoesNotExistException;
import org.apache.rya.api.client.InstanceExists;
import org.apache.rya.api.client.RyaClientException;
import org.apache.rya.indexing.pcj.storage.PrecomputedJoinStorage.PCJStorageException;
import org.apache.rya.indexing.pcj.storage.mongo.MongoPcjStorage;

import com.google.common.collect.Sets;
import com.mongodb.MongoClient;

/**
 * A Mongo implementation of {@link CreatePCJ}.
 */
public class MongoCreatePCJ implements CreatePCJ {
    // Used to verify the target Rya instance exists before creating the PCJ.
    private final InstanceExists instanceExists;
    // Connection used to open the Mongo-backed PCJ storage.
    private final MongoClient mongoClient;

    /**
     * Constructs an instance of {@link MongoCreatePCJ}.
     *
     * @param mongoClient - The {@link MongoClient} used to create a new PCJ. (not null)
     * @param instanceExists - The interactor used to check if a Rya instance exists. (not null)
     */
    public MongoCreatePCJ(
            final MongoClient mongoClient,
            final MongoInstanceExists instanceExists) {
        this.mongoClient = requireNonNull(mongoClient);
        this.instanceExists = requireNonNull(instanceExists);
    }

    // NOTE(review): the `strategies` parameter is accepted but never used by
    // this Mongo implementation — confirm whether export strategies are
    // intentionally unsupported here.
    @Override
    public String createPCJ(final String ryaInstanceName, final String sparql, final Set<ExportStrategy> strategies) throws InstanceDoesNotExistException, RyaClientException {
        requireNonNull(ryaInstanceName);
        requireNonNull(sparql);

        // Ensure the Rya Instance exists.
        if (!instanceExists.exists(ryaInstanceName)) {
            throw new InstanceDoesNotExistException(String.format("There is no Rya instance named '%s'.", ryaInstanceName));
        }

        // try-with-resources closes the PCJ storage even when createPcj throws.
        try(final MongoPcjStorage pcjStore = new MongoPcjStorage(mongoClient, ryaInstanceName)) {
            return pcjStore.createPcj(sparql);
        } catch (final PCJStorageException e) {
            throw new RyaClientException("Unable to create PCJ for: " + sparql, e);
        }
    }

    // Convenience overload: defaults the export strategy to RYA.
    @Override
    public String createPCJ(final String instanceName, final String sparql) throws InstanceDoesNotExistException, RyaClientException {
        return createPCJ(instanceName, sparql, Sets.newHashSet(ExportStrategy.RYA));
    }
}
op896898466/apitest
apps/interfacemocks/serializers.py
# -*- coding: utf-8 -*-
from rest_framework import serializers
from .models import Interfacemocks


class InterfacemocksSerializer(serializers.ModelSerializer):
    """Model serializer for :class:`Interfacemocks`.

    Exposes every model field via ``fields = '__all__'`` with the default
    DRF field mapping; no custom validation or field overrides are defined.
    """

    class Meta:
        model = Interfacemocks
        # '__all__' serializes every field declared on the model.
        fields = '__all__'
vdkkia/seek
test/unit/openbis/openbis_external_asset_test.rb
require 'test_helper'
require 'openbis_test_helper'

# Unit tests for OpenbisExternalAsset: building assets from openBIS entities
# (Zample / Dataset / Experiment), content (de)serialization, registration and
# lookup helpers, search-term extraction, and HTML/XML tag stripping.
# openBIS API calls are mocked in setup; entity perm-ids refer to the mocks.
class OpenbisExternalAssetTest < ActiveSupport::TestCase
  def setup
    mock_openbis_calls
    @endpoint = Factory(:openbis_endpoint)
    @asset = OpenbisExternalAsset.new
    @asset.seek_service = @endpoint
  end

  test 'builds from Zample' do
    zample = Seek::Openbis::Zample.new(@endpoint, '20171002172111346-37')
    options = { tomek: false }
    asset = OpenbisExternalAsset.build(zample, options)

    assert_equal @endpoint, asset.seek_service
    assert_equal '20171002172111346-37', asset.external_id
    # 'https://openbis-api.fair-dom.org/openbis',
    # assert_equal @endpoint.web_endpoint, asset.external_service
    assert_equal @endpoint.id.to_s, asset.external_service
    assert_equal '2017-10-02T18:09:34+00:00', asset.external_mod_stamp
    assert_equal 'Seek::Openbis::Zample', asset.external_type
    assert asset.synchronized_at
    assert_equal 'synchronized', asset.sync_state
    assert asset.synchronized?
    assert_equal options, asset.sync_options
    assert_equal 1, asset.version
    # Options are only serialized to JSON on save.
    refute asset.sync_options_json
    assert asset.valid?
    assert asset.save
    assert asset.sync_options_json
    assert asset.send(:local_content_json)
    # Same object, not a deserialized copy.
    assert_same zample, asset.content
  end

  test 'deserializes Zample from content' do
    zample = Seek::Openbis::Zample.new(@endpoint, '20171002172111346-37')
    @asset.external_type = zample.class.to_s
    json = @asset.serialize_content zample
    assert json
    entity = @asset.deserialize_content json
    assert entity
    assert_equal Seek::Openbis::Zample, entity.class
    assert_equal zample, entity
  end

  test 'builds from Dataset' do
    entity = Seek::Openbis::Dataset.new(@endpoint, '20160210130454955-23')
    options = { tomek: false }
    asset = OpenbisExternalAsset.build(entity, options)

    assert_equal @endpoint, asset.seek_service
    assert_equal '20160210130454955-23', asset.external_id
    # 'https://openbis-api.fair-dom.org/openbis'
    # assert_equal @endpoint.web_endpoint, asset.external_service
    assert_equal @endpoint.id.to_s, asset.external_service
    assert_equal '2016-02-10T12:04:55+00:00', asset.external_mod_stamp
    assert_equal 'Seek::Openbis::Dataset', asset.external_type
    assert asset.synchronized_at
    assert_equal 'synchronized', asset.sync_state
    assert asset.synchronized?
    assert_equal options, asset.sync_options
    assert_equal 1, asset.version
    refute asset.sync_options_json
    assert asset.valid?
    assert asset.save
    assert asset.sync_options_json
    assert asset.send(:local_content_json)
    assert_same entity, asset.content
  end

  test 'registered? works' do
    zample = Seek::Openbis::Zample.new(@endpoint, '20171002172111346-37')
    refute OpenbisExternalAsset.registered?(zample)
    asset = OpenbisExternalAsset.new(external_service: zample.openbis_endpoint.id, external_id: zample.perm_id)
    assert asset.save
    assert OpenbisExternalAsset.registered?(zample)
  end

  test 'needs_reindexing is always true for new record' do
    asset = OpenbisExternalAsset.new
    assert asset.needs_reindexing
    zample = Seek::Openbis::Zample.new(@endpoint, '20171002172111346-37')
    options = { tomek: false }
    asset = OpenbisExternalAsset.build(zample, options)
    assert asset.needs_reindexing
    # Setting content does not clear the reindexing flag on an unsaved record.
    asset.content = zample
    assert asset.needs_reindexing
  end

  test 'find_by_entity works' do
    zample = Seek::Openbis::Zample.new(@endpoint, '20171002172111346-37')
    # Raises when no matching asset has been persisted yet.
    assert_raises(ActiveRecord::RecordNotFound) do
      OpenbisExternalAsset.find_by_entity(zample)
    end
    asset = OpenbisExternalAsset.new(external_service: zample.openbis_endpoint.id, external_id: zample.perm_id)
    assert asset.save
    assert OpenbisExternalAsset.find_by_entity(zample)
  end

  test 'find_or_create_by_entity finds or creates' do
    zample = Seek::Openbis::Zample.new(@endpoint, '20171002172111346-37')
    # First call: builds an unsaved asset wrapping the entity.
    asset = OpenbisExternalAsset.find_or_create_by_entity(zample)
    assert asset
    assert asset.is_a? OpenbisExternalAsset
    refute asset.persisted?
    assert asset.new_record?
    assert_same asset.content, zample
    assert asset.save!
    # Second call: finds the persisted asset instead of building a new one.
    asset = OpenbisExternalAsset.find_or_create_by_entity(zample)
    assert asset
    assert asset.is_a? OpenbisExternalAsset
    assert asset.persisted?
    refute asset.new_record?
    assert_equal asset.content, zample
  end

  test 'openbis_search_terms' do
    dataset = Seek::Openbis::Dataset.new(@endpoint, '20160210130454955-23')
    asset = OpenbisExternalAsset.build(dataset)
    terms = asset.search_terms
    assert_includes terms, '20160210130454955-23'
    assert_includes terms, 'TEST_DATASET_TYPE'
    assert_includes terms, 'for api test'
    assert_includes terms, 'original/autumn.jpg'
    assert_includes terms, 'autumn.jpg'
    assert_includes terms, 'apiuser'
    # values form openbis parametes as well as key:value pairs
    assert_includes terms, 'DataFile_3'
    assert_includes terms, 'SEEK_DATAFILE_ID:DataFile_3'
  end

  test 'openbis_search_terms simplifies rich text content' do
    experiment = Seek::Openbis::Experiment.new(@endpoint, '20171121152132641-51')
    asset = OpenbisExternalAsset.build(experiment)
    terms = asset.search_terms
    goals = terms.select { |t| t.start_with? 'EXPERIMENTAL_GOALS' }.first
    comments = terms.select { |t| t.start_with? 'XMLCOMMENTS' }.first
    assert goals
    assert comments
    # puts goals
    # puts comments
    # HTML markup must be stripped while the text content survives.
    refute goals.include? 'body'
    refute goals.include? '<body>'
    refute goals.include? '<ul>'
    assert goals.include? 'many circadian clock-associated genes have been identified.'
    assert_equal 'XMLCOMMENTS:My first comment', comments
  end

  test 'removeTAGS cleans html tags' do
    experiment = Seek::Openbis::Experiment.new(@endpoint, '20171121152132641-51')
    asset = OpenbisExternalAsset.build(experiment)
    text = ' <?xml version="1.0" encoding="UTF-8"?> <body><ul class="big"> <li style="weight: bold;">In Arabidopsis thaliana, many circadian clock-associated genes have been identified.</li> </ul></body> '
    res = asset.remove_tags(text)
    # puts res
    exp = 'In Arabidopsis thaliana, many circadian clock-associated genes have been identified.'
    assert_equal exp, res
  end

  test 'removeTAGS cleans xml coments' do
    experiment = Seek::Openbis::Experiment.new(@endpoint, '20171121152132641-51')
    asset = OpenbisExternalAsset.build(experiment)
    text = ' <root><commentEntry date=\"1511277676686\" person=\"seek\">My first comment</commentEntry></root> '
    res = asset.remove_tags(text)
    # puts res
    exp = 'My first comment'
    assert_equal exp, res
  end

  test 'removeTAGS escapes <> in text' do
    experiment = Seek::Openbis::Experiment.new(@endpoint, '20171121152132641-51')
    asset = OpenbisExternalAsset.build(experiment)
    text = ' temp < 3 C but > 2'
    res = asset.remove_tags(text)
    # puts res
    # Bare angle brackets in plain text are HTML-escaped, not removed.
    exp = 'temp &lt; 3 C but &gt; 2'
    assert_equal exp, res
  end
end