repo_name
stringlengths
6
101
path
stringlengths
4
300
text
stringlengths
7
1.31M
qconner/mongo
src/mongo/db/auth/sasl_scram_server_conversation.cpp
/* * Copyright (C) 2014 MongoDB Inc. * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License, version 3, * as published by the Free Software Foundation. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. * * As a special exception, the copyright holders give permission to link the * code of portions of this program with the OpenSSL library under certain * conditions as described in each individual source file and distribute * linked combinations including the program with the OpenSSL library. You * must comply with the GNU Affero General Public License in all respects for * all of the code used other than as permitted herein. If you modify file(s) * with this exception, you may extend this exception to your version of the * file(s), but you are not obligated to do so. If you do not wish to do so, * delete this exception statement from your version. If you delete this * exception statement from all source files in the program, then also delete * it in the license file. 
*/ #define MONGO_LOG_DEFAULT_COMPONENT ::mongo::logger::LogComponent::kAccessControl #include "mongo/platform/basic.h" #include "mongo/db/auth/sasl_scram_server_conversation.h" #include <boost/algorithm/string/join.hpp> #include <boost/algorithm/string/replace.hpp> #include "mongo/base/disallow_copying.h" #include "mongo/base/init.h" #include "mongo/base/status.h" #include "mongo/base/string_data.h" #include "mongo/crypto/mechanism_scram.h" #include "mongo/crypto/sha1_block.h" #include "mongo/db/auth/sasl_mechanism_policies.h" #include "mongo/db/auth/sasl_mechanism_registry.h" #include "mongo/db/auth/sasl_options.h" #include "mongo/platform/random.h" #include "mongo/util/base64.h" #include "mongo/util/log.h" #include "mongo/util/mongoutils/str.h" #include "mongo/util/sequence_util.h" #include "mongo/util/text.h" namespace mongo { template <typename Policy> StatusWith<std::tuple<bool, std::string>> SaslSCRAMServerMechanism<Policy>::stepImpl( OperationContext* opCtx, StringData inputData) { _step++; if (_step > 3 || _step <= 0) { return Status(ErrorCodes::AuthenticationFailed, str::stream() << "Invalid SCRAM authentication step: " << _step); } if (_step == 1) { return _firstStep(opCtx, inputData); } if (_step == 2) { return _secondStep(opCtx, inputData); } return std::make_tuple(true, std::string{}); } /* * RFC 5802 specifies that in SCRAM user names characters ',' and '=' are encoded as * =2C and =3D respectively. 
*/ static void decodeSCRAMUsername(std::string& user) { boost::replace_all(user, "=2C", ","); boost::replace_all(user, "=3D", "="); } /* * Parse client-first-message of the form: * n,a=authzid,n=encoded-username,r=client-nonce * * Generate server-first-message on the form: * r=client-nonce|server-nonce,s=user-salt,i=iteration-count * * NOTE: we are ignoring the authorization ID part of the message */ template <typename Policy> StatusWith<std::tuple<bool, std::string>> SaslSCRAMServerMechanism<Policy>::_firstStep( OperationContext* opCtx, StringData inputData) { const auto badCount = [](int got) { return Status(ErrorCodes::BadValue, str::stream() << "Incorrect number of arguments for first SCRAM client message, got " << got << " expected at least 3"); }; /** * gs2-cbind-flag := ("p=" cb-name) / 'y' / 'n' * gs2-header := gs2-cbind-flag ',' [ authzid ] ',' * reserved-mext := "m=" 1*(value-char) * client-first-message-bare := [reserved-mext ','] username ',' nonce [',' extensions] * client-first-message := gs2-header client-first-message-bare */ const auto gs2_cbind_comma = inputData.find(','); if (gs2_cbind_comma == std::string::npos) { return badCount(1); } const auto gs2_cbind_flag = inputData.substr(0, gs2_cbind_comma); if (gs2_cbind_flag.startsWith("p=")) { return Status(ErrorCodes::BadValue, "Server does not support channel binding"); } if ((gs2_cbind_flag != "y") && (gs2_cbind_flag != "n")) { return Status(ErrorCodes::BadValue, str::stream() << "Incorrect SCRAM client message prefix: " << gs2_cbind_flag); } const auto gs2_header_comma = inputData.find(',', gs2_cbind_comma + 1); if (gs2_header_comma == std::string::npos) { return badCount(2); } auto authzId = inputData.substr(gs2_cbind_comma + 1, gs2_header_comma - (gs2_cbind_comma + 1)); if (authzId.size()) { if (authzId.startsWith("a=")) { authzId = authzId.substr(2); } else { return Status(ErrorCodes::BadValue, str::stream() << "Incorrect SCRAM authzid: " << authzId); } } const auto client_first_message_bare = 
inputData.substr(gs2_header_comma + 1); if (client_first_message_bare.startsWith("m=")) { return Status(ErrorCodes::BadValue, "SCRAM mandatory extensions are not supported"); } /* StringSplitter::split() will ignore consecutive delimiters. * e.g. "foo,,bar" => {"foo","bar"} * This makes our implementation of SCRAM *slightly* more generous * in what it will accept than the standard calls for. * * This does not impact _authMessage, as it's composed from the raw * string input, rather than the output of the split operation. */ const auto input = StringSplitter::split(client_first_message_bare.toString(), ","); if (input.size() < 2) { // gs2-header is not included in this count, so add it back in. return badCount(input.size() + 2); } if (!str::startsWith(input[0], "n=") || input[0].size() < 3) { return Status(ErrorCodes::BadValue, str::stream() << "Invalid SCRAM user name: " << input[0]); } ServerMechanismBase::_principalName = input[0].substr(2); decodeSCRAMUsername(ServerMechanismBase::_principalName); if (!authzId.empty() && ServerMechanismBase::_principalName != authzId) { return Status(ErrorCodes::BadValue, str::stream() << "SCRAM user name " << ServerMechanismBase::_principalName << " does not match authzid " << authzId); } if (!str::startsWith(input[1], "r=") || input[1].size() < 6) { return Status(ErrorCodes::BadValue, str::stream() << "Invalid SCRAM client nonce: " << input[1]); } const auto clientNonce = input[1].substr(2); // SERVER-16534, SCRAM-SHA-1 must be enabled for authenticating the internal user, so that // cluster members may communicate with each other. Hence ignore disabled auth mechanism // for the internal user. 
UserName user(ServerMechanismBase::ServerMechanismBase::_principalName, ServerMechanismBase::getAuthenticationDatabase()); if (!sequenceContains(saslGlobalParams.authenticationMechanisms, "SCRAM-SHA-1") && user != internalSecurity.user->getName()) { return Status(ErrorCodes::BadValue, "SCRAM-SHA-1 authentication is disabled"); } // The authentication database is also the source database for the user. User* userObj; auto authManager = AuthorizationManager::get(opCtx->getServiceContext()); Status status = authManager->acquireUser(opCtx, user, &userObj); if (!status.isOK()) { return status; } User::CredentialData credentials = userObj->getCredentials(); UserName userName = userObj->getName(); authManager->releaseUser(userObj); _scramCredentials = credentials.scram<HashBlock>(); if (!_scramCredentials.isValid()) { // Check for authentication attempts of the __system user on // systems started without a keyfile. if (userName == internalSecurity.user->getName()) { return Status(ErrorCodes::AuthenticationFailed, "It is not possible to authenticate as the __system user " "on servers started without a --keyFile parameter"); } else { return Status(ErrorCodes::AuthenticationFailed, "Unable to perform SCRAM authentication for a user with missing " "or invalid SCRAM credentials"); } } _secrets = scram::Secrets<HashBlock>("", base64::decode(_scramCredentials.storedKey), base64::decode(_scramCredentials.serverKey)); // Generate server-first-message // Create text-based nonce as base64 encoding of a binary blob of length multiple of 3 const int nonceLenQWords = 3; uint64_t binaryNonce[nonceLenQWords]; std::unique_ptr<SecureRandom> sr(SecureRandom::create()); binaryNonce[0] = sr->nextInt64(); binaryNonce[1] = sr->nextInt64(); binaryNonce[2] = sr->nextInt64(); _nonce = clientNonce + base64::encode(reinterpret_cast<char*>(binaryNonce), sizeof(binaryNonce)); StringBuilder sb; sb << "r=" << _nonce << ",s=" << _scramCredentials.salt << ",i=" << _scramCredentials.iterationCount; 
std::string outputData = sb.str(); // add client-first-message-bare and server-first-message to _authMessage _authMessage = client_first_message_bare.toString() + "," + outputData; return std::make_tuple(false, std::move(outputData)); } /** * Parse client-final-message of the form: * c=channel-binding(base64),r=client-nonce|server-nonce,p=ClientProof * * Generate successful authentication server-final-message on the form: * v=ServerSignature * * or failed authentication server-final-message on the form: * e=message * * NOTE: we are ignoring the channel binding part of the message **/ template <typename Policy> StatusWith<std::tuple<bool, std::string>> SaslSCRAMServerMechanism<Policy>::_secondStep( OperationContext* opCtx, StringData inputData) { const auto badCount = [](int got) { return Status(ErrorCodes::BadValue, str::stream() << "Incorrect number of arguments for second SCRAM client message, got " << got << " expected at least 3"); }; /** * client-final-message-without-proof := cbind ',' nonce ',' [ ',' extensions ] * client-final-message := client-final-message-without-proof ',' proof */ const auto last_comma = inputData.rfind(','); if (last_comma == std::string::npos) { return badCount(1); } // add client-final-message-without-proof to authMessage const auto client_final_message_without_proof = inputData.substr(0, last_comma); _authMessage += "," + client_final_message_without_proof.toString(); const auto last_field = inputData.substr(last_comma + 1); if ((last_field.size() < 3) || !last_field.startsWith("p=")) { return Status(ErrorCodes::BadValue, str::stream() << "Incorrect SCRAM ClientProof: " << last_field); } const auto proof = last_field.substr(2); const auto input = StringSplitter::split(client_final_message_without_proof.toString(), ","); if (input.size() < 2) { // Add count for proof back on. 
return badCount(input.size() + 1); } if (!str::startsWith(input[0], "c=") || input[0].size() < 3) { return Status(ErrorCodes::BadValue, str::stream() << "Incorrect SCRAM channel binding: " << input[0]); } const auto cbind = input[0].substr(2); if (!str::startsWith(input[1], "r=") || input[1].size() < 6) { return Status(ErrorCodes::BadValue, str::stream() << "Incorrect SCRAM client|server nonce: " << input[1]); } const auto nonce = input[1].substr(2); // Concatenated nonce sent by client should equal the one in server-first-message if (nonce != _nonce) { return Status(ErrorCodes::BadValue, str::stream() << "Unmatched SCRAM nonce received from client in second step, expected " << _nonce << " but received " << nonce); } // Do server side computations, compare storedKeys and generate client-final-message // AuthMessage := client-first-message-bare + "," + // server-first-message + "," + // client-final-message-without-proof // ClientSignature := HMAC(StoredKey, AuthMessage) // ClientKey := ClientSignature XOR ClientProof // ServerSignature := HMAC(ServerKey, AuthMessage) if (!_secrets.verifyClientProof(_authMessage, base64::decode(proof.toString()))) { return Status(ErrorCodes::AuthenticationFailed, "SCRAM authentication failed, storedKey mismatch"); } StringBuilder sb; // ServerSignature := HMAC(ServerKey, AuthMessage) sb << "v=" << _secrets.generateServerSignature(_authMessage); return std::make_tuple(false, sb.str()); } template class SaslSCRAMServerMechanism<SCRAMSHA1Policy>; template class SaslSCRAMServerMechanism<SCRAMSHA256Policy>; MONGO_INITIALIZER_WITH_PREREQUISITES(SASLSCRAMServerMechanism, ("CreateSASLServerMechanismRegistry")) (::mongo::InitializerContext* context) { auto& registry = SASLServerMechanismRegistry::get(getGlobalServiceContext()); registry.registerFactory<SCRAMSHA1ServerFactory>(); registry.registerFactory<SCRAMSHA256ServerFactory>(); return Status::OK(); } } // namespace mongo
aniket328/CompModelMatch
db/migrate/20141028160723_create_asset_doi_logs.rb
<filename>db/migrate/20141028160723_create_asset_doi_logs.rb class CreateAssetDoiLogs < ActiveRecord::Migration def change create_table :asset_doi_logs do |t| t.string :asset_type t.integer :asset_id t.integer :asset_version t.string :action t.text :comment t.timestamps end end end
beaglecode/phantomjs
src/qt/src/gui/dialogs/qpagesetupdialog_win.cpp
/**************************************************************************** ** ** Copyright (C) 2012 Nokia Corporation and/or its subsidiary(-ies). ** All rights reserved. ** Contact: Nokia Corporation (<EMAIL>) ** ** This file is part of the QtGui module of the Qt Toolkit. ** ** $QT_BEGIN_LICENSE:LGPL$ ** GNU Lesser General Public License Usage ** This file may be used under the terms of the GNU Lesser General Public ** License version 2.1 as published by the Free Software Foundation and ** appearing in the file LICENSE.LGPL included in the packaging of this ** file. Please review the following information to ensure the GNU Lesser ** General Public License version 2.1 requirements will be met: ** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html. ** ** In addition, as a special exception, Nokia gives you certain additional ** rights. These rights are described in the Nokia Qt LGPL Exception ** version 1.1, included in the file LGPL_EXCEPTION.txt in this package. ** ** GNU General Public License Usage ** Alternatively, this file may be used under the terms of the GNU General ** Public License version 3.0 as published by the Free Software Foundation ** and appearing in the file LICENSE.GPL included in the packaging of this ** file. Please review the following information to ensure the GNU General ** Public License version 3.0 requirements will be met: ** http://www.gnu.org/copyleft/gpl.html. ** ** Other Usage ** Alternatively, this file may be used in accordance with the terms and ** conditions contained in a signed written agreement between you and Nokia. 
** ** ** ** ** ** $QT_END_LICENSE$ ** ****************************************************************************/ #include "qpagesetupdialog.h" #ifndef QT_NO_PRINTDIALOG #include <qapplication.h> #include <private/qprintengine_win_p.h> #include <private/qabstractpagesetupdialog_p.h> QT_BEGIN_NAMESPACE class QPageSetupDialogPrivate : public QAbstractPageSetupDialogPrivate { }; QPageSetupDialog::QPageSetupDialog(QPrinter *printer, QWidget *parent) : QAbstractPageSetupDialog(*(new QPageSetupDialogPrivate), printer, parent) { } QPageSetupDialog::QPageSetupDialog(QWidget *parent) : QAbstractPageSetupDialog(*(new QPageSetupDialogPrivate), 0, parent) { } int QPageSetupDialog::exec() { Q_D(QPageSetupDialog); if (d->printer->outputFormat() != QPrinter::NativeFormat) return Rejected; QWin32PrintEngine *engine = static_cast<QWin32PrintEngine*>(d->printer->paintEngine()); QWin32PrintEnginePrivate *ep = static_cast<QWin32PrintEnginePrivate *>(engine->d_ptr.data()); PAGESETUPDLG psd; memset(&psd, 0, sizeof(PAGESETUPDLG)); psd.lStructSize = sizeof(PAGESETUPDLG); // we need a temp DEVMODE struct if we don't have a global DEVMODE HGLOBAL hDevMode = 0; int devModeSize = 0; if (!ep->globalDevMode) { devModeSize = sizeof(DEVMODE) + ep->devMode->dmDriverExtra; hDevMode = GlobalAlloc(GHND, devModeSize); if (hDevMode) { void *dest = GlobalLock(hDevMode); memcpy(dest, ep->devMode, devModeSize); GlobalUnlock(hDevMode); } psd.hDevMode = hDevMode; } else { psd.hDevMode = ep->devMode; } HGLOBAL *tempDevNames = ep->createDevNames(); psd.hDevNames = tempDevNames; QWidget *parent = parentWidget(); parent = parent ? parent->window() : QApplication::activeWindow(); Q_ASSERT(!parent ||parent->testAttribute(Qt::WA_WState_Created)); psd.hwndOwner = parent ? 
parent->winId() : 0; QRect paperRect = d->printer->paperRect(); QRect pageRect = d->printer->pageRect(); psd.Flags = PSD_MARGINS; double multiplier = 1; switch (QLocale::system().measurementSystem()) { case QLocale::MetricSystem: psd.Flags |= PSD_INHUNDREDTHSOFMILLIMETERS; multiplier = 1; break; case QLocale::ImperialSystem: psd.Flags |= PSD_INTHOUSANDTHSOFINCHES; multiplier = 25.4/10; break; } QRect marginRect = ep->getPageMargins(); psd.rtMargin.left = marginRect.left() / multiplier; psd.rtMargin.top = marginRect.top() / multiplier; psd.rtMargin.right = marginRect.width() / multiplier;; psd.rtMargin.bottom = marginRect.height() / multiplier;; bool result = PageSetupDlg(&psd); if (result) { ep->readDevnames(psd.hDevNames); ep->readDevmode(psd.hDevMode); QRect theseMargins = QRect(psd.rtMargin.left * multiplier, psd.rtMargin.top * multiplier, psd.rtMargin.right * multiplier, psd.rtMargin.bottom * multiplier); if (theseMargins != marginRect) { ep->setPageMargins(psd.rtMargin.left * multiplier, psd.rtMargin.top * multiplier, psd.rtMargin.right * multiplier, psd.rtMargin.bottom * multiplier); } ep->updateCustomPaperSize(); // copy from our temp DEVMODE struct if (!ep->globalDevMode && hDevMode) { void *src = GlobalLock(hDevMode); memcpy(ep->devMode, src, devModeSize); GlobalUnlock(hDevMode); } } if (!ep->globalDevMode && hDevMode) GlobalFree(hDevMode); GlobalFree(tempDevNames); done(result); return result; } void QPageSetupDialog::setVisible(bool visible) { if (!visible) return; exec(); } QT_END_NAMESPACE #endif
vltmedia/TaxButler
src/api/launchHelp.js
<filename>src/api/launchHelp.js window.location.href = 'https://github.com/vltmedia/TaxButler';
unicsmcr/hs_auth
utils/sendgrid.go
package utils import ( "github.com/sendgrid/sendgrid-go" "github.com/unicsmcr/hs_auth/environment" ) func NewSendgridClient(env *environment.Env) *sendgrid.Client { return sendgrid.NewSendClient(env.Get(environment.SendgridAPIKey)) }
TimCook1/trident
storage_drivers/ontap/api/rest/client/storage/volume_collection_get_responses.go
// Code generated by go-swagger; DO NOT EDIT. package storage // This file was generated by the swagger tool. // Editing this file might prove futile when you re-run the swagger generate command import ( "fmt" "io" "github.com/go-openapi/runtime" "github.com/go-openapi/strfmt" "github.com/netapp/trident/storage_drivers/ontap/api/rest/models" ) // VolumeCollectionGetReader is a Reader for the VolumeCollectionGet structure. type VolumeCollectionGetReader struct { formats strfmt.Registry } // ReadResponse reads a server response into the received o. func (o *VolumeCollectionGetReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { switch response.Code() { case 200: result := NewVolumeCollectionGetOK() if err := result.readResponse(response, consumer, o.formats); err != nil { return nil, err } return result, nil default: result := NewVolumeCollectionGetDefault(response.Code()) if err := result.readResponse(response, consumer, o.formats); err != nil { return nil, err } if response.Code()/100 == 2 { return result, nil } return nil, result } } // NewVolumeCollectionGetOK creates a VolumeCollectionGetOK with default headers values func NewVolumeCollectionGetOK() *VolumeCollectionGetOK { return &VolumeCollectionGetOK{} } /* VolumeCollectionGetOK describes a response with status code 200, with default header values. 
OK */ type VolumeCollectionGetOK struct { Payload *models.VolumeResponse } func (o *VolumeCollectionGetOK) Error() string { return fmt.Sprintf("[GET /storage/volumes][%d] volumeCollectionGetOK %+v", 200, o.Payload) } func (o *VolumeCollectionGetOK) GetPayload() *models.VolumeResponse { return o.Payload } func (o *VolumeCollectionGetOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { o.Payload = new(models.VolumeResponse) // response payload if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { return err } return nil } // NewVolumeCollectionGetDefault creates a VolumeCollectionGetDefault with default headers values func NewVolumeCollectionGetDefault(code int) *VolumeCollectionGetDefault { return &VolumeCollectionGetDefault{ _statusCode: code, } } /* VolumeCollectionGetDefault describes a response with status code -1, with default header values. Error */ type VolumeCollectionGetDefault struct { _statusCode int Payload *models.ErrorResponse } // Code gets the status code for the volume collection get default response func (o *VolumeCollectionGetDefault) Code() int { return o._statusCode } func (o *VolumeCollectionGetDefault) Error() string { return fmt.Sprintf("[GET /storage/volumes][%d] volume_collection_get default %+v", o._statusCode, o.Payload) } func (o *VolumeCollectionGetDefault) GetPayload() *models.ErrorResponse { return o.Payload } func (o *VolumeCollectionGetDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { o.Payload = new(models.ErrorResponse) // response payload if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { return err } return nil }
noma/dm-heom
dm-heom/src/heom/communicator.cpp
<filename>dm-heom/src/heom/communicator.cpp // This file is part of DM-HEOM (https://github.com/noma/dm-heom) // // Copyright (c) 2015-2019 <NAME>, Zuse Institute Berlin // // Licensed under the 3-clause BSD License, see accompanying LICENSE, // CONTRIBUTORS.md, and README.md for further information. #include "heom/communicator.hpp" #include "heom/instance.hpp" #include "heom/hierarchy_partition.hpp" #include "debug.hpp" namespace heom { const std::string mpi_error_to_string(int err) { switch (err) { case MPI_SUCCESS: return "No error (MPI_SUCCESS)."; case MPI_ERR_BUFFER: return "Invalid buffer pointer (MPI_ERR_BUFFER)."; case MPI_ERR_COUNT: return "Invalid count argument (MPI_ERR_COUNT)."; case MPI_ERR_TYPE: return "Invalid datatype argument (MPI_ERR_TYPE)."; case MPI_ERR_TAG: return "Invalid tag argument (MPI_ERR_TAG)."; case MPI_ERR_COMM: return "Invalid communicator (MPI_ERR_COMM)."; case MPI_ERR_RANK: return "Invalid rank (MPI_ERR_RANK)."; case MPI_ERR_REQUEST: return "Invalid request (handle) (MPI_ERR_REQUEST)."; case MPI_ERR_ROOT: return "Invalid root (MPI_ERR_ROOT)."; case MPI_ERR_GROUP: return "Invalid group (MPI_ERR_GROUP)."; case MPI_ERR_OP: return "Invalid operation (MPI_ERR_OP)."; case MPI_ERR_TOPOLOGY: return "Invalid topology (MPI_ERR_TOPOLOGY)."; case MPI_ERR_DIMS: return "Invalid dimension argument (MPI_ERR_DIMS)."; case MPI_ERR_ARG: return "Invalid argument of some other kind (MPI_ERR_ARG)."; case MPI_ERR_UNKNOWN: return "Unknown error (MPI_ERR_UNKNOWN)."; case MPI_ERR_TRUNCATE: return " (Message truncated on receive)."; case MPI_ERR_OTHER: return "Known error not in this list (MPI_ERR_OTHER)."; case MPI_ERR_INTERN: return "Internal MPI (implementation) error (MPI_ERR_INTERN)."; case MPI_ERR_IN_STATUS: return "Error code is in status (MPI_ERR_IN_STATUS)."; case MPI_ERR_PENDING: return "Pending request (MPI_ERR_PENDING)."; case MPI_ERR_LASTCODE: return "Last error code (MPI_ERR_LASTCODE)."; default: return "Unknown error code."; } } bool 
communicator::initialised_ = false; int communicator::rank_ = -1; int communicator::ranks_ = -1; void communicator::check_error(const int err, const std::string& msg) { if (err != MPI_SUCCESS) throw communication_error(msg + mpi_error_to_string(err)); } void communicator::init(int* argc, char*** argv) { check_error(MPI_Init(argc, argv), "MPI_Init"); check_error(MPI_Comm_rank(MPI_COMM_WORLD, &rank_), "MPI_Comm_rank"); check_error(MPI_Comm_size(MPI_COMM_WORLD, &ranks_), "MPI_Comm_size"); initialised_ = true; } void communicator::deinit() { if (!initialised_) throw communication_error("communicator::deinit() called without prior init()."); //check_error(MPI_Barrier(MPI_COMM_WORLD), "MPI_Barrier"); // NOTE: activate for debugging, with output before and after check_error(MPI_Finalize(), "MPI_Finalize"); } int communicator::rank() { return rank_; } int communicator::ranks() { return ranks_; } // NOTE: the idea here is to // - build send/recv data types so MPI handles packing/unpacking, // - setup neighbourhood communicators to reflect the partition graph, and // - then use MPI_Neighbour_alltoallw on those communictor with the data types to do the neighbour exchange // NOTE: out = sent by this process, in = received by this process communicator::communicator(const hierarchy_partition& partition, const instance& inst) : sources_(partition.in_neighbour_ranks().begin(), partition.in_neighbour_ranks().end()), // collect source ranks into a vector that can be used in MPI-calls via data() destinations_(partition.out_neighbour_ranks().begin(), partition.out_neighbour_ranks().end()), // same for destination rank, see above compute_size_byte_(partition.compute_nodes() * inst.size_hierarchy_top_byte()), halo_size_byte_(partition.halo_nodes() * inst.size_hierarchy_top_byte()), send_buffer_(reinterpret_cast<void*>(inst.hierarchy())), recv_buffer_(reinterpret_cast<void*>(reinterpret_cast<std::int8_t*>(inst.hierarchy()) + compute_size_byte_)), // NOTE: cast to int8_t for byte-sized 
pointer arithmetic send_size_matrix_(ranks_, ranks_, 0) { // set container sizes to avoid re-allocations recv_counts_.reserve(sources_.size()); send_counts_.reserve(destinations_.size()); recv_displacements_.reserve(sources_.size()); send_displacements_.reserve(destinations_.size()); recv_mpi_types_.reserve(sources_.size()); send_mpi_types_.reserve(destinations_.size()); recv_mpi_type_sizes_.reserve(sources_.size()); send_mpi_type_sizes_.reserve(destinations_.size()); // set up neighbourhood communicator from partition graph check_error(MPI_Dist_graph_create_adjacent(MPI_COMM_WORLD, // comm_old sources_.size(), // indegree sources_.data(), // sources[] reinterpret_cast<const int *>(MPI_UNWEIGHTED), // sourceweights, NOTE: in some implementations, MPI_UNWEIGHTED has the wrong type destinations_.size(), // outdegree destinations_.data(), // destinations[] reinterpret_cast<const int *>(MPI_UNWEIGHTED), // destweights, NOTE: in some implementations, MPI_UNWEIGHTED has the wrong type MPI_INFO_NULL, // info 0, // reorder &neighbour_comm_), // comm_dist_graph "MPI_Dist_graph_create_adjacent"); // create MPI type for a single hierarchy node, aka sigma matrix check_error(MPI_Type_contiguous(inst.size_hierarchy_top_byte() / sizeof(real_t), real_mpi_type_, &hierarchy_node_mpi_type_), "MPI_Type_contiguous"); check_error(MPI_Type_commit(&hierarchy_node_mpi_type_), "MPI_Type_commit"); DEBUG_ONLY( int type_size = 0; check_error(MPI_Type_size(real_mpi_type_, &type_size), "MPI_Type_size"); assert(static_cast<size_t>(type_size) == sizeof(real_t)); check_error(MPI_Type_size(hierarchy_node_mpi_type_, &type_size), "MPI_Type_size"); assert(static_cast<size_t>(type_size) == inst.size_hierarchy_top_byte()); ) // setup send/out/destinations stuff (complicated, non-contiguous sub-sets of nodes computed on this rank, needed by others, matched by a contiguous receive data type on another rank) // create MPI data types for each rank this one is sending to ASSERT_ONLY( rank_t last_destination = 
-1; ) for (const auto& destination : destinations_) { assert(destination > last_destination); // make sure the expected rank order is valid, i.e. numerical order // collect blocks of of contiguous hierarchy nodes for that each neighbour rank const auto& send_uids = partition.rank_to_out_neighbours().at(destination); // set of nodes this rank needs to send to destination, global uids int current_block_length = 0; std::vector<int> displacements; // in hierarchy nodes std::vector<int> block_lengths; // in hierarchy nodes uid_t last_lid = hierarchy_graph::invalid_uid; // track last uid to detect contiguously aligned blocks of local hierarchy nodes for (const auto& current_uid : send_uids) { rank_t current_lid = partition.uid_to_lid().at(current_uid); assert(current_lid < partition.compute_nodes()); // must be a locally computed hierarchy node if (current_lid != (last_lid + 1) || last_lid == hierarchy_graph::invalid_uid) // block start/end non-contiguous lid sequence, or first block { // end block, if there was one if (last_lid != hierarchy_graph::invalid_uid) { assert(current_block_length > 0); block_lengths.push_back(current_block_length); // add the block lengths } // start new block current_block_length = 0; // reset block length displacements.push_back(current_lid); // block starts at displacement } last_lid = current_lid; ++current_block_length; // increment block length in any case } block_lengths.push_back(current_block_length); // finish last block block_lengths.shrink_to_fit(); displacements.shrink_to_fit(); assert(block_lengths.size() == displacements.size()); send_counts_.push_back(1); // NOTE: currently exactly one instance per type is required send_displacements_.push_back(0); // NOTE: currently not needed, first block of MPI_Type_indexed contains a displacement already, i.e. 
each type is relative to the start of send_buffer MPI_Datatype new_mpi_type; check_error(MPI_Type_indexed(block_lengths.size(), // int count block_lengths.data(), // int* array_of_blocklengths, displacements.data(), // int *array_of_displacements, hierarchy_node_mpi_type_, // MPI_Datatype oldtype, &new_mpi_type), // MPI_Datatype *newtype) "MPI_Type_indexed"); check_error(MPI_Type_commit(&new_mpi_type), "MPI_Type_commit"); send_mpi_types_.push_back(new_mpi_type); int type_size = 0; check_error(MPI_Type_size(new_mpi_type, &type_size), "MPI_Type_size"); send_mpi_type_sizes_.push_back(static_cast<size_t>(type_size)); DEBUG_ONLY( std::cout << "communicator::communicator(..): created MPI_Type_indexed to send from rank " << rank_ << " to " << destination << " of " << boost::format("%.2f MiB") % (type_size / 1024.0 / 1024.0) << " in " << block_lengths.size() << " blocks" << std::endl; ) DEBUG_ONLY( for (size_t i = 0; i < displacements.size(); ++i) { std::cout << "communicator::communicator(..): rank " << rank_ << " to " << destination << ": block " << i << ": l = " << block_lengths[i] << ", d = " << displacements[i] << std::endl; assert(((displacements[i] + block_lengths[i])) <= (partition.compute_nodes())); } ) ASSERT_ONLY( last_destination = destination; ) } assert(destinations_.size() == send_counts_.size() && send_counts_.size() == send_displacements_.size() && send_displacements_.size() == send_mpi_types_.size()); // setup recv/in/sources stuff (simple, contiguous sets of nodes needed by this rank sent from other ranks, matched by a non-contiguous send data type on another rank) ASSERT_ONLY( rank_t last_source = -1; ) int node_offset = 0; for (const auto& source : sources_) { assert(source > last_source); // make sure the expected rank order is valid, i.e. 
numerical order recv_counts_.push_back(1); // NOTE: currently exactly one instance per type is required const int node_count = partition.rank_to_in_neighbours().at(source).size(); const int displacement_byte = node_offset * inst.size_hierarchy_top_byte(); recv_displacements_.push_back(displacement_byte); node_offset += node_count; // data is already aligned contiguously but might vary in size for each neighbour partition // contiguous buffer containing the amount of hierarchy nodes received by a certain neighbour rank MPI_Datatype new_mpi_type; check_error(MPI_Type_contiguous(node_count, hierarchy_node_mpi_type_, &new_mpi_type), "MPI_Type_contiguous"); check_error(MPI_Type_commit(&new_mpi_type), "MPI_Type_commit"); recv_mpi_types_.push_back(new_mpi_type); int type_size = 0; check_error(MPI_Type_size(new_mpi_type, &type_size), "MPI_Type_size"); recv_mpi_type_sizes_.push_back(static_cast<size_t>(type_size)); DEBUG_ONLY( std::cout << "communicator::communicator(..): created MPI_Type_contiguous to recv on rank " << rank_ << " from " << source << " of " << boost::format("%.2f MiB") % (type_size / 1024.0 / 1024.0) << " with " << node_count << " hierarchy nodes, displaced by " << displacement_byte << " byte" << std::endl; ) ASSERT_ONLY( last_source = source; ) } assert(sources_.size() == recv_counts_.size() && recv_counts_.size() == recv_displacements_.size() && recv_displacements_.size() == recv_mpi_types_.size()); // exchange data type sizes // prepare vector with this rank's values std::vector<size_t> send_row(ranks_, 0); // row of send sizes with index = rank for this rank for (size_t i = 0; i < destinations_.size(); ++i) send_row[destinations_[i]] = send_mpi_type_sizes_[i]; // exchange rows between all nodes const size_t data_size = ranks_ * sizeof(size_t); // one row of send_size_matrix_ check_error(MPI_Allgather(send_row.data(), //const void *sendbuf data_size, //int sendcount MPI_BYTE, //MPI_Datatype sendtype send_size_matrix_.data(), //void *recvbuf data_size, 
//int recvcount MPI_BYTE, MPI_COMM_WORLD), // MPI_Comm comm "MPI_Allgather"); } communicator::~communicator() { check_error(MPI_Type_free(&hierarchy_node_mpi_type_), "MPI_Type_free"); for (auto& type : send_mpi_types_) check_error(MPI_Type_free(&type), "MPI_Type_free"); for (auto& type : recv_mpi_types_) check_error(MPI_Type_free(&type), "MPI_Type_free"); } void communicator::barrier() { check_error(MPI_Barrier(MPI_COMM_WORLD), "MPI_Barrier"); } void communicator::neighbour_exchange(void* send_buffer, void* recv_buffer) { // actual data transfer DEBUG_ONLY( std::cout << "communicator::neighbour_exchange(): calling MPI_Neighbor_alltoallw(..) on rank " << rank_ << std::endl; ) check_error(MPI_Neighbor_alltoallw(send_buffer, // const void *sendbuf, send_counts_.data(), // const int sendcounts[], send_displacements_.data(), // send_displacements_.data(), // const MPI_Aint sdispls[], send_mpi_types_.data(), // const MPI_Datatype sendtypes[], recv_buffer, // void *recvbuf, recv_counts_.data(), // const int recvcounts[], recv_displacements_.data(), // const MPI_Aint rdispls[], recv_mpi_types_.data(), // const MPI_Datatype recvtypes[], neighbour_comm_), // MPI_Comm comm) "MPI_Neighbor_alltoallw"); } void communicator::write_neighbour_exchange_table(std::ostream& os) { // NOTE: we produce human readable output here, use the _raw version for further processing //const char delimiter = default_delimiter; const char delimiter = ' '; // formatting std::stringstream entry_format; entry_format << "%9.2f MiB"; // 9-3 digits before the dot std::stringstream entry_sample; entry_sample << boost::format(entry_format.str()) % 0.0; // to get the length std::stringstream first_row_format; // format string for rank header first_row_format << "%" << entry_sample.str().length() << "d"; // header spacing must match entry_format std::string first_col_head = "send_rank"; std::string last_row_begin = " recv_sum"; // same length as above std::string last_col_head = "send_sum"; last_col_head = 
std::string(entry_sample.str().length() - last_col_head.length(), ' ') + last_col_head; // prepend spaces to match entry std::stringstream first_col_format; // format string first_col_format << "%" << first_col_head.length() << "d"; std::stringstream last_col_format; // format string last_col_format << "%" << last_col_head.length() << "d"; auto size_to_mib = [&](size_t size) { std::stringstream ss; ss << (boost::format(entry_format.str()) % (size / 1024.0 / 1024.0)); return ss.str(); }; // header with ranks os << first_col_head; for (rank_t r = 0; r < ranks_; ++r) os << delimiter << boost::format(first_row_format.str()) % r; os << delimiter << last_col_head << "\n"; // rows std::vector<size_t> col_sums(ranks_, 0); for (rank_t r = 0; r < ranks_; ++r) { // rows os << boost::format(first_col_format.str()) % r; // current rank in first column size_t row_sum = 0; for (rank_t c = 0; c < ranks_; ++c) { // columns in each row size_t& current_size = send_size_matrix_.at(r, c); os << delimiter << size_to_mib(current_size); // write value row_sum += current_size; // count row sum of current row col_sums[c] += current_size; // count column sums } os << delimiter << size_to_mib(row_sum) << "\n"; // last column contains row sum } // last row contains column sums, i.e. recv counts, for rank in header of that column os << last_row_begin; size_t data_volume = 0; for (rank_t r = 0; r < ranks_; ++r) { // columns of last row size_t current_col_sum = col_sums[r]; os << delimiter << size_to_mib(current_col_sum); data_volume += current_col_sum; } os << delimiter << size_to_mib(data_volume) << std::endl; } void communicator::write_neighbour_exchange_table_raw(std::ostream& os) { send_size_matrix_.print_raw(os, default_delimiter); } } // namespace heom
Scandy-co/react-360
React360/js/Views/Sphere.js
<gh_stars>1000+ /** * Copyright (c) 2015-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * * @flow */ import * as THREE from 'three'; import merge from '../Utils/merge'; import type GuiSys from '../OVRUI/UIView/GuiSys'; import type {ReactNativeContext} from '../ReactNativeContext'; import RCTBaseMesh from './BaseMesh'; const SphereGeometryCache = {}; function createSphereGeometry( radius: number, heightSegments: number, widthSegments: number, ) { const key = `${radius}:${heightSegments}:${widthSegments}`; const cache = SphereGeometryCache[key]; if (cache) { cache.count++; return cache.geom; } const geometry = new THREE.SphereBufferGeometry( radius, widthSegments, heightSegments, ); SphereGeometryCache[key] = { geom: geometry, count: 1, }; return geometry; } const sphereRayCast = (function() { // avoid create temp objects; const inverseMatrix = new THREE.Matrix4(); const ray = new THREE.Ray(); const intersectionPoint = new THREE.Vector3(); const intersectionPointWorld = new THREE.Vector3(); return function(sphere, raycaster, intersects) { // transform the ray into the space of the sphere inverseMatrix.getInverse(this.matrixWorld); ray.copy(raycaster.ray).applyMatrix4(inverseMatrix); const intersect = ray.intersectSphere(sphere, intersectionPoint); if (intersect === null) { return; } // determine hit location in world space intersectionPointWorld.copy(intersectionPoint); intersectionPointWorld.applyMatrix4(this.matrixWorld); const distance = raycaster.ray.origin.distanceTo(intersectionPointWorld); if (distance < raycaster.near || distance > raycaster.far) { return; } intersects.push({ distance: distance, point: intersectionPointWorld.clone(), object: this, }); }; })(); export default class RCTSphere extends RCTBaseMesh { _radius: number; 
_heightSegments: number; _widthSegments: number; _needsUpdate: boolean; constructor(guiSys: GuiSys, rnctx: ReactNativeContext) { super(guiSys, rnctx); this._radius = 0.5; this._heightSegments = 6; this._widthSegments = 8; this._needsUpdate = false; Object.defineProperty( this.props, 'radius', ({ set: radius => { this._radius = radius; this._needsUpdate = true; }, }: Object), ); Object.defineProperty( this.props, 'heightSegments', ({ set: segments => { this._heightSegments = segments; this._needsUpdate = true; }, }: Object), ); Object.defineProperty( this.props, 'widthSegments', ({ set: segments => { this._widthSegments = segments; this._needsUpdate = true; }, }: Object), ); (this: any)._generateGeometry = this._generateGeometry.bind(this); } frame() { if (this._needsUpdate) { this._needsUpdate = false; this._generateGeometry(); } } _generateGeometry() { const geometry = createSphereGeometry( this._radius, this._widthSegments, this._heightSegments, ); this._setGeometry(geometry); const sphere = new THREE.Sphere(new THREE.Vector3(0, 0, 0), this._radius); this.mesh.raycast = sphereRayCast.bind(this.mesh, sphere); } static describe() { return merge(super.describe(), { // register the properties sent from react to runtime NativeProps: { radius: 'number', widthSegments: 'number', heightSegments: 'number', }, }); } }
hmcts/da-cos-api
src/test/java/uk/gov/hmcts/reform/da/dacase/model/ApplicationTypeTest.java
package uk.gov.hmcts.reform.da.dacase.model;

import org.junit.jupiter.api.Test;

import static org.assertj.core.api.Assertions.assertThat;

/**
 * Unit tests for the {@code ApplicationType} enum's self-identifying
 * predicate helpers.
 */
class ApplicationTypeTest {

    @Test
    void shouldReturnTrueIfNonMolestationOrderType() {
        // NON_MOLESTATION_ORDER must report itself via isNonMolestationOrder().
        assertThat(ApplicationType.NON_MOLESTATION_ORDER.isNonMolestationOrder()).isTrue();
    }

    @Test
    void shouldReturnTrueIfOccupationalOrder() {
        // OCCUPATIONAL_ORDER must report itself via isOccupationalOrder().
        assertThat(ApplicationType.OCCUPATIONAL_ORDER.isOccupationalOrder()).isTrue();
    }
}
njam/puppet-packages
modules/less/spec/init/spec.rb
require 'spec_helper'

# Serverspec acceptance test for the `less` Puppet module: verifies that the
# lesscss compiler was installed and is usable on the provisioned host.
describe 'less' do

  # The compiler is expected to have been installed through npm.
  describe package('less') do
    it { should be_installed.by('npm') }
  end

  # The `lessc` binary must be resolvable on the PATH.
  describe command('which lessc') do
    its(:exit_status) { should eq 0 }
  end

end
bvaudour/carapace-bin
completers/gh_completer/cmd/secret_remove.go
package cmd

import (
	"github.com/rsteube/carapace"
	"github.com/rsteube/carapace-bin/completers/gh_completer/cmd/action"
	"github.com/spf13/cobra"
)

// secret_removeCmd mirrors the `gh secret remove` subcommand so carapace can
// generate shell completions for it; Run is intentionally a no-op because
// this binary only describes the CLI, it does not execute it.
var secret_removeCmd = &cobra.Command{
	Use:   "remove",
	Short: "Remove secrets",
	Run:   func(cmd *cobra.Command, args []string) {},
}

func init() {
	// Standalone: completion for this command is fully handled by carapace.
	carapace.Gen(secret_removeCmd).Standalone()

	// Flags mirroring `gh secret remove`.
	secret_removeCmd.Flags().StringP("env", "e", "", "Remove a secret for an environment")
	secret_removeCmd.Flags().StringP("org", "o", "", "Remove a secret for an organization")
	secret_removeCmd.Flags().BoolP("user", "u", false, "Remove a secret for your user")
	secretCmd.AddCommand(secret_removeCmd)

	// Dynamic completion for flag values: environments and organizations.
	carapace.Gen(secret_removeCmd).FlagCompletion(carapace.ActionMap{
		"env": action.ActionEnvironments(secret_removeCmd),
		"org": action.ActionUsers(secret_removeCmd, action.UserOpts{Organizations: true}),
	})

	// First positional argument: the secret name, scoped by --org/--env.
	carapace.Gen(secret_removeCmd).PositionalCompletion(
		carapace.ActionCallback(func(c carapace.Context) carapace.Action {
			return action.ActionSecrets(secret_removeCmd,
				action.SecretOpts{
					Org: secret_removeCmd.Flag("org").Value.String(),
					Env: secret_removeCmd.Flag("env").Value.String(),
				},
			)
		}),
	)
}
furas/python-code
__scraping__/report.clinic - requests, BS/main.py
import requests
from bs4 import BeautifulSoup
import pandas as pd

# Detail pages on report.clinic to scrape.
links = [
    'https://report.clinic/detail/L_1100170',
    'https://report.clinic/detail/L_3020779',
]

# One dict per scraped page, collected for the final DataFrame.
all_data = list()

for link in links:
    r = requests.get(link)
    soup = BeautifulSoup(r.text,'html.parser')

    # Per-page record: list of tag strings, clinic name, address string.
    data = {
        'tag': [],
        'name': "",
        'address': ""
    }

    # Collect tag labels from the side-menu list items.
    for urls in soup.select(".menu_side_item .list_common .list_item_link .display_flex"):
        tag = urls.get_text(strip=True)
        data['tag'].append(tag)
        print('tag:', tag)

    # The clinic name is assumed to be the page's first <h1> — TODO confirm
    # this holds for all detail pages.
    name = soup.find("h1")
    if name:
        name = name.get_text(strip=True)
        data['name'] = name
        print('name:', name)

    # Find the panel whose headline is the Japanese label for "address"
    # and take the following <p> as the address text. If several panels
    # match, the last one wins (data['address'] is overwritten).
    for a in soup.find_all('div', class_="panel"):
        for b in a.find_all('p', class_="headline_h4", text="住所"):
            print('head:', b.get_text())
            address = b.findNext('p')
            if address:
                address = address.get_text(strip=True, separator=' | ')
                data['address'] = address
                print('address:', address)

    all_data.append(data)

# --- after loop ---

#print(all_data)

df = pd.DataFrame(all_data)

print(df[['address', 'name']])
braveliuchina/spider
src/main/java/cn/cnki/spider/entity/RenderHtmlVO.java
package cn.cnki.spider.entity;

import lombok.Data;

/**
 * Value object describing a render-HTML crawl request.
 * Lombok {@code @Data} generates getters/setters, equals/hashCode and toString.
 */
@Data
public class RenderHtmlVO {

    // Crawl type discriminator — semantics defined by the caller; TODO confirm
    // the valid values against the service that consumes this VO.
    private int crawlType;

    // URL to fetch and render.
    private String inputUrl;

    // Prefix string — presumably used to resolve or filter links; verify
    // against the consuming code.
    private String prefix;
}
silferysky/RogueArcher
RogueEngine/Source/VegetationPillbug.cpp
/* Start Header ************************************************************************/
/*!
\file VegetationPillbug.cpp
\project Exale
\author <NAME>, <EMAIL>.foo, 440002318 (100%)
\par <EMAIL>
\date 3 April,2020
\brief
	This file contains the functions for VegetationPillbug

All content (C) 2020 DigiPen (SINGAPORE) Corporation, all rights reserved.
Reproduction or disclosure of this file or its contents
without the prior written consent of DigiPen Institute of
Technology is prohibited.
*/
/* End Header **************************************************************************/
#include "Precompiled.h"
#include "VegetationPillbug.h"
#include "PlayerStatusManager.h"
//#include "Main.h"	//For g_deltaTime and coordinator

namespace Rogue
{
	// Script component for the vegetation pillbug entity; behaviour is driven
	// by the engine's AI state machine and collision callbacks.
	VegetationPillbug::VegetationPillbug(Entity entity, LogicComponent& logicComponent, StatsComponent& statsComponent)
		: ScriptComponent(entity, logicComponent, statsComponent)
	{
	}

	// Per-frame update while in the AI "active" state. When the player
	// hitchhikes onto this entity: enable physics, swap to the curl sprite,
	// play the curl animation once, freeze player controls and drop to Idle.
	void VegetationPillbug::AIActiveStateUpdate()
	{
		if (!g_engine.m_coordinator.GameIsActive())
			return;

		if (m_entity == PLAYER_STATUS.GetHitchhikedEntity()) // if this entity has been hitchhiked onto
		{
			g_engine.m_coordinator.GetComponent<RigidbodyComponent>(m_entity).setIsStatic(false); // switch off the rigidbody
			g_engine.m_coordinator.GetComponent<SpriteComponent>(m_entity).setTexturePath("Resources/Assets/VegetationPillbugCurl.png");

			auto& animation = g_engine.m_coordinator.GetComponent<AnimationComponent>(m_entity);
			animation.setFrames(5);
			animation.setCurrentFrame(0);
			animation.setEndFrame(4);
			animation.setIsLooping(false); // play the animation only once

			PLAYER_STATUS.FreezeControls();
			m_logicComponent->SetActiveStateBit(static_cast<size_t>(AIState::AIState_Idle));
		}
	}

	/*
	void VegetationPillbug::AIIdleUpdate()
	{
	}
	*/

	// Collision callback: on hitting the ground, detach the player (if still
	// attached), restore controls and queue this entity for deletion.
	// NOTE(review): UnfreezeControls() and AddToDeleteQueue() run on ANY
	// ground contact, even when the player never hitchhiked — confirm this is
	// intended rather than belonging inside the inner if.
	void VegetationPillbug::OnCollisionEnter(Entity other)
	{
		if (g_engine.m_coordinator.GetHierarchyInfo(other).m_tag == "Ground")
		{
			if (m_entity == PLAYER_STATUS.GetHitchhikedEntity()) // if player is still attached
				g_engine.m_coordinator.GetSystem<PlayerControllerSystem>()->Hitchhike(MAX_ENTITIES); // Proper way to get out of hitchhike

			PLAYER_STATUS.UnfreezeControls();
			g_engine.m_coordinator.AddToDeleteQueue(m_entity);
		}
	}
}
goodmind/FlowDefinitelyTyped
flow-types/types/newline-remove_vx.x.x/flow_v0.25.x-/newline-remove.js
<gh_stars>10-100
// Flow type declaration for the `newline-remove` npm package.
declare module "newline-remove" {
  /**
   * Strip all newlines from the given value
   */
  declare function removeNewlines(val: string): string;

  declare module.exports: typeof removeNewlines;
}
jhaapako/tcf
tests/test_rpyc.py
#! /usr/bin/python3
#
# Copyright (c) 2021 Intel Corporation
#
# SPDX-License-Identifier: Apache-2.0
#
# Integration test for TCF's rpyc tunnelling: power on a test target,
# connect via rpyc, run code remotely and compare results with local
# execution, then verify a server-side run_file hook executed.

import hashlib
import os
import time

import commonl.testing
import tcfl.tc

srcdir = os.path.dirname(__file__)
# Spin up a test ttbd server using this test's companion config file.
ttbd = commonl.testing.test_ttbd(
    config_files = [
        # strip to remove the compiled/optimized version -> get source
        os.path.join(srcdir, "conf_%s" % os.path.basename(__file__.rstrip('cd')))
    ],
    errors_ignore = [
        "Traceback"
    ])

@tcfl.tc.target(ttbd.url_spec + " and t0")
class _test(tcfl.tc.tc_c):

    def eval_10(self, target):
        # Power the target on and give the rpyc service time to start.
        with self.subcase("power_on"):
            target.power.on()
            time.sleep(2)
        with self.subcase("connect"):
            # target.certs.get() will cache this in target.tmpdir;
            # when we re-run with the same tempdir, it catches it, so
            # wipe'em. FIXME: use the target's allocid to feed into
            # the tmpdir name to avoid this possible conflict.
            commonl.rm_f(os.path.join(target.tmpdir, "client.default.key"))
            commonl.rm_f(os.path.join(target.tmpdir, "client.default.cert"))
            remote0 = tcfl.tl.rpyc_connect(target, "c0")
            target.report_pass("remote rpyc connects")
        with self.subcase("hashlib_import"):
            # Import hashlib *on the remote side* through rpyc.
            hashlib0 = remote0.modules['hashlib']
            target.report_pass("remote hashlib imports")
        with self.subcase("hash"):
            # Same input hashed locally and remotely must agree.
            h = hashlib.sha512("this is a silly test".encode('ascii'))
            h0 = hashlib0.sha512("this is a silly test".encode('ascii'))
            if h.hexdigest() == h0.hexdigest():
                target.report_pass("remote and local hashes match")
            else:
                target.report_fail("remote and local hashes don't match",
                                   dict(h0 = h0.hexdigest(), h = h.hexdigest()))
        with self.subcase("run_file"):
            # the server has been configfured to touch a file in the
            # container environment upon power on
            try:
                with remote0.builtins.open("/tmp/runthisexecuted",
                                           encoding = "utf-8") as f:
                    expected_content = 'ein belegtes Brot mit Schinken'
                    content = f.read().strip()
                    if content == expected_content:
                        target.report_pass("run_file executed properly")
                    else:
                        target.report_fail(
                            "run_file content doesn't match expected?",
                            dict(
                                expected_content = expected_content,
                                expected_content_type = type(expected_content).__name__,
                                read_content = content,
                                read_content_type = type(content).__name__,
                            ),
                            level = 0)
            except Exception as e:
                target.report_fail(
                    "run_file didn't run? can't open file",
                    dict(exception = e),
                    level = 0)
        with self.subcase("power_off"):
            target.power.off()

    def teardown_90_scb(self):
        # After the test, scan the server log for unexpected issues.
        with self.subcase("server-check"):
            ttbd.check_log_for_issues(self)
joehewitt/devon
devon/web.py
<filename>devon/web.py import sys, os.path import codeop, cgi, datetime, imp, mimetypes, re, signal, thread, threading import traceback, types, urllib, time import BaseHTTPServer, SocketServer import devon.stream, devon.renderers.html, devon.projects # ************************************************************************************************** # Globals global sites, messageEvent, resumeEvent, userEvent, currentProcess, userEventObject, done, server sites = {} fileCache = {} pageCache = {} messageEvent = None resumeEvent = None currentProcess = None done = False server = None # ************************************************************************************************** # Constants webPort = 3800 kConfigFileName = "config.py" # ************************************************************************************************** class WebServer(SocketServer.ThreadingMixIn, BaseHTTPServer.HTTPServer): """Multi-threaded version of the basic web server""" pass class WebRequestHandler(BaseHTTPServer.BaseHTTPRequestHandler): """Handler that is called on a new thread to handle an HTTP request""" __rePath = re.compile("/(:[^/]+)/?(.*?)$") __reCommand = re.compile("/(:[^/]+)/([^\s:]+):(.*?)$") __reArgs = re.compile(r"(?P<name>[^#=&]+)=(?P<value>[^&]*)&?") # Override logging def log_message(self, format, *args): pass # * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * def do_POST(self): self.data = self.rfile.read(int(self.headers["content-length"])) self.do_GET() def do_GET(self): global sites if self.client_address[0] != "127.0.0.1": pass #return self.serveError(401, "Unauthorized") self.__parseHost() self.__parseURL() renderer = devon.renderers.html.HTMLRenderer() self.out = devon.stream.OutStream(self.wfile, renderer) if self.projectPath: self.project = devon.projects.load(self.projectPath) if not self.command and not self.path: self.command = "index" self.path = "" if self.command == "file": 
self.serveProjectFile(self.path) elif self.command: self.serveCommand(self.command) else: self.serveFile(self.path) def __lshift__(self, text): self.out.write(text) return self.out # * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * def getTargetProject(self): if self.path: return devon.projects.load(self.path) else: return self.project def serveCommand(self, command): commandPath = os.path.join(devon.webPath, "%s.py" % command) if os.path.isfile(commandPath): try: moduleName = "devon_command_%s" % re.sub("[\./\\\]", "_", commandPath) module = imp.load_source(moduleName, commandPath, file(commandPath)) if hasattr(module, "main"): module.main(self) except PipeBrokenError, exc: print "Connection was broken" except ProcessBlockingError, exc: self.wfile.write("<div>Another process is already running.</div>") self.wfile.write("<script>parent.runBegin(%d);</script>" % exc.pid) except: print "Error during %s..." % self.path exc = sys.exc_info() print traceback.format_exception(*exc) self.wfile.write("""<pre class="traceback">""") traceback.print_exception(exc[0], exc[1], exc[2], file=self.wfile) self.wfile.write("""</>""") else: self.serveError(404, "Command '%s' not found" % command) def serveProjectFile(self, path): if not self.project.wikiPath: return self.serveError(404, "This project has no wiki") wikiPath = os.path.join(self.project.path, self.project.wikiPath) # print "wikiPath %s" % wikiPath wikiProject = devon.projects.load(wikiPath) filePath = os.path.join(wikiProject.path, path) return self.serveRawFile(filePath) def serveFile(self, path, varNames=None): if path == "/": return self.serveError(404, "Please specify a project path") if path[0] == "/": path = path[1:] fullPath = os.path.join(devon.webPath, path) if os.path.exists(fullPath): self.serveRawFile(fullPath) else: fullPath = self.project.getDocumentPath(self.path) if os.path.isfile(fullPath): self.serveCommand("wiki") else: self.serveError(404, "File '%s' not 
found" % self.path) def serveRawFile(self, path): mimeType, encoding = mimetypes.guess_type(path) if not mimeType: mimeType = "text/plain" self.send_response(200) self.send_header("Content-Type", mimeType) self.end_headers() f = file(path, "rb") stuff = f.read() while stuff: self.wfile.write(stuff) stuff = f.read() f.close() def servePage(self, pagePath, globalVars=None, localVars=None): if not localVars: localVars = {} if not "project" in localVars: localVars["project"] = self.project fullPagePath = os.path.join(devon.webPath, pagePath) if os.path.isfile(fullPagePath): self.send_response(200) self.send_header("Content-type", "text/html") self.end_headers() if fullPagePath in pageCache: page = pageCache[fullPagePath] if page.needsUpdate(): page.update() else: page = WebPage(fullPagePath) pageCache[fullPagePath] = page page.update() page.render(self, globalVars, localVars) else: self.serveError(404, "File '%s' not found" % pagePath) def serveError(self, code, message): self.send_response(code) self.send_header("Content-type", "text/plain") self.end_headers() self.wfile.write(message) def serveText(self, text): self.send_response(200) self.send_header("Content-type", "text/plain") self.end_headers() self.wfile.write(text) def __parseHost(self): host = self.headers.getheader("Host") if ":" in host: host, port = host.split(":") self.host = host def __parseURL(self): source = urllib.unquote(self.path) self.projectPath = "" self.command = "" self.path = "" self.args = {} m = self.__reCommand.match(source) if m: self.projectPath = m.groups()[0].replace(":", "/") self.command = m.groups()[1] source = m.groups()[2] else: m = self.__rePath.match(source) if m: self.projectPath = m.groups()[0].replace(":", "/") source = m.groups()[1] else: source = source[1:] index = source.find("?") if index == -1: if source == "/": self.path = "" else: self.path = source else: self.path = source[0:index] source = source[index:] if source and source[0] == "?": m = self.__reArgs.match(source, 
1) while m: gd = m.groupdict() self.args[gd["name"]] = gd["value"] lastOffset = m.end() m = self.__reArgs.match(source, m.end()) if not m: source = source[lastOffset:] # ************************************************************************************************** class WebPage: __reBeginCode = re.compile("<\%") __reEndCode = re.compile("\%>") def __init__(self, path): self.path = path self.lastUpdate = None self.blocks = None def needsUpdate(self): fileTime = datetime.datetime.fromtimestamp(os.stat(self.path).st_mtime) return fileTime > self.lastUpdate def update(self): self.blocks = [] if not os.path.isfile(self.path): return self.lastUpdate = datetime.datetime.now() f = file(self.path) text = f.read() f.close() index = 0 m = self.__reBeginCode.search(text) while m: m2 = self.__reEndCode.search(text, m.end()) if not m2: raise Exception("Syntax Error: Unclosed code block") textBlock = text[index:m.start()] self.blocks.append(textBlock) codeBlock = text[m.end():m2.start()].strip() code = codeop.compile_command(codeBlock, self.path) self.blocks.append(code) index = m2.end() m = self.__reBeginCode.search(text, index) block = text[index:] self.blocks.append(block) def render(self, request, globalVars=None, localVars=None): for block in self.blocks: if isinstance(block, types.CodeType): exec(block, globalVars, localVars) else: request.wfile.write(block) class WebProcess: def run(): pass # ************************************************************************************************** def serve(): global sites, done, messageEvent, server #signal.signal(signal.SIGINT, terminate) sites = loadSites() devon.projects.loadExternalProjects() messageEvent = threading.Event() # Run the server on a separate thread thread.start_new_thread(runServer, ()) # In a third thread, wait for requests to spawn a process and direct its output through a server # request socket. We can't do this in the main thread because we need it to catch signals. 
(The # signal module only delivers signals to the main thread, and not during some blocking system # calls like event waits, which we use in this third thread.) thread.start_new_thread(awaitMessages, ()) while not done: try: time.sleep(0.3) except IOError: # On Windows, Python allows sleep to be interrupted but throws an exc; ignore it pass messageEvent.set() server.server_close() # XXXblake Wait on other threads to shutdown cleanly... print "Saving projects..." devon.projects.shutdownProjects() def terminate(sig_num, frame): # XXXblake stopServer() def runServer(): print "Starting server..." global server server = WebServer(("", webPort), WebRequestHandler) server.allow_reuse_address = True server.serve_forever() print "Serve stopped" def stopServer(): global done done = True messageEvent.set() def awaitMessages(): global messageEvent, resumeEvent, userEvent, currentProcess resumeEvent = threading.Event() userEvent = threading.Event() while 1: messageEvent.wait() if done: break if currentProcess: try: currentProcess.run() except PipeBrokenError, exc: print "Connection was broken" messageEvent.clear() resumeEvent.set() def runProcess(process): global messageEvent, resumeEvent, currentProcess if currentProcess: raise ProcessBlockingError(currentProcess.pid) # Unblock the main thread so it can run the process currentProcess = process messageEvent.set() # Wait for the spawned process to complete before exiting resumeEvent.wait() resumeEvent.clear() currentProcess = None def loadConfigFile(filename): path = os.path.join(os.path.expanduser(devon.userPath), filename) if not os.path.isfile(path): return None name = filename[:filename.rfind(".")] return imp.load_source(name, path, file(path)) def loadSites(): config = loadConfigFile(kConfigFileName) sites = {} if "sites" in dir(config): for siteName in config.sites: projectPaths = config.sites[siteName] if not (isinstance(projectPaths, tuple) or isinstance(projectPaths, list)): projectPaths = [projectPaths] projects = [] 
for projectPath in projectPaths: project = devon.projects.load(projectPath) if project: projects.append(project) # Load the project's child projects here, too. We would do this anyways when the # project catalog is written, but if we do it here we handle the case where a # site is already loaded (e.g. from a previous session) before Devon is started. project.getChildProjects(True) sites[siteName] = projects return sites def postUserEvent(event): global userEventObject userEventObject = event userEvent.set() def waitForUserEvent(): userEvent.wait() userEvent.clear() global userEventObject return userEventObject # ************************************************************************************************** # Helper functions used by web/{build.py, buildTest.py}. Could probably find a better place for them. from devon.tags import * import devon.make, sys def buildProcess(request, action): # For some reason, Firefox ignores CSS files unless we pause before entering the build phase if sys.platform == "win32": import time time.sleep(0.1) request << Header(level=1) << "Building..." << Close << Flush if "config" in request.args: config = request.args["config"] else: config = "debug" request << Script << "parent." << action << "Begin();" << Close try: project = request.getTargetProject() result = devon.make.make(project, action, request.out, config) request << Script << "parent." << action << "End();" << Close except: request << Script << "parent." << action << "End();" << Close raise if result == 0: request << Block("resultBox result-success") << "Success" << Close else: request << Block("resultBox result-failure") << "Errors occurred" << Close # ************************************************************************************************** class PipeBrokenError(Exception): pass class ProcessBlockingError(Exception): def __init__(self, pid): self.pid = pid
django-oscar/django-oscar-datacash
tests/urls.py
from django.conf.urls import *

from datacash.dashboard.app import application

# URL configuration for the datacash test project.
# NOTE(review): `patterns('', ...)` and bare-tuple URL entries were removed in
# Django 1.10 — this file targets an older Django; confirm before upgrading.
urlpatterns = patterns('',
    # Include dashboard URLs
    (r'^dashboard/datacash/', include(application.urls)),
    (r'^datacash/', include('datacash.urls')),
)
cooperative-humans/localorbit
app/controllers/api/v1/filters_controller.rb
<gh_stars>10-100
module Api
  module V1
    # Serves the filter options (supplier organizations or product categories)
    # shown when browsing a market's catalog.
    class FiltersController < ApplicationController
      before_action :require_selected_market
      before_action :require_market_open

      def index
        # Note: ideally, we want to return the categories that have viable products for
        # sale, and only the categories that have viable products for sale.
        # Unfortunately, determining whether or not a product is for sale is an expensive
        # query that depends on a chain of many-to-many relationships and dynamic data.
        # (It depends on the user's current market and org, it depends on what prices
        # are available for a product, it depends on a product's lots, it depends on the
        # product's delivery schedule, etc.)
        # This is a compromise between accuracy and speed: all of the products listed for
        # a market, but without checking pricing and inventory data.
        if params[:parent_id] && params[:parent_id] === "suppliers"
          # Special parent id "suppliers": list sellable, active organizations.
          filters = current_market
                    .organizations
                    .where(can_sell: true, active: true).order('name')
        elsif params[:parent_id]
          # Second-level categories under the given parent, restricted to
          # categories that actually have products in this market.
          filters = Category
                    .where("id in (#{secondary_subquery.to_sql})")
                    .where(parent_id: params[:parent_id])
                    .order('lft')
        else
          # Top-level categories that have products in this market.
          filters = Category.where("id in (#{top_level_subquery.to_sql})").order('lft')
        end
        render :json => {filters: filters.select(:name, :id)}
      end

      private

      # Relation yielding the distinct second-level category ids of products
      # sold by this market's organizations (used as a SQL subquery).
      def secondary_subquery
        Product
          .select(:second_level_category_id)
          .where(organization: current_market.organizations.select(:id).to_a)
          .uniq
      end

      # Relation yielding the distinct top-level category ids of products
      # sold by this market's organizations (used as a SQL subquery).
      def top_level_subquery
        Product
          .select(:top_level_category_id)
          .where(organization: current_market.organizations.select(:id).to_a)
          .uniq
      end
    end
  end
end
simonCor/poor-mans-ducky-pad
firmware/Inc/animations.h
<reponame>simonCor/poor-mans-ducky-pad
/*
 * animations.h — public interface of the per-key LED animation engine
 * for the STM32F4-based macro pad firmware.
 */
#ifndef __ANIMATIONS_H
#define __ANIMATIONS_H

#ifdef __cplusplus
 extern "C" {
#endif

#include "stm32f4xx_hal.h"

/* Number of color channels (R, G, B). */
#define THREE 3

/* Animation type ids stored in led_animation.animation_type. */
#define ANIMATION_NO_ANIMATION 0
#define ANIMATION_CROSS_FADE 1
#define ANIMATION_FULLY_ON 2

/* Default background color (RGB, 0-255). */
#define DEFAULT_BG_RED 64
#define DEFAULT_BG_GREEN 16
#define DEFAULT_BG_BLUE 255

/* Default key-down highlight color (RGB, 0-255). */
#define DEFAULT_KD_RED 255
#define DEFAULT_KD_GREEN 255
#define DEFAULT_KD_BLUE 255

/* Per-LED animation state. */
typedef struct
{
	uint32_t animation_start;      /* start timestamp of the current animation */
	uint8_t animation_type;        /* one of the ANIMATION_* ids above */
	uint8_t animation_duration;    /* duration — units defined by the handler; TODO confirm (ms vs ticks) */
	uint8_t index;                 /* LED/key index this state belongs to */
	uint8_t target_color[THREE];   /* color being animated towards (RGB) */
	double current_color[THREE];   /* current interpolated color (RGB) */
	double step[THREE];            /* per-update increment for each channel */
} led_animation;

/* Advance all running animations; call periodically from the main loop. */
void led_animation_handler(void);
/* Initialize animation state and LEDs to defaults. */
void anime_init(void);
/* Start/end the key-down animation for key `idx`. */
void keydown_anime_start(uint8_t idx);
void keydown_anime_end(uint8_t idx);
/* Display an error indication for the given stage. */
void error_animation(uint8_t stage);
/* Change the background color. */
void change_bg(void);
/* Turn all neopixel LEDs off. */
void neopixel_off(void);

#ifdef __cplusplus
}
#endif

#endif
joyvuu-dave/comeals-rewrite
app/serializers/rotation_serializer.rb
# == Schema Information # # Table name: rotations # # id :bigint not null, primary key # color :string not null # description :string default(""), not null # place_value :integer # residents_notified :boolean default(FALSE), not null # start_date :date # created_at :datetime not null # updated_at :datetime not null # community_id :bigint not null # # Indexes # # index_rotations_on_community_id (community_id) # # Foreign Keys # # fk_rails_... (community_id => communities.id) # class RotationSerializer < ActiveModel::Serializer attributes :id, :type, :start, :end, :color, :title, :url def id object.cache_key_with_version end def type object.class.to_s end def start object.meals.order(:date).first.date + 1.minute end def end object.meals.order(:date).last.date + 1.day - 1.minute # b/c ReactBigCalendar date ranges are exclusive end def title "Rotation #{object.place_value}" end def url "rotations/show/#{object.id}" end end
icaynia/SoundKi
pracler/src/main/java/com/icaynia/pracler/Fragment/RootFragmentPos1.java
package com.icaynia.pracler.Fragment; import android.os.Bundle; import android.support.v4.app.Fragment; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.FrameLayout; import android.widget.ListView; import com.icaynia.pracler.R; /** * Created by icaynia on 16/03/2017. */ public class RootFragmentPos1 extends Fragment { private View v; private FrameLayout framelayout; // TODO VIEW private ListView listView; @Override public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { v = inflater.inflate(R.layout.fragment_root_pos_1, container, false); setHasOptionsMenu(true); viewInitialize(); prepare(); return v; } public void viewInitialize() { framelayout = (FrameLayout) v.findViewById(R.id.framelayout); } public void prepare() { getFragmentManager().beginTransaction() .add(R.id.framelayout, new MyMusicListFragment()) .commit(); } }
faturita/mne-python
mne/include/qt/QtScriptTools/5.6.2/QtScriptTools/private/qscriptdebuggercommandschedulerfrontend_p.h
/**************************************************************************** ** ** Copyright (C) 2015 The Qt Company Ltd. ** Contact: http://www.qt.io/licensing/ ** ** This file is part of the QtSCriptTools module of the Qt Toolkit. ** ** $QT_BEGIN_LICENSE:LGPL21$ ** Commercial License Usage ** Licensees holding valid commercial Qt licenses may use this file in ** accordance with the commercial license agreement provided with the ** Software or, alternatively, in accordance with the terms contained in ** a written agreement between you and The Qt Company. For licensing terms ** and conditions see http://www.qt.io/terms-conditions. For further ** information use the contact form at http://www.qt.io/contact-us. ** ** GNU Lesser General Public License Usage ** Alternatively, this file may be used under the terms of the GNU Lesser ** General Public License version 2.1 or version 3 as published by the Free ** Software Foundation and appearing in the file LICENSE.LGPLv21 and ** LICENSE.LGPLv3 included in the packaging of this file. Please review the ** following information to ensure the GNU Lesser General Public License ** requirements will be met: https://www.gnu.org/licenses/lgpl.html and ** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html. ** ** As a special exception, The Qt Company gives you certain additional ** rights. These rights are described in The Qt Company LGPL Exception ** version 1.1, included in the file LGPL_EXCEPTION.txt in this package. ** ** $QT_END_LICENSE$ ** ****************************************************************************/ #ifndef QSCRIPTDEBUGGERCOMMANDSCHEDULERFRONTEND_P_H #define QSCRIPTDEBUGGERCOMMANDSCHEDULERFRONTEND_P_H // // W A R N I N G // ------------- // // This file is not part of the Qt API. It exists purely as an // implementation detail. This header file may change from version to // version without notice, or even be removed. // // We mean it. 
// #include <QtCore/qobjectdefs.h> #include <QtCore/qstring.h> QT_BEGIN_NAMESPACE class QScriptDebuggerCommandSchedulerInterface; class QScriptDebuggerResponseHandlerInterface; class QScriptDebuggerCommand; class QScriptDebuggerValue; class QScriptBreakpointData; class Q_AUTOTEST_EXPORT QScriptDebuggerCommandSchedulerFrontend { public: QScriptDebuggerCommandSchedulerFrontend( QScriptDebuggerCommandSchedulerInterface *scheduler, QScriptDebuggerResponseHandlerInterface *responseHandler); ~QScriptDebuggerCommandSchedulerFrontend(); // execution control int scheduleInterrupt(); int scheduleContinue(); int scheduleStepInto(int count = 1); int scheduleStepOver(int count = 1); int scheduleStepOut(); int scheduleRunToLocation(const QString &fileName, int lineNumber); int scheduleRunToLocation(qint64 scriptId, int lineNumber); int scheduleForceReturn(int contextIndex, const QScriptDebuggerValue &value); // breakpoints int scheduleSetBreakpoint(const QString &fileName, int lineNumber); int scheduleSetBreakpoint(const QScriptBreakpointData &data); int scheduleDeleteBreakpoint(int id); int scheduleDeleteAllBreakpoints(); int scheduleGetBreakpoints(); int scheduleGetBreakpointData(int id); int scheduleSetBreakpointData(int id, const QScriptBreakpointData &data); // scripts int scheduleGetScripts(); int scheduleGetScriptData(qint64 id); int scheduleScriptsCheckpoint(); int scheduleGetScriptsDelta(); int scheduleResolveScript(const QString &fileName); // stack int scheduleGetBacktrace(); int scheduleGetContextCount(); int scheduleGetContextState(int contextIndex); int scheduleGetContextInfo(int contextIndex); int scheduleGetContextId(int contextIndex); int scheduleGetThisObject(int contextIndex); int scheduleGetActivationObject(int contextIndex); int scheduleGetScopeChain(int contextIndex); int scheduleContextsCheckpoint(); int scheduleGetPropertyExpressionValue(int contextIndex, int lineNumber, const QStringList &path); int scheduleGetCompletions(int contextIndex, const 
QStringList &path); // iteration int scheduleNewScriptValueIterator(const QScriptDebuggerValue &object); int scheduleGetPropertiesByIterator(int id, int count); int scheduleDeleteScriptValueIterator(int id); // evaluation int scheduleEvaluate(int contextIndex, const QString &program, const QString &fileName = QString(), int lineNumber = 1); int scheduleScriptValueToString(const QScriptDebuggerValue &value); int scheduleSetScriptValueProperty(const QScriptDebuggerValue &object, const QString &name, const QScriptDebuggerValue &value); int scheduleClearExceptions(); int scheduleNewScriptObjectSnapshot(); int scheduleScriptObjectSnapshotCapture(int id, const QScriptDebuggerValue &object); int scheduleDeleteScriptObjectSnapshot(int id); private: int scheduleCommand(const QScriptDebuggerCommand &command); QScriptDebuggerCommandSchedulerInterface *m_scheduler; QScriptDebuggerResponseHandlerInterface *m_responseHandler; Q_DISABLE_COPY(QScriptDebuggerCommandSchedulerFrontend) }; QT_END_NAMESPACE #endif
ThalesGroup/wrangler
wrangler-core/src/main/java/co/cask/wrangler/expression/EL.java
<reponame>ThalesGroup/wrangler<filename>wrangler-core/src/main/java/co/cask/wrangler/expression/EL.java /* * Copyright © 2017 <NAME>, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package co.cask.wrangler.expression; import co.cask.cdap.api.common.Bytes; import co.cask.functions.DDL; import co.cask.functions.DataQuality; import co.cask.functions.Dates; import co.cask.functions.GeoFences; import co.cask.functions.Global; import co.cask.functions.JSON; import com.google.common.base.Joiner; import com.google.common.base.Strings; import org.apache.commons.jexl3.JexlBuilder; import org.apache.commons.jexl3.JexlEngine; import org.apache.commons.jexl3.JexlException; import org.apache.commons.jexl3.JexlScript; import org.apache.commons.lang.StringUtils; import org.apache.commons.lang3.StringEscapeUtils; import org.apache.commons.logging.Log; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.Arrays; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; /** * This class <code>EL</code> is a Expression Language Handler. 
*/ public final class EL { private static final Logger LOG = LoggerFactory.getLogger(EL.class); private Set<String> variables = new HashSet<>(); private final JexlEngine engine; private JexlScript script = null; public EL(ELRegistration registration) { engine = new JexlBuilder() .namespaces(registration.functions()) .silent(false) .cache(1024) .strict(true) .logger(new NullLogger()) .create(); } public void compile(String expression) throws ELException { variables.clear(); try { script = engine.createScript(expression); Set<List<String>> varSet = script.getVariables(); for (List<String> vars : varSet) { variables.add(Joiner.on(".").join(vars)); } } catch (JexlException e) { if (e.getCause() != null) { throw new ELException(e.getCause().getMessage()); } else { throw new ELException(e.getMessage()); } } catch (Exception e) { throw new ELException(e.getMessage()); } } public Set<String> variables() { return variables; } public ELResult execute(ELContext context, boolean nullMissingFields) throws ELException { try { if(nullMissingFields) { for(String variable : variables) { if (!context.has(variable)) { context.add(variable, null); } } } Object value = script.execute(context); ELResult variable = new ELResult(value); return variable; } catch (JexlException e) { // Generally JexlException wraps the original exception, so it's good idea // to check if there is a inner exception, if there is wrap it in 'DirectiveExecutionException' // else just print the error message. if (e.getCause() != null) { throw new ELException(e.getCause().getMessage()); } else { throw new ELException(e.getMessage()); } } catch (NumberFormatException e) { throw new ELException("Type mismatch. Change type of constant " + "or convert to right data type using conversion functions available. 
Reason : " + e.getMessage()); } catch (Exception e) { if (e.getCause() != null) { throw new ELException(e.getCause().getMessage()); } else { throw new ELException(e.getMessage()); } } } public ELResult execute(ELContext context) throws ELException { return execute(context, true); } /** * @return List of registered functions. */ public static final class DefaultFunctions implements ELRegistration { @Override public Map<String, Object> functions() { Map<String, Object> functions = new HashMap<>(); functions.put(null, Global.class); functions.put("date", Dates.class); functions.put("json", JSON.class); functions.put("math", Math.class); functions.put("string", StringUtils.class); functions.put("strings", Strings.class); functions.put("escape", StringEscapeUtils.class); functions.put("bytes", Bytes.class); functions.put("arrays", Arrays.class); functions.put("dq", DataQuality.class); functions.put("ddl", DDL.class); functions.put("geo", GeoFences.class); return functions; } } private final class NullLogger implements Log { @Override public void debug(Object o) { } @Override public void debug(Object o, Throwable throwable) { } @Override public void error(Object o) { } @Override public void error(Object o, Throwable throwable) { } @Override public void fatal(Object o) { } @Override public void fatal(Object o, Throwable throwable) { } @Override public void info(Object o) { } @Override public void info(Object o, Throwable throwable) { } @Override public boolean isDebugEnabled() { return false; } @Override public boolean isErrorEnabled() { return false; } @Override public boolean isFatalEnabled() { return false; } @Override public boolean isInfoEnabled() { return false; } @Override public boolean isTraceEnabled() { return false; } @Override public boolean isWarnEnabled() { return false; } @Override public void trace(Object o) { } @Override public void trace(Object o, Throwable throwable) { } @Override public void warn(Object o) { } @Override public void warn(Object o, 
Throwable throwable) { } } }
SURGroup/UncertaintyQuantification
src/UQpy/distributions/__init__.py
<reponame>SURGroup/UncertaintyQuantification # pylint: disable=wildcard-import from UQpy.distributions.baseclass import * from UQpy.distributions.copulas import * from UQpy.distributions.collection import * from . import baseclass, copulas, collection
dupliaka/droolsjbpm-integration
kie-plugins-testing/src/test/java/org/drools/compiler/xpath/tobeinstrumented/InjectReactiveMojoConfigTest.java
package org.drools.compiler.xpath.tobeinstrumented; import java.util.Arrays; import java.util.List; import org.drools.core.phreak.ReactiveObject; import org.junit.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import static org.junit.Assert.*; import static org.kie.maven.plugin.InjectReactiveMojo.convertAllToPkgRegExps; import static org.kie.maven.plugin.InjectReactiveMojo.isPackageNameIncluded; public class InjectReactiveMojoConfigTest { private static Logger logger = LoggerFactory.getLogger(InjectReactiveMojoConfigTest.class); @Test public void testRegexpForPackagesDefault() { String[] inputConfig = new String[]{"*"}; List<String> config = convertAllToPkgRegExps(inputConfig); logger.info(config.toString()); assertTrue(isPackageNameIncluded(Object.class.getPackage().getName(), config)); assertTrue(isPackageNameIncluded(ReactiveObject.class.getPackage().getName(), config)); assertTrue(isPackageNameIncluded("xyz.my", config)); } @Test public void testRegexpForPackagesSingleNoStars() { String[] inputConfig = new String[]{"org.drools"}; List<String> config = convertAllToPkgRegExps(inputConfig); logger.info(config.toString()); assertFalse(isPackageNameIncluded(Object.class.getPackage().getName(), config)); assertFalse(isPackageNameIncluded(ReactiveObject.class.getPackage().getName(), config)); assertFalse(isPackageNameIncluded("xyz.my", config)); } @Test public void testRegexpForPackagesMultipleNoStars() { String[] inputConfig = new String[]{"org.drools", "xyz.my"}; List<String> config = convertAllToPkgRegExps(inputConfig); logger.info(config.toString()); assertFalse(isPackageNameIncluded(Object.class.getPackage().getName(), config)); assertFalse(isPackageNameIncluded(ReactiveObject.class.getPackage().getName(), config)); assertTrue(isPackageNameIncluded("xyz.my", config)); } @Test public void testRegexpForPackagesSingleStars() { String[] inputConfig = new String[]{"org.drools.*"}; List<String> config = convertAllToPkgRegExps(inputConfig); 
logger.info(config.toString()); assertFalse(isPackageNameIncluded(Object.class.getPackage().getName(), config)); assertTrue(isPackageNameIncluded(ReactiveObject.class.getPackage().getName(), config)); assertFalse(isPackageNameIncluded("xyz.my", config)); } @Test public void testRegexpForPackagesMultipleStars() { String[] inputConfig = new String[]{"org.drools.*", "xyz.my.*"}; List<String> config = convertAllToPkgRegExps(inputConfig); logger.info(config.toString()); assertFalse(isPackageNameIncluded(Object.class.getPackage().getName(), config)); assertTrue(isPackageNameIncluded(ReactiveObject.class.getPackage().getName(), config)); assertTrue(isPackageNameIncluded("xyz.my", config)); } @Test public void testRegexpForPackagesCheckPart() { String[] inputConfig = new String[]{"my"}; List<String> config = convertAllToPkgRegExps(inputConfig); logger.info(config.toString()); assertFalse(isPackageNameIncluded(Object.class.getPackage().getName(), config)); assertFalse(isPackageNameIncluded(ReactiveObject.class.getPackage().getName(), config)); assertFalse(isPackageNameIncluded("xyz.my", config)); } @Test public void testRegexpForPackagesCheckNaming() { String[] inputConfig = new String[]{"org.drools", "to.instrument.*"}; List<String> config = convertAllToPkgRegExps(inputConfig); logger.info(config.toString()); assertFalse(isPackageNameIncluded(Object.class.getPackage().getName(), config)); assertFalse(isPackageNameIncluded(ReactiveObject.class.getPackage().getName(), config)); assertFalse(isPackageNameIncluded("xyz.my", config)); assertTrue(isPackageNameIncluded("to.instrument", config)); assertFalse(isPackageNameIncluded("to.not.instrument", config)); } }
amcclead7336/Enterprise_Data_Science_Final
venv/lib/python3.8/site-packages/vsts/gallery/v4_1/models/extension_acquisition_request.py
# -------------------------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. # -------------------------------------------------------------------------------------------- # Generated file, DO NOT EDIT # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------------------------- from msrest.serialization import Model class ExtensionAcquisitionRequest(Model): """ExtensionAcquisitionRequest. :param assignment_type: How the item is being assigned :type assignment_type: object :param billing_id: The id of the subscription used for purchase :type billing_id: str :param item_id: The marketplace id (publisherName.extensionName) for the item :type item_id: str :param operation_type: The type of operation, such as install, request, purchase :type operation_type: object :param properties: Additional properties which can be added to the request. 
:type properties: :class:`object <gallery.v4_1.models.object>` :param quantity: How many licenses should be purchased :type quantity: int :param targets: A list of target guids where the item should be acquired (installed, requested, etc.), such as account id :type targets: list of str """ _attribute_map = { 'assignment_type': {'key': 'assignmentType', 'type': 'object'}, 'billing_id': {'key': 'billingId', 'type': 'str'}, 'item_id': {'key': 'itemId', 'type': 'str'}, 'operation_type': {'key': 'operationType', 'type': 'object'}, 'properties': {'key': 'properties', 'type': 'object'}, 'quantity': {'key': 'quantity', 'type': 'int'}, 'targets': {'key': 'targets', 'type': '[str]'} } def __init__(self, assignment_type=None, billing_id=None, item_id=None, operation_type=None, properties=None, quantity=None, targets=None): super(ExtensionAcquisitionRequest, self).__init__() self.assignment_type = assignment_type self.billing_id = billing_id self.item_id = item_id self.operation_type = operation_type self.properties = properties self.quantity = quantity self.targets = targets
fadi-alkhoury/coin-or-cbc-with-cmake
Cbc/Clp/examples/testGub2.cpp
<filename>Cbc/Clp/examples/testGub2.cpp /* $Id: testGub2.cpp 2278 2017-10-02 09:51:14Z forrest $ */ // Copyright (C) 2003, International Business Machines // Corporation and others. All Rights Reserved. // This code is licensed under the terms of the Eclipse Public License (EPL). #include "ClpSimplex.hpp" #include "ClpGubDynamicMatrix.hpp" #include "ClpPrimalColumnSteepest.hpp" #include "CoinSort.hpp" #include "CoinHelperFunctions.hpp" #include "CoinTime.hpp" #include "CoinMpsIO.hpp" int main(int argc, const char *argv[]) { #if COIN_BIG_INDEX<2 ClpSimplex model; int status; int maxIts = 0; int maxFactor = 100; if (argc < 2) { #if defined(SAMPLEDIR) status = model.readMps(SAMPLEDIR "/p0033.mps", true); #else fprintf(stderr, "Do not know where to find sample MPS files.\n"); exit(1); #endif } else status = model.readMps(argv[1]); if (status) { printf("errors on input\n"); exit(77); } if (argc > 2) { maxFactor = atoi(argv[2]); printf("max factor %d\n", maxFactor); } if (argc > 3) { maxIts = atoi(argv[3]); printf("max its %d\n", maxIts); } // For now scaling off model.scaling(0); if (maxIts) { // Do partial dantzig ClpPrimalColumnSteepest dantzig(5); model.setPrimalColumnPivotAlgorithm(dantzig); //model.messageHandler()->setLogLevel(63); model.setFactorizationFrequency(maxFactor); model.setMaximumIterations(maxIts); model.primal(); if (!model.status()) exit(1); } // find gub int numberRows = model.numberRows(); int * gubStart = new int[numberRows+1]; int * gubEnd = new int[numberRows]; int * which = new int[numberRows]; int * whichGub = new int[numberRows]; int numberColumns = model.numberColumns(); int * mark = new int[numberColumns]; int iRow, iColumn; // delete variables fixed to zero const double * columnLower = model.columnLower(); const double * columnUpper = model.columnUpper(); int numberDelete = 0; for (iColumn = 0; iColumn < numberColumns; iColumn++) { if (columnUpper[iColumn] == 0.0 && columnLower[iColumn] == 0.0) mark[numberDelete++] = iColumn; } if 
(numberDelete) { model.deleteColumns(numberDelete, mark); numberColumns -= numberDelete; columnLower = model.columnLower(); columnUpper = model.columnUpper(); #if 0 CoinMpsIO writer; writer.setMpsData(*model.matrix(), COIN_DBL_MAX, model.getColLower(), model.getColUpper(), model.getObjCoefficients(), (const char*) 0 /*integrality*/, model.getRowLower(), model.getRowUpper(), NULL, NULL); writer.writeMps("cza.mps", 0, 0, 1); #endif } double * lower = new double[numberRows]; double * upper = new double[numberRows]; const double * rowLower = model.rowLower(); const double * rowUpper = model.rowUpper(); for (iColumn = 0; iColumn < numberColumns; iColumn++) mark[iColumn] = -1; CoinPackedMatrix * matrix = model.matrix(); // get row copy CoinPackedMatrix rowCopy = *matrix; rowCopy.reverseOrdering(); const int * column = rowCopy.getIndices(); const int * rowLength = rowCopy.getVectorLengths(); const CoinBigIndex * rowStart = rowCopy.getVectorStarts(); const double * element = rowCopy.getElements(); int putGub = numberRows; int putNonGub = numberRows; int * rowIsGub = new int [numberRows]; for (iRow = numberRows - 1; iRow >= 0; iRow--) { bool gubRow = true; int first = numberColumns + 1; int last = -1; for (int j = rowStart[iRow]; j < rowStart[iRow] + rowLength[iRow]; j++) { if (element[j] != 1.0) { gubRow = false; break; } else { int iColumn = column[j]; if (mark[iColumn] >= 0) { gubRow = false; break; } else { last = CoinMax(last, iColumn); first = CoinMin(first, iColumn); } } } if (last - first + 1 != rowLength[iRow] || !gubRow) { which[--putNonGub] = iRow; rowIsGub[iRow] = 0; } else { for (int j = rowStart[iRow]; j < rowStart[iRow] + rowLength[iRow]; j++) { int iColumn = column[j]; mark[iColumn] = iRow; } rowIsGub[iRow] = -1; putGub--; gubStart[putGub] = first; gubEnd[putGub] = last + 1; lower[putGub] = rowLower[iRow]; upper[putGub] = rowUpper[iRow]; whichGub[putGub] = iRow; } } int numberNonGub = numberRows - putNonGub; int numberGub = numberRows - putGub; if (numberGub 
> 0) { printf("** %d gub rows\n", numberGub); int numberNormal = 0; const int * row = matrix->getIndices(); const int * columnLength = matrix->getVectorLengths(); const CoinBigIndex * columnStart = matrix->getVectorStarts(); const double * elementByColumn = matrix->getElements(); int numberElements = 0; bool doLower = false; bool doUpper = false; for (iColumn = 0; iColumn < numberColumns; iColumn++) { if (mark[iColumn] < 0) { mark[numberNormal++] = iColumn; } else { numberElements += columnLength[iColumn]; if (columnLower[iColumn] != 0.0) doLower = true; if (columnUpper[iColumn] < 1.0e20) doUpper = true; } } if (!numberNormal) { printf("Putting back one gub row to make non-empty\n"); for (iColumn = gubStart[putGub]; iColumn < gubEnd[putGub]; iColumn++) mark[numberNormal++] = iColumn; putGub++; numberGub--; } ClpSimplex model2(&model, numberNonGub, which + putNonGub, numberNormal, mark); int numberGubColumns = numberColumns - numberNormal; // sort gubs so monotonic int * which = new int[numberGub]; int i; for (i = 0; i < numberGub; i++) which[i] = i; CoinSort_2(gubStart + putGub, gubStart + putGub + numberGub, which); int * temp1 = new int [numberGub]; for (i = 0; i < numberGub; i++) { int k = which[i]; temp1[i] = gubEnd[putGub+k]; } memcpy(gubEnd + putGub, temp1, numberGub * sizeof(int)); delete [] temp1; double * temp2 = new double [numberGub]; for (i = 0; i < numberGub; i++) { int k = which[i]; temp2[i] = lower[putGub+k]; } memcpy(lower + putGub, temp2, numberGub * sizeof(double)); for (i = 0; i < numberGub; i++) { int k = which[i]; temp2[i] = upper[putGub+k]; } memcpy(upper + putGub, temp2, numberGub * sizeof(double)); delete [] temp2; delete [] which; numberElements -= numberGubColumns; int * start2 = new int[numberGubColumns+1]; int * row2 = new int[numberElements]; double * element2 = new double[numberElements]; double * cost2 = new double [numberGubColumns]; double * lowerColumn2 = NULL; if (doLower) { lowerColumn2 = new double [numberGubColumns]; 
CoinFillN(lowerColumn2, numberGubColumns, 0.0); } double * upperColumn2 = NULL; if (doUpper) { upperColumn2 = new double [numberGubColumns]; CoinFillN(upperColumn2, numberGubColumns, COIN_DBL_MAX); } numberElements = 0; int numberNonGubRows = 0; for (iRow = 0; iRow < numberRows; iRow++) { if (!rowIsGub[iRow]) rowIsGub[iRow] = numberNonGubRows++; } numberColumns = 0; gubStart[0] = 0; start2[0] = 0; const double * cost = model.objective(); for (int iSet = 0; iSet < numberGub; iSet++) { int iStart = gubStart[iSet+putGub]; int iEnd = gubEnd[iSet+putGub]; for (int k = iStart; k < iEnd; k++) { cost2[numberColumns] = cost[k]; if (columnLower[k]) lowerColumn2[numberColumns] = columnLower[k]; if (columnUpper[k] < 1.0e20) upperColumn2[numberColumns] = columnUpper[k]; for (int j = columnStart[k]; j < columnStart[k] + columnLength[k]; j++) { int iRow = rowIsGub[row[j]]; if (iRow >= 0) { row2[numberElements] = iRow; element2[numberElements++] = elementByColumn[j]; } } start2[++numberColumns] = numberElements; } gubStart[iSet+1] = numberColumns; } model2.replaceMatrix(new ClpGubDynamicMatrix(&model2, numberGub, numberColumns, gubStart, lower + putGub, upper + putGub, start2, row2, element2, cost2, lowerColumn2, upperColumn2)); delete [] rowIsGub; delete [] start2; delete [] row2; delete [] element2; delete [] cost2; delete [] lowerColumn2; delete [] upperColumn2; // For now scaling off model2.scaling(0); // Do partial dantzig ClpPrimalColumnSteepest dantzig(5); model2.setPrimalColumnPivotAlgorithm(dantzig); //model2.messageHandler()->setLogLevel(63); model2.setFactorizationFrequency(maxFactor); model2.setMaximumIterations(4000000); double time1 = CoinCpuTime(); model2.primal(); { ClpGubDynamicMatrix * gubMatrix = dynamic_cast< ClpGubDynamicMatrix*>(model2.clpMatrix()); assert(gubMatrix); const double * solution = model2.primalColumnSolution(); int numberGubColumns = gubMatrix->numberGubColumns(); int firstOdd = gubMatrix->firstDynamic(); int lastOdd = 
gubMatrix->firstAvailable(); int numberTotalColumns = firstOdd + numberGubColumns; int numberRows = model2.numberRows(); char * status = new char [numberTotalColumns]; double * gubSolution = new double [numberTotalColumns]; int numberSets = gubMatrix->numberSets(); const int * id = gubMatrix->id(); int i; const double * lowerColumn = gubMatrix->lowerColumn(); const double * upperColumn = gubMatrix->upperColumn(); for (i = 0; i < numberGubColumns; i++) { if (gubMatrix->getDynamicStatus(i) == ClpGubDynamicMatrix::atUpperBound) { gubSolution[i+firstOdd] = upperColumn[i]; status[i+firstOdd] = 2; } else if (gubMatrix->getDynamicStatus(i) == ClpGubDynamicMatrix::atLowerBound && lowerColumn) { gubSolution[i+firstOdd] = lowerColumn[i]; status[i+firstOdd] = 1; } else { gubSolution[i+firstOdd] = 0.0; status[i+firstOdd] = 1; } } for (i = 0; i < firstOdd; i++) { ClpSimplex::Status thisStatus = model2.getStatus(i); if (thisStatus == ClpSimplex::basic) status[i] = 0; else if (thisStatus == ClpSimplex::atLowerBound) status[i] = 1; else if (thisStatus == ClpSimplex::atUpperBound) status[i] = 2; else if (thisStatus == ClpSimplex::isFixed) status[i] = 3; else abort(); gubSolution[i] = solution[i]; } for (i = firstOdd; i < lastOdd; i++) { int iBig = id[i-firstOdd] + firstOdd; ClpSimplex::Status thisStatus = model2.getStatus(i); if (thisStatus == ClpSimplex::basic) status[iBig] = 0; else if (thisStatus == ClpSimplex::atLowerBound) status[iBig] = 1; else if (thisStatus == ClpSimplex::atUpperBound) status[iBig] = 2; else if (thisStatus == ClpSimplex::isFixed) status[iBig] = 3; else abort(); gubSolution[iBig] = solution[i]; } char * rowStatus = new char[numberRows]; for (i = 0; i < numberRows; i++) { ClpSimplex::Status thisStatus = model2.getRowStatus(i); if (thisStatus == ClpSimplex::basic) rowStatus[i] = 0; else if (thisStatus == ClpSimplex::atLowerBound) rowStatus[i] = 1; else if (thisStatus == ClpSimplex::atUpperBound) rowStatus[i] = 2; else if (thisStatus == ClpSimplex::isFixed) 
rowStatus[i] = 3; else abort(); } char * setStatus = new char[numberSets]; int * keyVariable = new int[numberSets]; memcpy(keyVariable, gubMatrix->keyVariable(), numberSets * sizeof(int)); for (i = 0; i < numberSets; i++) { int iKey = keyVariable[i]; if (iKey > lastOdd) iKey = numberTotalColumns + i; else iKey = id[iKey-firstOdd] + firstOdd; keyVariable[i] = iKey; ClpSimplex::Status thisStatus = gubMatrix->getStatus(i); if (thisStatus == ClpSimplex::basic) setStatus[i] = 0; else if (thisStatus == ClpSimplex::atLowerBound) setStatus[i] = 1; else if (thisStatus == ClpSimplex::atUpperBound) setStatus[i] = 2; else if (thisStatus == ClpSimplex::isFixed) setStatus[i] = 3; else abort(); } FILE * fp = fopen("xx.sol", "w"); fwrite(gubSolution, sizeof(double), numberTotalColumns, fp); fwrite(status, sizeof(char), numberTotalColumns, fp); const double * rowsol = model2.primalRowSolution(); int originalNumberRows = model.numberRows(); double * rowsol2 = new double[originalNumberRows]; memset(rowsol2, 0, originalNumberRows * sizeof(double)); model.times(1.0, gubSolution, rowsol2); for (i = 0; i < numberRows; i++) assert(fabs(rowsol[i] - rowsol2[i]) < 1.0e-3); //for (;i<originalNumberRows;i++) //printf("%d %g\n",i,rowsol2[i]); delete [] rowsol2; fwrite(rowsol, sizeof(double), numberRows, fp); fwrite(rowStatus, sizeof(char), numberRows, fp); fwrite(setStatus, sizeof(char), numberSets, fp); fwrite(keyVariable, sizeof(int), numberSets, fp); fclose(fp); delete [] status; delete [] gubSolution; delete [] setStatus; delete [] keyVariable; // ** if going to rstart as dynamic need id_ // also copy coding in useEf.. from ClpGubMatrix (i.e. 
test for basis) } printf("obj offset is %g\n", model2.objectiveOffset()); printf("Primal took %g seconds\n", CoinCpuTime() - time1); //model2.primal(1); } delete [] mark; delete [] gubStart; delete [] gubEnd; delete [] which; delete [] whichGub; delete [] lower; delete [] upper; #else printf("testGub2 not available with COIN_BIG_INDEX=2\n"); #endif return 0; }
hei1233212000/avaje-ebeanorm
src/test/java/com/avaje/tests/model/selfref/SelfParent.java
<filename>src/test/java/com/avaje/tests/model/selfref/SelfParent.java package com.avaje.tests.model.selfref; import java.util.List; import javax.persistence.Entity; import javax.persistence.Id; import javax.persistence.ManyToOne; import javax.persistence.OneToMany; import javax.persistence.Table; import javax.persistence.Version; @Entity @Table(name="self_parent") public class SelfParent { @Id Long id; @Version Long version; String name; @ManyToOne() SelfParent parent; @OneToMany(mappedBy="parent") List<SelfParent> children; public SelfParent(String name, SelfParent parent) { this.name = name; this.parent = parent; } public SelfParent() { } public Long getId() { return id; } public void setId(Long id) { this.id = id; } public Long getVersion() { return version; } public void setVersion(Long version) { this.version = version; } public String getName() { return name; } public void setName(String name) { this.name = name; } public SelfParent getParent() { return parent; } public void setParent(SelfParent parent) { this.parent = parent; } public List<SelfParent> getChildren() { return children; } public void setChildren(List<SelfParent> children) { this.children = children; } }
gadge/spare
packages/debugger/says/test/conversation.says.test.js
import { BOLD } from '@palett/enum-font-effects'
import { Deco } from '@spare/deco'
import { LF } from '@spare/enum-chars'
import { logger, logNeL } from '@spare/logger'
import { says } from '../dist/index.esm'

// const says = Says.build({ effects: [BOLD] })

// Same conversation as before, written as plain calls instead of the
// non-standard pipeline operator: `x |> f` is exactly `f(x)`.
says.chef.to(says.aboyeur).to(says.worker)('what to do')
says.worker.asc.to(says.chef)('how would i know')
says.worker('i\'ll be there tmr')
says.tournant.asc.asc('anything i can do for you')
says.aboyeur('no,\n but you just stand by, \nand wait for order')
says.tournant('yes')

// Dump every speaker registered during the exchange above.
logger(LF + 'registered roster')
logNeL(Deco({ vo: 1 })(says.roster()))
time-link/timelink-py
tests/test_mhk_models_no_db.py
# tests/test_mhk_models_no_db.py
"""Tests for the MHK models that run without a database connection."""
import pytest

from timelink.mhk.models import base  # noqa
from timelink.mhk.models.entity import Entity  # noqa
from timelink.mhk.models.pom_som_mapper import PomSomMapper
from timelink.mhk.models.base_class import Base
from timelink.mhk.models.db import TimelinkDB


def test_entity_subclasses():
    """Each newly declared subclass must grow Entity.get_subclasses() by one."""
    before = len(list(Entity.get_subclasses()))

    class SubEntity(Entity):  # direct subclass of Entity
        pass

    after_direct = len(list(Entity.get_subclasses()))
    assert after_direct == before + 1, "wrong direct subclasses of Entity"

    class SubSubEntity(SubEntity):  # nested subclass: must be counted recursively
        pass

    after_nested = len(list(Entity.get_subclasses()))
    assert after_nested == after_direct + 1, "wrong recursive subclasses of Entity"
getspooky/Reactrix
src/hash/__tests__/sha1.test.js
import { validateRules, getStackError } from '../../common';

let sha1Rule = null;

// Reset the shared rule holder between tests.
afterEach(() => {
  sha1Rule = null;
});

test('validates that the string is a sha1 algorithm', () => {
  // Both fixtures are well-formed 40-hex-digit SHA-1 digests, so the
  // validator must report zero stacked errors for each of them.
  const validDigests = [
    '265240e109be635c0cb29fabeb8586f234a5d91c',
    'f7ff9e8b7bb2e09b70935a5d785e0cc5d9d0abf0',
  ];
  for (const digest of validDigests) {
    sha1Rule = validateRules(digest, 'sha1');
    expect(getStackError(sha1Rule)).toBe(0);
  }
});
bear1704/DX11_Portfolio
include/maxsdk/Rendering/RendProgressCallback.h
//////////////////////////////////////////////////////////////////////////////
//
// Copyright 2015 Autodesk, Inc. All rights reserved.
//
// Use of this software is subject to the terms of the Autodesk license
// agreement provided at the time of installation or download, or which
// otherwise accompanies this software in either electronic or hard copy form.
//
//////////////////////////////////////////////////////////////////////////////
#pragma once
#include "../MaxHeap.h"
#include "../3dsmaxconfig.h"
#include "../strbasic.h"

/// \defgroup Values_returned_from_Progress__ Values Returned from Progress()
//@{
#define RENDPROG_CONTINUE 1 //!< Continue to Process
#define RENDPROG_ABORT 0 //!< Stop Processing
//@}

/// \defgroup Values_passed_to_SetCurField__ Values Passed to SetCurField()
//@{
#define FIELD_FIRST 0
#define FIELD_SECOND 1
#define FIELD_NONE -1
//@}

/*! This class is a callback passed in to the renderer. The system passes this
callback to the renderer, and the renderer will use these methods whenever it is
doing something that is going to take some time. For instance when transforming
objects it can update the progress bar. This is also passed in to the shadow
buffer code so the shadow buffer can show its progress. All methods of this
class are implemented by the system. They are called by a plug-in renderer.
\see Class Renderer. */
class RendProgressCallback: public MaxHeapOperators {
	public:
		/*! Destructor. Virtual so derived callbacks are destroyed correctly
		through a base pointer. */
		virtual ~RendProgressCallback() { }
		/*! Allows the plug-in to set the string displayed in the renderer dialog.
		\par Parameters:
		<b>const MCHAR *title</b>\n\n
		The string to display. Multiple title strings can be provided by separating
		them with newline ('\n') characters. The progress callback is then
		responsible for displaying these strings correctly, e.g. by alternating
		them every few seconds. */
		virtual void SetTitle(const MCHAR *title)=0;
		/*! Allows the plug-in to update the renderer progress display.
		\par Parameters:
		<b>int done</b>\n\n
		The number of items completed so far.\n\n
		<b>int total</b>\n\n
		The total number of items to process. For undeterminate (infinite
		render,...) progress, passing -1 as the total will trigger the marquee
		mode of the progress bar.
		\return <b>RENDPROG_CONTINUE</b>\n\n
		Continue to process.\n\n
		<b>RENDPROG_ABORT</b>\n\n
		Stop processing. */
		virtual int Progress(int done, int total)=0;
		/*! Sets the field number display. Non-pure: the default implementation
		is a no-op, so overriding is optional.
		\par Parameters:
		<b>int which</b>\n\n
		<b>FIELD_FIRST</b>\n\n
		<b>FIELD_SECOND</b>\n\n
		<b>FIELD_NONE</b> */
		virtual void SetCurField(int /*which*/) {}
		/*! The plug-in renderer should call this on every frame, passing in values
		for the various parameters. These are displayed in the rendering in
		progress dialog. Non-pure: the default implementation is a no-op.
		\par Parameters:
		<b>int nlights</b>\n\n
		The total number of lights.\n\n
		<b>int nrayTraced</b>\n\n
		The number of lights using raytraced shadows.\n\n
		<b>int nshadowed</b>\n\n
		The number of lights using shadows.\n\n
		<b>int nobj</b>\n\n
		The total number of objects.\n\n
		<b>int nfaces</b>\n\n
		The total number of faces. */
		virtual void SetSceneStats(int /*nlights*/, int /*nrayTraced*/, int /*nshadowed*/, int /*nobj*/, int /*nfaces*/) {}
		/*! Sets the current rendering process step. This is used by the logging to
		give a global picture of the rendering process progress, not just of the
		current task. Let's say your rendering process is two steps (Translation
		and rendering). You would call this function at the beginning of the
		translation with (1,2) as parameters. Then use the Progress function as
		usual, calling SetStep(2,2) at the beginning of the rendering task. This
		enables 3ds Max to know the total progress of the rendering task.
		\par Parameters:
		<b>int current</b>\n\n
		One based current step number. Passing zero or a negative value here will
		make the application skip the progress logging.
		<b>int total</b>\n\n
		One based total step count. Passing zero, a negative value, or a value
		lower than current will make the application skip the progress logging. */
		virtual void SetStep(int current, int total) = 0;
};

//! \deprecated in 3ds Max 2017. All methods of RendProgressCallback2 have been moved to class RendProgressCallback.
typedef MAX_DEPRECATED RendProgressCallback RendProgressCallback2;
wanli945/zhongcheng
src/views/report/inflow/order/data/mock.js
/*
 * src/views/report/inflow/order/data/mock.js
 * @IDE : WebStorm
 * @time : 2021/2/1 15:24
 * @author : 李艳鹏
 * @Software : app
 * @File : mock
 * @describe : Mock data providers for the inflow order report page.
 */
import Mock from "mockjs"
import {getD2D} from "../../data/mock"

// Static options for the page header filters (city select + date granularity).
// NOTE(review): '今天' and '日' share value 'day' — presumably intentional
// ("today" is rendered as a day view), but confirm against the consumer.
export const headerData = {
  citySelect: [
    {label: "浙江", value: "浙江"},
    {label: "福建", value: "福建"},
  ],
  dataArr: [
    {name: '今天', value: 'day'},
    {name: '日', value: 'day'},
    {name: '周', value: 'week'},
    {name: '月', value: 'month'},
  ]
}

/**
 * Randomized KPI cards for the order dashboard.
 * Each card carries a title, a total (number or percentage string) and a
 * period-over-period rate in [-1, 1].
 * NOTE(review): '竣工率' and '退单率' both use category "chargebackRate" —
 * possibly a copy/paste slip; left unchanged, verify against the chart code.
 */
export const orderBoardData = () => {
  return [
    {
      title: '工单数',
      total: Mock.mock('@integer(100, 800)'),
      rate: Mock.mock('@float(-1, 1, 2)'),
      category: "order",
      cur: true // the initially selected card
    },
    {
      title: '已提交',
      total: Mock.mock('@integer(100, 800)'),
      rate: Mock.mock('@float(-1, 1, 2)'),
      category: "submitted"
    },
    {
      title: '开通中',
      total: Mock.mock('@integer(60, 100)') + "%",
      rate: Mock.mock('@float(-1, 1, 2)'),
      category: "opening"
    },
    {
      title: '已归档',
      total: Mock.mock('@integer(60, 100)'),
      rate: Mock.mock('@float(-1, 1, 2)'),
      category: "archived"
    },
    {
      title: '竣工率',
      total: Mock.mock('@integer(60, 100)') + "%",
      rate: Mock.mock('@float(-1, 1, 2)'),
      category: "chargebackRate"
    },
    {
      title: '退单',
      total: Mock.mock('@integer(60, 100)'),
      rate: Mock.mock('@float(-1, 1, 2)'),
      category: "chargeback"
    },
    {
      title: '退单率',
      total: Mock.mock('@integer(60, 100)') + "%",
      rate: Mock.mock('@float(-1, 1, 2)'),
      category: "chargebackRate"
    }
  ]
}

/**
 * Trend-line data; delegates to the shared day-to-day mock generator.
 * Only dateRange is used; the other filter fields are accepted for API parity.
 */
export const getPolyline = async ({channel, platType, time, timeType, dateRange}, that) => {
  return getD2D(dateRange, that)
}

/**
 * Per-server ranking rows (submitted / archived / chargeback counts).
 * @param data filter payload (unused by the mock).
 */
export const getRank = async (data) => {
  let dataArr = [];
  // Fix: draw the random row count once. The original evaluated
  // Mock.mock('@natural(100, 200)') inside the loop condition, producing a
  // fresh random bound on every iteration.
  const rowCount = Mock.mock('@natural(100, 200)');
  for (let i = 0; i <= rowCount; i++) {
    dataArr.push({
      serverName: Mock.mock('@cname()'),
      "已提交": Mock.mock('@natural(80, 200)'),
      "已归档": Mock.mock('@natural(80, 200)'),
      "退单": Mock.mock('@natural(80, 200)'),
    })
  }
  return {
    dataArr
  }
}

/** City ranking rows: a random city name with a random count. */
export const getCityRank = () => {
  let dataArr = [];
  // Fix: same hoisting as in getRank — one random bound per call, not per
  // iteration.
  const rowCount = Mock.mock('@natural(10, 15)');
  for (let i = 0; i <= rowCount; i++) {
    dataArr.push({
      city: Mock.mock('@city'),
      num: Mock.mock('@natural(80, 200)'),
    })
  }
  return {
    dataArr
  }
}

/**
 * Product/commodity rows for the given city + channel, plus a c4List of
 * random city options for the cascading filter.
 */
export const getProduct = ({city, channel}) => {
  let dataArr = [];
  let c4List = [];
  for (let i = 0; i <= 10; i++) {
    dataArr.push({
      productName: Mock.mock('@datetime("yyyyMM-")') + Mock.mock('@ctitle(10, 12)') + `【${city} ${channel}】`,
      commodityName: Mock.mock('@datetime("yyyyMM-")') + Mock.mock('@ctitle(10, 12)') + `【${city} ${channel}】`,
      num: Mock.mock('@integer(100,500)'),
      rate: Mock.mock('@float(60, 99, 2,2)') + "%"
    })
    let ms = Mock.mock('@city');
    c4List.push({
      label: ms,
      value: ms
    })
  }
  return {
    dataArr,
    total: Mock.mock('@integer(60, 100)'),
    c4List
  };
}

/** Per-advertiser performance table rows plus a total row count. */
export const getPerformanceKanBan = () => {
  let dataArr = [];
  for (let i = 0; i < 10; i++) {
    dataArr.push({
      advertisers: Mock.mock('@ctitle(10, 12)'),
      serverName: Mock.mock('@cname()'),
      orderNum: Mock.mock('@integer(400,500)'),
      archiveNum: Mock.mock('@integer(400,500)'),
      completionRate: Mock.mock('@float(60, 99, 2,2)') + "%",
      chargeback: Mock.mock('@integer(100,300)'),
      chargebackRate: Mock.mock('@float(60, 99, 2,2)') + "%",
    })
  }
  return {
    dataArr,
    // Fix: was '@integer(300,200)' (min > max) — the old form only worked by
    // accident of mockjs's Random.integer implementation.
    total: Mock.mock('@integer(200,300)')
  };
}

/** Bandwidth-tier analysis rows plus a c4List of random city options. */
export const productAnalysis = () => {
  let dataArr = [];
  let c4List = [];
  for (let i = 0; i < 10; i++) {
    dataArr.push({
      productRate: Mock.mock('@integer(1,10)') + "00M",
      num: Mock.mock('@integer(400,500)'),
      rate: Mock.mock('@float(60, 99, 2,2)') + "%",
    })
    let ms = Mock.mock('@city');
    c4List.push({
      label: ms,
      value: ms
    })
  }
  return {
    dataArr,
    // Fix: was '@integer(300,200)' (min > max) — normalized to (200,300).
    total: Mock.mock('@integer(200,300)'),
    c4List
  };
}
LaudateCorpus1/graphql-orchestrator-java
src/test/java/com/intuit/graphql/orchestrator/schema/transform/GraphqlAdapterTransformerTest.java
package com.intuit.graphql.orchestrator.schema.transform;

import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;

import com.google.common.collect.ImmutableMap;
import com.intuit.graphql.graphQL.ObjectTypeDefinition;
import com.intuit.graphql.orchestrator.ServiceProvider.ServiceType;
import com.intuit.graphql.orchestrator.TestServiceProvider;
import com.intuit.graphql.orchestrator.schema.Operation;
import com.intuit.graphql.orchestrator.schema.SchemaTransformationException;
import com.intuit.graphql.orchestrator.schema.fold.XtextGraphFolder;
import com.intuit.graphql.orchestrator.schema.transform.GraphQLAdapterTransformer.AdapterDirectiveVisitor;
import com.intuit.graphql.orchestrator.xtext.DataFetcherContext;
import com.intuit.graphql.orchestrator.xtext.DataFetcherContext.DataFetcherType;
import com.intuit.graphql.orchestrator.xtext.FieldContext;
import com.intuit.graphql.orchestrator.xtext.XtextGraph;
import com.intuit.graphql.orchestrator.xtext.XtextGraphBuilder;
import java.util.Arrays;
import java.util.Collections;
import lombok.extern.slf4j.Slf4j;
import org.junit.Test;

/**
 * Tests for {@link GraphQLAdapterTransformer}: builds one or two XtextGraphs
 * from inline SDL, folds them into a stitched graph, runs the transformer,
 * and asserts which fields end up with STATIC vs SERVICE data fetchers (and
 * which namespace owns each SERVICE field).
 */
@Slf4j
public class GraphqlAdapterTransformerTest {

  // Appended to every test schema so the @adapter directive is declared.
  public static String directive = "directive @adapter(service:String!) on FIELD_DEFINITION";

  /**
   * @adapter fields nested several types deep: everything on the path down to
   * the adapted fields becomes STATIC; the adapted fields themselves (and the
   * field contributed by the second service) stay SERVICE.
   */
  @Test
  public void testAdapterDirectiveNested() {
    String schema = "schema { query: Query } type Query { a: A } "
        + "type A { b: B } type B {c: C}"
        + "type C { adapter1: D @adapter(service: 'foo'), adapter2: D @adapter(service: 'bar') }"
        + "type D { field: String}" + directive;
    String schema2 = "schema { query: Query } type Query { a: A } "
        + "type A { bb: BB } type BB {cc: String}";

    XtextGraph xtextGraph = XtextGraphBuilder
        .build(TestServiceProvider.newBuilder().namespace("SVC_b").serviceType(ServiceType.REST)
            .sdlFiles(ImmutableMap.of("schema.graphqls", schema)).build());
    XtextGraph xtextGraph2 = XtextGraphBuilder
        .build(TestServiceProvider.newBuilder().namespace("SVC_bb").serviceType(ServiceType.REST)
            .sdlFiles(ImmutableMap.of("schema.graphqls", schema2)).build());

    XtextGraph stitchedGraph = new XtextGraphFolder()
        .fold(XtextGraph.emptyGraph(), Arrays.asList(xtextGraph, xtextGraph2));
    XtextGraph adapterGraph = new GraphQLAdapterTransformer().transform(stitchedGraph);

    // NOTE(review): 'query' is unused in this test — looks like leftover
    // scaffolding; kept as-is.
    ObjectTypeDefinition query = adapterGraph.getOperationMap().get(Operation.QUERY);

    assertThat(adapterGraph.getCodeRegistry().get(new FieldContext("Query", "a")).getDataFetcherType()).isEqualTo(
        DataFetcherType.STATIC);
    assertThat(adapterGraph.getCodeRegistry().get(new FieldContext("A", "b")).getDataFetcherType()).isEqualTo(
        DataFetcherType.STATIC);
    assertThat(adapterGraph.getCodeRegistry().get(new FieldContext("A", "bb")).getDataFetcherType()).isEqualTo(
        DataFetcherType.SERVICE);
    assertThat(adapterGraph.getCodeRegistry().get(new FieldContext("A", "bb")).getNamespace()).isEqualTo(
        "SVC_bb");
    assertThat(adapterGraph.getCodeRegistry().get(new FieldContext("B", "c")).getDataFetcherType()).isEqualTo(
        DataFetcherType.STATIC);
    assertThat(adapterGraph.getCodeRegistry().get(new FieldContext("C", "adapter1")).getDataFetcherType()).isEqualTo(
        DataFetcherType.SERVICE);
    assertThat(adapterGraph.getCodeRegistry().get(new FieldContext("C", "adapter2")).getDataFetcherType()).isEqualTo(
        DataFetcherType.SERVICE);
    assertThat(adapterGraph.getCodeRegistry().get(new FieldContext("C", "adapter1")).getNamespace()).isEqualTo(
        "SVC_b");
    assertThat(adapterGraph.getCodeRegistry().get(new FieldContext("C", "adapter2")).getNamespace()).isEqualTo(
        "SVC_b");
  }

  /**
   * An @adapter field sitting at the same level as a field merged in from a
   * second REST service: both become SERVICE, each owned by its own namespace.
   */
  @Test
  public void testAdapterDirectiveAtSameLevelAsRest() {
    String schema = "schema { query: Query } type Query { a: A } "
        + "type A { b: B @adapter(service: 'foo') } type B {d: D}"
        + "type D { field: String}" + directive;
    String schema2 = "schema { query: Query } type Query { a: A } "
        + "type A { bb: BB } type BB {cc: String}";

    XtextGraph xtextGraph = XtextGraphBuilder
        .build(TestServiceProvider.newBuilder().namespace("SVC_b").serviceType(ServiceType.REST)
            .sdlFiles(ImmutableMap.of("schema.graphqls", schema)).build());
    XtextGraph xtextGraph2 = XtextGraphBuilder
        .build(TestServiceProvider.newBuilder().namespace("SVC_bb").serviceType(ServiceType.REST)
            .sdlFiles(ImmutableMap.of("schema.graphqls", schema2)).build());

    XtextGraph stitchedGraph = new XtextGraphFolder()
        .fold(XtextGraph.emptyGraph(), Arrays.asList(xtextGraph, xtextGraph2));
    XtextGraph adapterGraph = new GraphQLAdapterTransformer().transform(stitchedGraph);

    // NOTE(review): unused local, kept as-is.
    ObjectTypeDefinition query = adapterGraph.getOperationMap().get(Operation.QUERY);

    assertThat(adapterGraph.getCodeRegistry().get(new FieldContext("Query", "a")).getDataFetcherType()).isEqualTo(
        DataFetcherType.STATIC);
    assertThat(adapterGraph.getCodeRegistry().get(new FieldContext("A", "b")).getDataFetcherType()).isEqualTo(
        DataFetcherType.SERVICE);
    assertThat(adapterGraph.getCodeRegistry().get(new FieldContext("A", "b")).getNamespace()).isEqualTo(
        "SVC_b");
    assertThat(adapterGraph.getCodeRegistry().get(new FieldContext("A", "bb")).getDataFetcherType()).isEqualTo(
        DataFetcherType.SERVICE);
    assertThat(adapterGraph.getCodeRegistry().get(new FieldContext("A", "bb")).getNamespace()).isEqualTo(
        "SVC_bb");
  }

  /**
   * A single REST service with nested @adapter fields: intermediate fields go
   * STATIC and both adapted fields are SERVICE under the service's namespace.
   */
  @Test
  public void testAdapterDirectiveOnlyOneRestService() {
    String schema = "schema { query: Query } type Query { a: A } "
        + "type A { b: B } type B {c: C} "
        + "type C { adapter1: D @adapter(service: 'foo'), adapter2: D @adapter(service: 'bar') }"
        + "type D { field: String}" + directive;

    FieldContext fieldContext = new FieldContext("Query", "a");
    DataFetcherContext dataFetcherContext = DataFetcherContext.newBuilder().dataFetcherType(DataFetcherType.SERVICE)
        .namespace("SVC1")
        .serviceType(ServiceType.REST).build();

    XtextGraph xtextGraph = XtextGraphBuilder
        .build(TestServiceProvider.newBuilder().namespace("SVC1").serviceType(ServiceType.REST)
            .sdlFiles(ImmutableMap.of("schema.graphqls", schema)).build());
    // NOTE(review): the result of transform(...) is discarded here — if
    // XtextGraph.transform returns a new instance rather than mutating, this
    // registry entry may never reach the graph. Kept byte-identical; verify.
    xtextGraph.transform(builder -> builder.codeRegistry(ImmutableMap.of(fieldContext, dataFetcherContext)));

    XtextGraph stitchedGraph = new XtextGraphFolder()
        .fold(XtextGraph.emptyGraph(), Collections.singletonList(xtextGraph));
    XtextGraph adapterGraph = new GraphQLAdapterTransformer().transform(stitchedGraph);

    // NOTE(review): unused local, kept as-is.
    ObjectTypeDefinition query = adapterGraph.getOperationMap().get(Operation.QUERY);

    assertThat(adapterGraph.getCodeRegistry().get(new FieldContext("Query", "a")).getDataFetcherType()).isEqualTo(
        DataFetcherType.STATIC);
    assertThat(adapterGraph.getCodeRegistry().get(new FieldContext("A", "b")).getDataFetcherType()).isEqualTo(
        DataFetcherType.STATIC);
    assertThat(adapterGraph.getCodeRegistry().get(new FieldContext("B", "c")).getDataFetcherType()).isEqualTo(
        DataFetcherType.STATIC);
    assertThat(adapterGraph.getCodeRegistry().get(new FieldContext("C", "adapter1")).getDataFetcherType()).isEqualTo(
        DataFetcherType.SERVICE);
    assertThat(adapterGraph.getCodeRegistry().get(new FieldContext("C", "adapter2")).getDataFetcherType()).isEqualTo(
        DataFetcherType.SERVICE);
    assertThat(adapterGraph.getCodeRegistry().get(new FieldContext("C", "adapter1")).getNamespace()).isEqualTo(
        "SVC1");
    assertThat(adapterGraph.getCodeRegistry().get(new FieldContext("C", "adapter2")).getNamespace()).isEqualTo(
        "SVC1");
  }

  /**
   * A Mutation root without an @adapter directive of its own must still get a
   * registry entry pointing at the contributing REST service.
   */
  @Test
  public void testAdapterTransformerWithoutDirective() {
    String schema = "schema { mutation: Mutation query: Query} "
        + "type Mutation { a: A } "
        + "type Query { a: A } "
        + "type A { b: B } type B {c: C} "
        + "type C { adapter1: D @adapter(service: 'foo'), adapter2: D @adapter(service: 'bar') }"
        + "type D { field: String }" + directive;

    FieldContext fieldContext = new FieldContext("Query", "a");
    DataFetcherContext dataFetcherContext = DataFetcherContext.newBuilder()
        .dataFetcherType(DataFetcherType.SERVICE)
        .namespace("SVC1")
        .serviceType(ServiceType.REST)
        .build();

    XtextGraph xtextGraph = XtextGraphBuilder
        .build(TestServiceProvider.newBuilder().namespace("SVC1").serviceType(ServiceType.REST)
            .sdlFiles(ImmutableMap.of("schema.graphqls", schema)).build());
    // NOTE(review): transform(...) result discarded — see note in
    // testAdapterDirectiveOnlyOneRestService.
    xtextGraph.transform(builder -> builder.codeRegistry(ImmutableMap.of(fieldContext, dataFetcherContext)));

    XtextGraph stitchedGraph = new XtextGraphFolder()
        .fold(XtextGraph.emptyGraph(), Collections.singletonList(xtextGraph));
    XtextGraph adapterGraph = new GraphQLAdapterTransformer().transform(stitchedGraph);

    assertThat(adapterGraph.getCodeRegistry().get(new FieldContext("Mutation", "a"))).isNotNull();
    assertThat(adapterGraph.getCodeRegistry().get(new FieldContext("Mutation", "a")).getNamespace()).isEqualTo("SVC1");
  }

  /**
   * For a GRAPHQL (non-REST) data-fetcher context, the transformer must not
   * create a Mutation registry entry (the schema has no mutation root).
   */
  @Test
  public void testAdapterTransformerForNonRest() {
    String schema = "schema { query: Query } type Query { a: A } "
        + "type A { b: B } type B {c: C} "
        + "type C { adapter1: D @adapter(service: 'foo'), adapter2: D @adapter(service: 'bar') }"
        + "type D { field: String }" + directive;

    FieldContext fieldContext = new FieldContext("Query", "a");
    DataFetcherContext dataFetcherContext = DataFetcherContext.newBuilder()
        .dataFetcherType(DataFetcherType.SERVICE)
        .namespace("SVC1")
        .serviceType(ServiceType.GRAPHQL)
        .build();

    XtextGraph xtextGraph = XtextGraphBuilder
        .build(TestServiceProvider.newBuilder().namespace("SVC1").serviceType(ServiceType.REST)
            .sdlFiles(ImmutableMap.of("schema.graphqls", schema)).build());
    // NOTE(review): transform(...) result discarded — see earlier note.
    xtextGraph.transform(builder -> builder.codeRegistry(ImmutableMap.of(fieldContext, dataFetcherContext)));

    XtextGraph stitchedGraph = new XtextGraphFolder()
        .fold(XtextGraph.emptyGraph(), Collections.singletonList(xtextGraph));
    XtextGraph adapterGraph = new GraphQLAdapterTransformer().transform(stitchedGraph);

    // NOTE(review): unused local, kept as-is.
    ObjectTypeDefinition query = adapterGraph.getOperationMap().get(Operation.QUERY);

    assertThat(adapterGraph.getCodeRegistry().get(new FieldContext("Mutation", "a"))).isNull();
  }

  /**
   * A registry entry referencing a field ("foo") that does not exist in the
   * schema must make the transformer throw FIELD_NULL_ERROR.
   */
  @Test
  public void testAdapterTransformerWithoutField() {
    String schema = "schema { query: Query } type Query { a: A } "
        + "type A { b: B } type B {c: C} "
        + "type C { adapter1: D @adapter(service: 'foo'), adapter2: D @adapter(service: 'bar') }"
        + "type D { field: String }" + directive;

    FieldContext fieldContext = new FieldContext("Query", "foo");
    DataFetcherContext dataFetcherContext = DataFetcherContext.newBuilder().dataFetcherType(DataFetcherType.SERVICE)
        .namespace("SVC1")
        .serviceType(ServiceType.REST).build();

    XtextGraph xtextGraph = XtextGraphBuilder
        .build(TestServiceProvider.newBuilder().namespace("SVC1").serviceType(ServiceType.REST)
            .sdlFiles(ImmutableMap.of("schema.graphqls", schema)).build());
    // NOTE(review): transform(...) result discarded — see earlier note. Unlike
    // the other tests, the unstitched graph is passed straight to the
    // transformer here.
    xtextGraph.transform(builder -> builder.codeRegistry(ImmutableMap.of(fieldContext, dataFetcherContext)));

    assertThatThrownBy(() -> new GraphQLAdapterTransformer().transform(xtextGraph)).isInstanceOf(
        SchemaTransformationException.class)
        .hasMessage(String.format(GraphQLAdapterTransformer.FIELD_NULL_ERROR, "foo"));
  }

  /**
   * Mutation-only schema: the mutation root field keeps its SERVICE namespace
   * and it is the single entry in the resulting code registry.
   */
  @Test
  public void testAdapterTransformerForMutation() {
    String schema = "schema { mutation: Mutation } type Mutation { a: A } "
        + "type A { b: B } type B {c: C} "
        + "type C { adapter1: D @adapter(service: 'foo'), adapter2: D @adapter(service: 'bar') }"
        + "type D { field: String }" + directive;

    FieldContext fieldContext = new FieldContext("Mutation", "a");
    DataFetcherContext dataFetcherContext = DataFetcherContext.newBuilder().dataFetcherType(DataFetcherType.SERVICE)
        .namespace("SVC1")
        .serviceType(ServiceType.REST).build();

    XtextGraph xtextGraph = XtextGraphBuilder
        .build(TestServiceProvider.newBuilder().namespace("SVC1").serviceType(ServiceType.REST)
            .sdlFiles(ImmutableMap.of("schema.graphqls", schema)).build());
    // NOTE(review): transform(...) result discarded — see earlier note.
    xtextGraph.transform(builder -> builder.codeRegistry(ImmutableMap.of(fieldContext, dataFetcherContext)));

    XtextGraph stitchedGraph = new XtextGraphFolder()
        .fold(XtextGraph.emptyGraph(), Collections.singletonList(xtextGraph));
    XtextGraph adapterGraph = new GraphQLAdapterTransformer().transform(stitchedGraph);

    assertThat(adapterGraph.getCodeRegistry().get(new FieldContext("Mutation", "a")).getNamespace()).isEqualTo(
        "SVC1");
    assertThat(adapterGraph.getCodeRegistry().size()).isEqualTo(1);
  }

  /**
   * An @adapter directive missing its mandatory 'service' argument must make
   * the visitor throw AdapterDirectiveVisitor.ERROR_MSG.
   */
  @Test
  public void testAdapterTransformerWithoutDirectiveArgument() {
    String schema = "schema { query: Query } type Query { a: A } "
        + "type A { b: B } type B {c: C} "
        + "type C { adapter1: D @adapter(service: 'foo'), adapter2: D @adapter }"
        + "type D { field: String }" + directive;

    FieldContext fieldContext = new FieldContext("Query", "a");
    DataFetcherContext dataFetcherContext = DataFetcherContext.newBuilder().dataFetcherType(DataFetcherType.SERVICE)
        .namespace("SVC1")
        .serviceType(ServiceType.REST).build();

    XtextGraph xtextGraph = XtextGraphBuilder
        .build(TestServiceProvider.newBuilder().namespace("SVC1").serviceType(ServiceType.REST)
            .sdlFiles(ImmutableMap.of("schema.graphqls", schema)).build());
    // NOTE(review): transform(...) result discarded — see earlier note.
    xtextGraph.transform(builder -> builder.codeRegistry(ImmutableMap.of(fieldContext, dataFetcherContext)));

    assertThatThrownBy(() -> new GraphQLAdapterTransformer().transform(xtextGraph)).isInstanceOf(
        SchemaTransformationException.class).hasMessage(AdapterDirectiveVisitor.ERROR_MSG);
  }
}
yblucky/mdrill
trunk/adhoc-jdbc/src/main/java/com/alimama/mdrill/jdbc/InsertParser.java
package com.alimama.mdrill.jdbc; import java.io.StringReader; import java.util.Arrays; import java.util.List; import net.sf.jsqlparser.JSQLParserException; import net.sf.jsqlparser.expression.operators.relational.ExpressionList; import net.sf.jsqlparser.parser.CCJSqlParserManager; import net.sf.jsqlparser.schema.Column; import net.sf.jsqlparser.statement.insert.Insert; import net.sf.jsqlparser.expression.*; import com.alimama.mdrill.json.JSONArray; import com.alimama.mdrill.json.JSONException; import com.alimama.mdrill.json.JSONObject; public class InsertParser { private String sql; public String tablename; public String[] fl; public String jsons; public static void main(String[] args) throws JSQLParserException, JSONException { InsertParser p=new InsertParser(); p.parse("INSERT INTO table_name (列1, 列2) VALUES ('111', '22') "); System.out.println(p.toString()); } public void parse(String sql) throws JSQLParserException, JSONException { this.sql=sql; CCJSqlParserManager parserManager = new CCJSqlParserManager(); Insert insert = (Insert) parserManager.parse(new StringReader(sql)); this.tablename=insert.getTable().getName(); fl=new String[insert.getColumns().size()]; for(int i=0;i<fl.length;i++) { fl[i]=((Column) insert.getColumns().get(i)).getColumnName(); } ExpressionList explist=((ExpressionList) insert.getItemsList()); JSONObject item=new JSONObject(); List expressions=explist.getExpressions(); for(int i=0;i<expressions.size();i++) { Object val=expressions.get(i); if(val instanceof StringValue) { StringValue vv=(StringValue)val; item.put(fl[i], String.valueOf(vv.getValue())); } if(val instanceof LongValue) { LongValue vv=(LongValue)val; item.put(fl[i], String.valueOf(vv.getValue())); } if(val instanceof DoubleValue) { DoubleValue vv=(DoubleValue)val; item.put(fl[i], String.valueOf(vv.getValue())); } } JSONArray list=new JSONArray(); list.put(item); this.jsons=list.toString(); } @Override public String toString() { return "InsertParser [sql=" + sql + ", tablename=" 
+ tablename + ", fl=" + Arrays.toString(fl) + ", jsons=" + jsons + "]"; } }
phishman3579/gs-collections
collections/src/main/java/com/gs/collections/impl/bag/sorted/mutable/TreeBag.java
<filename>collections/src/main/java/com/gs/collections/impl/bag/sorted/mutable/TreeBag.java /* * Copyright 2015 <NAME>. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.gs.collections.impl.bag.sorted.mutable; import java.io.Externalizable; import java.io.IOException; import java.io.ObjectInput; import java.io.ObjectOutput; import java.util.Arrays; import java.util.Collection; import java.util.Comparator; import java.util.Iterator; import java.util.Map; import java.util.NoSuchElementException; import java.util.Set; import java.util.concurrent.ExecutorService; import com.gs.collections.api.annotation.Beta; import com.gs.collections.api.bag.Bag; import com.gs.collections.api.bag.sorted.ImmutableSortedBag; import com.gs.collections.api.bag.sorted.MutableSortedBag; import com.gs.collections.api.bag.sorted.ParallelSortedBag; import com.gs.collections.api.bag.sorted.SortedBag; import com.gs.collections.api.block.function.Function; import com.gs.collections.api.block.function.Function0; import com.gs.collections.api.block.function.Function2; import com.gs.collections.api.block.function.primitive.BooleanFunction; import com.gs.collections.api.block.function.primitive.ByteFunction; import com.gs.collections.api.block.function.primitive.CharFunction; import com.gs.collections.api.block.function.primitive.DoubleFunction; import com.gs.collections.api.block.function.primitive.FloatFunction; import com.gs.collections.api.block.function.primitive.IntFunction; import 
com.gs.collections.api.block.function.primitive.LongFunction; import com.gs.collections.api.block.function.primitive.ShortFunction; import com.gs.collections.api.block.predicate.Predicate; import com.gs.collections.api.block.predicate.Predicate2; import com.gs.collections.api.block.predicate.primitive.IntPredicate; import com.gs.collections.api.block.procedure.Procedure; import com.gs.collections.api.block.procedure.Procedure2; import com.gs.collections.api.block.procedure.primitive.ObjectIntProcedure; import com.gs.collections.api.list.MutableList; import com.gs.collections.api.list.primitive.MutableBooleanList; import com.gs.collections.api.list.primitive.MutableByteList; import com.gs.collections.api.list.primitive.MutableCharList; import com.gs.collections.api.list.primitive.MutableDoubleList; import com.gs.collections.api.list.primitive.MutableFloatList; import com.gs.collections.api.list.primitive.MutableIntList; import com.gs.collections.api.list.primitive.MutableLongList; import com.gs.collections.api.list.primitive.MutableShortList; import com.gs.collections.api.map.sorted.MutableSortedMap; import com.gs.collections.api.ordered.OrderedIterable; import com.gs.collections.api.partition.bag.sorted.PartitionMutableSortedBag; import com.gs.collections.api.set.sorted.MutableSortedSet; import com.gs.collections.api.stack.MutableStack; import com.gs.collections.api.tuple.Pair; import com.gs.collections.impl.Counter; import com.gs.collections.impl.bag.mutable.AbstractMutableBag; import com.gs.collections.impl.block.factory.Predicates2; import com.gs.collections.impl.block.procedure.checked.CheckedProcedure2; import com.gs.collections.impl.list.mutable.FastList; import com.gs.collections.impl.list.mutable.primitive.BooleanArrayList; import com.gs.collections.impl.list.mutable.primitive.ByteArrayList; import com.gs.collections.impl.list.mutable.primitive.CharArrayList; import com.gs.collections.impl.list.mutable.primitive.DoubleArrayList; import 
com.gs.collections.impl.list.mutable.primitive.FloatArrayList; import com.gs.collections.impl.list.mutable.primitive.IntArrayList; import com.gs.collections.impl.list.mutable.primitive.LongArrayList; import com.gs.collections.impl.list.mutable.primitive.ShortArrayList; import com.gs.collections.impl.map.sorted.mutable.TreeSortedMap; import com.gs.collections.impl.multimap.bag.sorted.TreeBagMultimap; import com.gs.collections.impl.partition.bag.sorted.PartitionTreeBag; import com.gs.collections.impl.set.mutable.UnifiedSet; import com.gs.collections.impl.set.sorted.mutable.TreeSortedSet; import com.gs.collections.impl.stack.mutable.ArrayStack; import com.gs.collections.impl.utility.Iterate; import com.gs.collections.impl.utility.ListIterate; import com.gs.collections.impl.utility.OrderedIterate; import com.gs.collections.impl.utility.internal.IterableIterate; /** * A TreeBag is a MutableSortedBag which uses a SortedMap as its underlying data store. Each key in the SortedMap represents some item, * and the value in the map represents the current number of occurrences of that item. * * @since 4.2 */ public class TreeBag<T> extends AbstractMutableBag<T> implements Externalizable, MutableSortedBag<T> { private static final Function0<Counter> NEW_COUNTER_BLOCK = new Function0<Counter>() { public Counter value() { return new Counter(); } }; private static final long serialVersionUID = 1L; private MutableSortedMap<T, Counter> items; private int size; public TreeBag() { this.items = TreeSortedMap.newMap(); } private TreeBag(MutableSortedMap<T, Counter> map) { this.items = map; this.size = (int) map.valuesView().sumOfInt(Counter.TO_COUNT); } public TreeBag(Comparator<? super T> comparator) { this.items = TreeSortedMap.newMap(comparator); } public TreeBag(SortedBag<T> sortedBag) { this(sortedBag.comparator(), sortedBag); } public TreeBag(Comparator<? super T> comparator, Iterable<? 
extends T> iterable) { this(comparator); this.addAllIterable(iterable); } public static <E> TreeBag<E> newBag() { return new TreeBag<E>(); } public static <E> TreeBag<E> newBag(Comparator<? super E> comparator) { return new TreeBag<E>(comparator); } public static <E> TreeBag<E> newBag(Iterable<? extends E> source) { if (source instanceof SortedBag<?>) { return new TreeBag<E>((SortedBag<E>) source); } return Iterate.addAllTo(source, TreeBag.<E>newBag()); } public static <E> TreeBag<E> newBag(Comparator<? super E> comparator, Iterable<? extends E> iterable) { return new TreeBag<E>(comparator, iterable); } public static <E> TreeBag<E> newBagWith(E... elements) { //noinspection SSBasedInspection return TreeBag.newBag(Arrays.asList(elements)); } public static <E> TreeBag<E> newBagWith(Comparator<? super E> comparator, E... elements) { //noinspection SSBasedInspection return TreeBag.newBag(comparator, Arrays.asList(elements)); } private static <T> int compare(SortedBag<T> bagA, SortedBag<T> bagB) { Iterator<T> itrA = bagA.iterator(); Iterator<T> itrB = bagB.iterator(); if (bagA.comparator() != null) { Comparator<? super T> comparator = bagA.comparator(); while (itrA.hasNext()) { if (itrB.hasNext()) { int val = comparator.compare(itrA.next(), itrB.next()); if (val != 0) { return val; } } else { return 1; } } return itrB.hasNext() ? -1 : 0; } while (itrA.hasNext()) { if (itrB.hasNext()) { int val = ((Comparable<T>) itrA.next()).compareTo(itrB.next()); if (val != 0) { return val; } } else { return 1; } } return itrB.hasNext() ? 
-1 : 0; } @Override public TreeBag<T> clone() { try { TreeBag<T> clone = (TreeBag<T>) super.clone(); clone.items = this.items.clone(); return clone; } catch (CloneNotSupportedException e) { throw new AssertionError(e); } } @Override public boolean equals(Object other) { if (this == other) { return true; } if (!(other instanceof Bag)) { return false; } final Bag<?> bag = (Bag<?>) other; if (this.sizeDistinct() != bag.sizeDistinct()) { return false; } return this.items.keyValuesView().allSatisfy(new Predicate<Pair<T, Counter>>() { public boolean accept(Pair<T, Counter> each) { return bag.occurrencesOf(each.getOne()) == each.getTwo().getCount(); } }); } public int sizeDistinct() { return this.items.size(); } @Override public int hashCode() { final Counter counter = new Counter(); this.forEachWithOccurrences(new ObjectIntProcedure<T>() { public void value(T each, int count) { counter.add((each == null ? 0 : each.hashCode()) ^ count); } }); return counter.getCount(); } public void forEachWithOccurrences(final ObjectIntProcedure<? super T> procedure) { this.items.forEachKeyValue(new Procedure2<T, Counter>() { public void value(T item, Counter count) { procedure.value(item, count.getCount()); } }); } public MutableSortedBag<T> selectByOccurrences(final IntPredicate predicate) { MutableSortedMap<T, Counter> map = this.items.select(new Predicate2<T, Counter>() { public boolean accept(T each, Counter occurrences) { return predicate.accept(occurrences.getCount()); } }); return new TreeBag<T>(map); } public int occurrencesOf(Object item) { Counter counter = this.items.get(item); return counter == null ? 
0 : counter.getCount(); } public MutableSortedMap<T, Integer> toMapOfItemToCount() { final MutableSortedMap<T, Integer> map = TreeSortedMap.newMap(this.comparator()); this.forEachWithOccurrences(new ObjectIntProcedure<T>() { public void value(T item, int count) { map.put(item, count); } }); return map; } public String toStringOfItemToCount() { return this.items.toString(); } @Override public boolean isEmpty() { return this.items.isEmpty(); } @Override public boolean remove(Object item) { Counter counter = this.items.get(item); if (counter != null) { if (counter.getCount() > 1) { counter.decrement(); } else { this.items.remove(item); } this.size--; return true; } return false; } public void clear() { this.items.clear(); this.size = 0; } @Override public boolean contains(Object o) { return this.items.containsKey(o); } public int compareTo(SortedBag<T> otherBag) { return TreeBag.compare(this, otherBag); } public void writeExternal(final ObjectOutput out) throws IOException { out.writeObject(this.comparator()); out.writeInt(this.items.size()); try { this.items.forEachKeyValue(new CheckedProcedure2<T, Counter>() { public void safeValue(T object, Counter parameter) throws Exception { out.writeObject(object); out.writeInt(parameter.getCount()); } }); } catch (RuntimeException e) { if (e.getCause() instanceof IOException) { throw (IOException) e.getCause(); } throw e; } } public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException { this.items = new TreeSortedMap<T, Counter>((Comparator<T>) in.readObject()); int size = in.readInt(); for (int i = 0; i < size; i++) { this.addOccurrences((T) in.readObject(), in.readInt()); } } public MutableSortedBag<T> tap(Procedure<? super T> procedure) { this.forEach(procedure); return this; } public void each(final Procedure<? 
super T> procedure) { this.items.forEachKeyValue(new Procedure2<T, Counter>() { public void value(T key, Counter value) { for (int i = 0; i < value.getCount(); i++) { procedure.value(key); } } }); } @Override public void forEachWithIndex(final ObjectIntProcedure<? super T> objectIntProcedure) { final Counter index = new Counter(); this.items.forEachKeyValue(new Procedure2<T, Counter>() { public void value(T key, Counter value) { for (int i = 0; i < value.getCount(); i++) { objectIntProcedure.value(key, index.getCount()); index.increment(); } } }); } public void forEach(int fromIndex, int toIndex, Procedure<? super T> procedure) { ListIterate.rangeCheck(fromIndex, toIndex, this.size); if (fromIndex > toIndex) { throw new IllegalArgumentException("fromIndex must not be greater than toIndex"); } Iterator<Map.Entry<T, Counter>> iterator = this.items.entrySet().iterator(); int i = 0; while (iterator.hasNext() && i < fromIndex) { Map.Entry<T, Counter> entry = iterator.next(); Counter value = entry.getValue(); int count = value.getCount(); if (i + count < fromIndex) { i += count; } else { for (int j = 0; j < count; j++) { if (i >= fromIndex && i <= toIndex) { procedure.value(entry.getKey()); } i++; } } } while (iterator.hasNext() && i <= toIndex) { Map.Entry<T, Counter> entry = iterator.next(); Counter value = entry.getValue(); int count = value.getCount(); for (int j = 0; j < count; j++) { if (i <= toIndex) { procedure.value(entry.getKey()); } i++; } } } public void forEachWithIndex(int fromIndex, int toIndex, ObjectIntProcedure<? 
super T> objectIntProcedure) { ListIterate.rangeCheck(fromIndex, toIndex, this.size); if (fromIndex > toIndex) { throw new IllegalArgumentException("fromIndex must not be greater than toIndex"); } Iterator<Map.Entry<T, Counter>> iterator = this.items.entrySet().iterator(); int i = 0; while (iterator.hasNext() && i < fromIndex) { Map.Entry<T, Counter> entry = iterator.next(); Counter value = entry.getValue(); int count = value.getCount(); if (i + count < fromIndex) { i += count; } else { for (int j = 0; j < count; j++) { if (i >= fromIndex && i <= toIndex) { objectIntProcedure.value(entry.getKey(), i); } i++; } } } while (iterator.hasNext() && i <= toIndex) { Map.Entry<T, Counter> entry = iterator.next(); Counter value = entry.getValue(); int count = value.getCount(); for (int j = 0; j < count; j++) { if (i <= toIndex) { objectIntProcedure.value(entry.getKey(), i); } i++; } } } @Override public <P> void forEachWith(final Procedure2<? super T, ? super P> procedure, final P parameter) { this.items.forEachKeyValue(new Procedure2<T, Counter>() { public void value(T key, Counter value) { for (int i = 0; i < value.getCount(); i++) { procedure.value(key, parameter); } } }); } public Iterator<T> iterator() { return new InternalIterator(); } public void addOccurrences(T item, int occurrences) { if (occurrences < 0) { throw new IllegalArgumentException("Cannot add a negative number of occurrences"); } if (occurrences > 0) { this.items.getIfAbsentPut(item, NEW_COUNTER_BLOCK).add(occurrences); this.size += occurrences; } } public boolean removeOccurrences(Object item, int occurrences) { if (occurrences < 0) { throw new IllegalArgumentException("Cannot remove a negative number of occurrences"); } if (occurrences == 0) { return false; } Counter counter = this.items.get(item); if (counter == null) { return false; } int startCount = counter.getCount(); if (occurrences >= startCount) { this.items.remove(item); this.size -= startCount; return true; } counter.add(occurrences * -1); 
this.size -= occurrences; return true; } public boolean setOccurrences(T item, int occurrences) { if (occurrences < 0) { throw new IllegalArgumentException("Cannot set a negative number of occurrences"); } int originalOccurrences = this.occurrencesOf(item); if (originalOccurrences == occurrences) { return false; } if (occurrences == 0) { this.items.remove(item); } else { this.items.put(item, new Counter(occurrences)); } this.size -= originalOccurrences - occurrences; return true; } public TreeBag<T> without(T element) { this.remove(element); return this; } public TreeBag<T> withAll(Iterable<? extends T> iterable) { this.addAllIterable(iterable); return this; } public TreeBag<T> withoutAll(Iterable<? extends T> iterable) { this.removeAllIterable(iterable); return this; } public ImmutableSortedBag<T> toImmutable() { throw new UnsupportedOperationException(this.getClass().getSimpleName() + ".toImmutable() not implemented yet"); } public <P, V> MutableList<V> collectWith( Function2<? super T, ? super P, ? extends V> function, P parameter) { MutableList<V> result = FastList.newList(); this.collectWith(function, parameter, result); return result; } public TreeBag<T> with(T element) { this.add(element); return this; } public MutableSortedBag<T> newEmpty() { return TreeBag.newBag(this.items.comparator()); } public <P> MutableSortedBag<T> selectWith(final Predicate2<? super T, ? super P> predicate, final P parameter) { final MutableSortedBag<T> result = TreeBag.newBag(this.comparator()); this.forEachWithOccurrences(new ObjectIntProcedure<T>() { public void value(T each, int occurrences) { if (predicate.accept(each, parameter)) { result.addOccurrences(each, occurrences); } } }); return result; } public <P> MutableSortedBag<T> rejectWith(final Predicate2<? super T, ? 
super P> predicate, final P parameter) { final MutableSortedBag<T> result = TreeBag.newBag(this.comparator()); this.forEachWithOccurrences(new ObjectIntProcedure<T>() { public void value(T each, int index) { if (!predicate.accept(each, parameter)) { result.addOccurrences(each, index); } } }); return result; } @Override public void removeIf(Predicate<? super T> predicate) { Set<Map.Entry<T, Counter>> entries = this.items.entrySet(); for (Iterator<Map.Entry<T, Counter>> iterator = entries.iterator(); iterator.hasNext(); ) { Map.Entry<T, Counter> entry = iterator.next(); if (predicate.accept(entry.getKey())) { this.size -= entry.getValue().getCount(); iterator.remove(); } } } @Override public <P> void removeIfWith(Predicate2<? super T, ? super P> predicate, P parameter) { Set<Map.Entry<T, Counter>> entries = this.items.entrySet(); for (Iterator<Map.Entry<T, Counter>> iterator = entries.iterator(); iterator.hasNext(); ) { Map.Entry<T, Counter> entry = iterator.next(); if (predicate.accept(entry.getKey(), parameter)) { this.size -= entry.getValue().getCount(); iterator.remove(); } } } @Override public <P> T detectWithIfNone( final Predicate2<? super T, ? super P> predicate, final P parameter, Function0<? 
extends T> function) { return this.items.keysView().detectIfNone(new Predicate<T>() { public boolean accept(T each) { return predicate.accept(each, parameter); } }, function); } public UnmodifiableSortedBag<T> asUnmodifiable() { return UnmodifiableSortedBag.of(this); } public MutableSortedBag<T> asSynchronized() { return SynchronizedSortedBag.of(this); } @Beta public ParallelSortedBag<T> asParallel(ExecutorService executorService, int batchSize) { if (executorService == null) { throw new NullPointerException(); } if (batchSize < 1) { throw new IllegalArgumentException(); } throw new UnsupportedOperationException(this.getClass().getSimpleName() + ".asParallel() not implemented yet"); } @Override public boolean removeAllIterable(Iterable<?> iterable) { int oldSize = this.size; for (Object each : iterable) { Counter removed = this.items.remove(each); if (removed != null) { this.size -= removed.getCount(); } } return this.size != oldSize; } @Override public boolean retainAllIterable(Iterable<?> iterable) { int oldSize = this.size; this.removeIfWith(Predicates2.notIn(), UnifiedSet.newSet(iterable)); return this.size != oldSize; } public int size() { return this.size; } public MutableSortedBag<T> reject(final Predicate<? super T> predicate) { final MutableSortedBag<T> result = TreeBag.newBag(this.comparator()); this.forEachWithOccurrences(new ObjectIntProcedure<T>() { public void value(T each, int index) { if (!predicate.accept(each)) { result.addOccurrences(each, index); } } }); return result; } public PartitionMutableSortedBag<T> partition(final Predicate<? super T> predicate) { final PartitionMutableSortedBag<T> result = new PartitionTreeBag<T>(this.comparator()); this.forEachWithOccurrences(new ObjectIntProcedure<T>() { public void value(T each, int index) { MutableSortedBag<T> bucket = predicate.accept(each) ? 
result.getSelected() : result.getRejected(); bucket.addOccurrences(each, index); } }); return result; } public <P> PartitionMutableSortedBag<T> partitionWith(final Predicate2<? super T, ? super P> predicate, final P parameter) { final PartitionMutableSortedBag<T> result = new PartitionTreeBag<T>(this.comparator()); this.forEachWithOccurrences(new ObjectIntProcedure<T>() { public void value(T each, int index) { MutableSortedBag<T> bucket = predicate.accept(each, parameter) ? result.getSelected() : result.getRejected(); bucket.addOccurrences(each, index); } }); return result; } public PartitionMutableSortedBag<T> partitionWhile(Predicate<? super T> predicate) { PartitionTreeBag<T> result = new PartitionTreeBag<T>(this.comparator()); return IterableIterate.partitionWhile(this, predicate, result); } public <S> MutableSortedBag<S> selectInstancesOf(final Class<S> clazz) { Comparator<? super S> comparator = (Comparator<? super S>) this.comparator(); final MutableSortedBag<S> result = TreeBag.newBag(comparator); this.forEachWithOccurrences(new ObjectIntProcedure<T>() { public void value(T each, int occurrences) { if (clazz.isInstance(each)) { result.addOccurrences(clazz.cast(each), occurrences); } } }); return result; } public <V> TreeBagMultimap<V, T> groupBy(Function<? super T, ? extends V> function) { return this.groupBy(function, TreeBagMultimap.<V, T>newMultimap(this.comparator())); } public <V> TreeBagMultimap<V, T> groupByEach(Function<? super T, ? extends Iterable<V>> function) { return this.groupByEach(function, TreeBagMultimap.<V, T>newMultimap(this.comparator())); } public MutableByteList collectByte(final ByteFunction<? 
super T> byteFunction) { final MutableByteList result = new ByteArrayList(); this.forEachWithOccurrences(new ObjectIntProcedure<T>() { public void value(T each, int occurrences) { byte element = byteFunction.byteValueOf(each); for (int i = 0; i < occurrences; i++) { result.add(element); } } }); return result; } public int indexOf(Object object) { if (this.items.containsKey(object)) { long result = this.items.headMap((T) object).values().sumOfInt(Counter.TO_COUNT); if (result > Integer.MAX_VALUE) { throw new IllegalStateException(); } return (int) result; } return -1; } public T getFirst() { return this.items.keysView().getFirst(); } public MutableSortedSet<Pair<T, Integer>> zipWithIndex() { return this.zipWithIndex(TreeSortedSet.<Pair<T, Integer>>newSet()); } public T getLast() { return this.items.keysView().getLast(); } public <V> MutableList<V> collect(final Function<? super T, ? extends V> function) { final MutableList<V> result = FastList.newList(); this.forEachWithOccurrences(new ObjectIntProcedure<T>() { public void value(T each, int occurrences) { V value = function.valueOf(each); for (int i = 0; i < occurrences; i++) { result.add(value); } } }); return result; } public <V> MutableList<V> flatCollect(Function<? super T, ? extends Iterable<V>> function) { MutableList<V> result = FastList.newList(); this.flatCollect(function, result); return result; } @Override public <V, R extends Collection<V>> R flatCollect(final Function<? super T, ? extends Iterable<V>> function, final R target) { this.forEachWithOccurrences(new ObjectIntProcedure<T>() { public void value(T each, int occurrences) { Iterable<V> values = function.valueOf(each); for (int i = 0; i < occurrences; i++) { Iterate.forEach(values, new Procedure<V>() { public void value(V each) { target.add(each); } }); } } }); return target; } public MutableSortedSet<T> distinct() { return TreeSortedSet.newSet(this.comparator(), this.items.keySet()); } public MutableSortedBag<T> takeWhile(Predicate<? 
super T> predicate) { MutableSortedBag<T> result = TreeBag.newBag(this.comparator()); return IterableIterate.takeWhile(this, predicate, result); } public MutableSortedBag<T> dropWhile(Predicate<? super T> predicate) { MutableSortedBag<T> result = TreeBag.newBag(this.comparator()); return IterableIterate.dropWhile(this, predicate, result); } public MutableSortedBag<T> select(final Predicate<? super T> predicate) { final MutableSortedBag<T> result = TreeBag.newBag(this.comparator()); this.forEachWithOccurrences(new ObjectIntProcedure<T>() { public void value(T each, int occurrences) { if (predicate.accept(each)) { result.addOccurrences(each, occurrences); } } }); return result; } public MutableBooleanList collectBoolean(BooleanFunction<? super T> booleanFunction) { return this.collectBoolean(booleanFunction, new BooleanArrayList()); } public MutableCharList collectChar(CharFunction<? super T> charFunction) { return this.collectChar(charFunction, new CharArrayList()); } public MutableDoubleList collectDouble(DoubleFunction<? super T> doubleFunction) { return this.collectDouble(doubleFunction, new DoubleArrayList()); } public MutableFloatList collectFloat(FloatFunction<? super T> floatFunction) { return this.collectFloat(floatFunction, new FloatArrayList()); } public MutableIntList collectInt(IntFunction<? super T> intFunction) { return this.collectInt(intFunction, new IntArrayList()); } public MutableLongList collectLong(LongFunction<? super T> longFunction) { return this.collectLong(longFunction, new LongArrayList()); } public MutableShortList collectShort(ShortFunction<? super T> shortFunction) { return this.collectShort(shortFunction, new ShortArrayList()); } public <V> MutableList<V> collectIf( final Predicate<? super T> predicate, final Function<? super T, ? 
extends V> function) { final MutableList<V> result = FastList.newList(); this.forEachWithOccurrences(new ObjectIntProcedure<T>() { public void value(T each, int occurrences) { if (predicate.accept(each)) { V element = function.valueOf(each); for (int i = 0; i < occurrences; i++) { result.add(element); } } } }); return result; } public <S> MutableList<Pair<T, S>> zip(Iterable<S> that) { return this.zip(that, FastList.<Pair<T, S>>newList()); } @Override public T detect(Predicate<? super T> predicate) { return this.items.keysView().detect(predicate); } @Override public <P> T detectWith(Predicate2<? super T, ? super P> predicate, P parameter) { return this.items.keysView().detectWith(predicate, parameter); } @Override public T detectIfNone(Predicate<? super T> predicate, Function0<? extends T> function) { return this.items.keysView().detectIfNone(predicate, function); } public int detectIndex(Predicate<? super T> predicate) { return Iterate.detectIndex(this, predicate); } public <S> boolean corresponds(OrderedIterable<S> other, Predicate2<? super T, ? super S> predicate) { return OrderedIterate.corresponds(this, other, predicate); } @Override public boolean anySatisfy(Predicate<? super T> predicate) { return this.items.keysView().anySatisfy(predicate); } @Override public <P> boolean anySatisfyWith(Predicate2<? super T, ? super P> predicate, P parameter) { return this.items.keysView().anySatisfyWith(predicate, parameter); } @Override public boolean allSatisfy(Predicate<? super T> predicate) { return this.items.keysView().allSatisfy(predicate); } @Override public <P> boolean allSatisfyWith(Predicate2<? super T, ? super P> predicate, P parameter) { return this.items.keysView().allSatisfyWith(predicate, parameter); } @Override public boolean noneSatisfy(Predicate<? super T> predicate) { return this.items.keysView().noneSatisfy(predicate); } @Override public <P> boolean noneSatisfyWith(Predicate2<? super T, ? 
super P> predicate, P parameter) { return this.items.keysView().noneSatisfyWith(predicate, parameter); } public MutableStack<T> toStack() { return ArrayStack.newStack(this); } @Override public T min(Comparator<? super T> comparator) { return this.items.keysView().min(comparator); } @Override public T max(Comparator<? super T> comparator) { return this.items.keysView().max(comparator); } @Override public T min() { return this.items.keysView().min(); } @Override public T max() { return this.items.keysView().max(); } @Override public <V extends Comparable<? super V>> T minBy(Function<? super T, ? extends V> function) { return this.items.keysView().minBy(function); } @Override public <V extends Comparable<? super V>> T maxBy(Function<? super T, ? extends V> function) { return this.items.keysView().maxBy(function); } public Comparator<? super T> comparator() { return this.items.comparator(); } public TreeBag<T> with(T... elements) { this.addAll(Arrays.asList(elements)); return this; } public TreeBag<T> with(T element1, T element2) { this.add(element1); this.add(element2); return this; } @Override public boolean add(T item) { Counter counter = this.items.getIfAbsentPut(item, NEW_COUNTER_BLOCK); counter.increment(); this.size++; return true; } public TreeBag<T> with(T element1, T element2, T element3) { this.add(element1); this.add(element2); this.add(element3); return this; } private class InternalIterator implements Iterator<T> { private int position; private boolean isCurrentKeySet; private int currentKeyPosition; private int currentKeyOccurrences; private Iterator<Pair<T, Counter>> keyValueIterator = TreeBag.this.items.keyValuesView().iterator(); private Pair<T, Counter> currentKeyValue; public boolean hasNext() { return this.position != TreeBag.this.size; } public T next() { if (!this.hasNext()) { throw new NoSuchElementException(); } this.isCurrentKeySet = true; if (this.currentKeyPosition < this.currentKeyOccurrences) { this.currentKeyPosition++; this.position++; 
return this.currentKeyValue.getOne(); } this.currentKeyValue = this.keyValueIterator.next(); this.currentKeyPosition = 1; this.currentKeyOccurrences = this.currentKeyValue.getTwo().getCount(); this.position++; return this.currentKeyValue.getOne(); } public void remove() { if (!this.isCurrentKeySet) { throw new IllegalStateException(); } this.isCurrentKeySet = false; this.position--; TreeBag.this.remove(this.currentKeyValue.getOne()); this.keyValueIterator = TreeBag.this.items.keyValuesView().iterator(); this.currentKeyOccurrences--; this.currentKeyPosition--; } } }
brian-kelley/seacas
docs/apr_html/apr__units_8cc.js
<reponame>brian-kelley/seacas var apr__units_8cc = [ [ "unit_systems", "structSEAMS_1_1anonymous__namespace_02apr__units_8cc_03_1_1unit__systems.html", "structSEAMS_1_1anonymous__namespace_02apr__units_8cc_03_1_1unit__systems" ], [ "comment", "apr__units_8cc.html#a6771b8f4b4e92fa65594a32fd5cedb1d", null ], [ "define_var", "apr__units_8cc.html#ab5b7860231f3d5feebe9fae95f120a72", null ], [ "do_Units", "apr__units_8cc.html#a0006d397eeb4158d75c33aa2e40026bc", null ], [ "load_conversion", "apr__units_8cc.html#a2e9659876c6a92b2179c0f6945f84aca", null ], [ "cgs", "apr__units_8cc.html#af53daf0742462644441db32b7854d5ec", null ], [ "cgs_ev", "apr__units_8cc.html#a5df15aa80c49406c261f390152c0e98d", null ], [ "cgs_ev_label", "apr__units_8cc.html#a22c3481cff9c96d6f94c85c5212abb01", null ], [ "cgs_label", "apr__units_8cc.html#af5782839edf6a72b8d08f5dce4d10ce9", null ], [ "ft_lbf_s", "apr__units_8cc.html#a295e48dc8e650db6da5386dd23e2de75", null ], [ "ft_lbf_s_label", "apr__units_8cc.html#a467b06007c6a2f8eaa8f50c6681fb248", null ], [ "ft_lbm_s", "apr__units_8cc.html#a2220c59e9a28b4c6625ad8dff878c4eb", null ], [ "ft_lbm_s_label", "apr__units_8cc.html#a8b35598cbb1f4658649f5925272a23b2", null ], [ "in_lbf_s", "apr__units_8cc.html#a0a941f8afd739c800af260d813c3bb5d", null ], [ "in_lbf_s_label", "apr__units_8cc.html#a2f0a94df916d190d6f20bee819163332", null ], [ "LBF_TO_N", "apr__units_8cc.html#ad2886e138d69df2b71d658e0ed9c4722", null ], [ "PI", "apr__units_8cc.html#a1fdc6ec0bab3d1a1153dc83a15206f3d", null ], [ "shock", "apr__units_8cc.html#a625a35df442455bbe514a20e97ac51ca", null ], [ "shock_label", "apr__units_8cc.html#a197cf19c3aa1c044c7fae3cd75f62edb", null ], [ "si", "apr__units_8cc.html#a15f7cbe92c79fc34ea503064f00950d3", null ], [ "si_label", "apr__units_8cc.html#ac285675b0d7751c584262ea6149de934", null ], [ "swap", "apr__units_8cc.html#a2e17cb56b857779aaeb299f7af83db20", null ], [ "swap_label", "apr__units_8cc.html#ad2d06c3769cb804bc73332edc248131c", null ], [ "systems", 
"apr__units_8cc.html#a1a9885027c483cf8bbf35169ac14ba93", null ] ];
sgholamian/log-aware-clone-detection
LACCPlus/Hadoop/78_2.java
//,temp,FsDatasetImpl.java,1218,1241,temp,FsDatasetImpl.java,1061,1097 //,3 public class xxx { @Override // FsDatasetSpi public synchronized ReplicaHandler append(ExtendedBlock b, long newGS, long expectedBlockLen) throws IOException { // If the block was successfully finalized because all packets // were successfully processed at the Datanode but the ack for // some of the packets were not received by the client. The client // re-opens the connection and retries sending those packets. // The other reason is that an "append" is occurring to this block. // check the validity of the parameter if (newGS < b.getGenerationStamp()) { throw new IOException("The new generation stamp " + newGS + " should be greater than the replica " + b + "'s generation stamp"); } ReplicaInfo replicaInfo = getReplicaInfo(b); LOG.info("Appending to " + replicaInfo); if (replicaInfo.getState() != ReplicaState.FINALIZED) { throw new ReplicaNotFoundException( ReplicaNotFoundException.UNFINALIZED_REPLICA + b); } if (replicaInfo.getNumBytes() != expectedBlockLen) { throw new IOException("Corrupted replica " + replicaInfo + " with a length of " + replicaInfo.getNumBytes() + " expected length is " + expectedBlockLen); } FsVolumeReference ref = replicaInfo.getVolume().obtainReference(); ReplicaBeingWritten replica = null; try { replica = append(b.getBlockPoolId(), (FinalizedReplica)replicaInfo, newGS, b.getNumBytes()); } catch (IOException e) { IOUtils.cleanup(null, ref); throw e; } return new ReplicaHandler(replica, ref); } };
Pavel3333/xray-oxygen
code/engine.vc2008/xrGame/base_client_classes.h
////////////////////////////////////////////////////////////////////////////
// Module : base_client_classes.h
// Created : 20.12.2004
// Modified : 20.12.2004
// Author : <NAME>
// Description : XRay base client classes script export
////////////////////////////////////////////////////////////////////////////

#pragma once

#include "../xrScripts/export/script_export_space.h"

// Forward declarations of the engine classes being exported to script.
class DLL_Pure;
class ISpatial;
class ISheduled;
class IRenderable;
class ICollidable;
class CObject;
class IRenderVisual;
class FHierrarhyVisual;
class CBlend;
class IKinematics;
class IKinematicsAnimated;

// Export pattern: for each class we declare a class_exporter<T> alias, append
// it to the compile-time type list via add_to_type_list(), then rebuild the
// script_type_list macro so the next entry chains onto this one.
// NOTE(review): add_to_type_list/save_type_list come from
// script_export_space.h — the exact list mechanics live there; keep the
// undef/define ordering exactly as-is, it is what links the chain.
using DLL_PureScript = class_exporter<DLL_Pure>;
add_to_type_list(DLL_PureScript)
#undef script_type_list
#define script_type_list save_type_list(DLL_PureScript)

// ISpatial export is currently disabled.
//typedef class_exporter<ISpatial> ISpatialScript;
//add_to_type_list(ISpatialScript)
//#undef script_type_list
//#define script_type_list save_type_list(ISpatialScript)

using ISheduledScript = class_exporter<ISheduled>;
add_to_type_list(ISheduledScript)
#undef script_type_list
#define script_type_list save_type_list(ISheduledScript)

using IRenderableScript = class_exporter<IRenderable>;
add_to_type_list(IRenderableScript)
#undef script_type_list
#define script_type_list save_type_list(IRenderableScript)

using ICollidableScript = class_exporter<ICollidable>;
add_to_type_list(ICollidableScript)
#undef script_type_list
#define script_type_list save_type_list(ICollidableScript)

using CObjectScript = class_exporter<CObject>;
add_to_type_list(CObjectScript)
#undef script_type_list
#define script_type_list save_type_list(CObjectScript)

using CBlendScript = class_exporter<CBlend>;
add_to_type_list(CBlendScript)
#undef script_type_list
#define script_type_list save_type_list(CBlendScript)

using IRender_VisualScript = class_exporter<IRenderVisual>;
add_to_type_list(IRender_VisualScript)
#undef script_type_list
#define script_type_list save_type_list(IRender_VisualScript)

// FHierrarhyVisual and IKinematics exports are currently disabled.
/*
typedef class_exporter<FHierrarhyVisual> FHierrarhyVisualScript;
add_to_type_list(FHierrarhyVisualScript)
#undef script_type_list
#define script_type_list save_type_list(FHierrarhyVisualScript)

typedef class_exporter<IKinematics> IKinematicsScript;
add_to_type_list(IKinematicsScript)
#undef script_type_list
#define script_type_list save_type_list(IKinematicsScript)
*/

using IKinematicsAnimatedScript = class_exporter<IKinematicsAnimated>;
add_to_type_list(IKinematicsAnimatedScript)
#undef script_type_list
#define script_type_list save_type_list(IKinematicsAnimatedScript)
sarataha/yalla-netlob
node_modules/fileupload/index.js
/*!
 * <NAME> <<EMAIL>>
 *
 * New BSD Licensed
 *
 * Wednesday 18th Jan 2012
 */

// Package entry point: everything is implemented in lib/fileupload.js;
// this file simply re-exports it.
const fileupload = require('./lib/fileupload.js')

module.exports = fileupload
jhockett/amplify-ci-support
src/credentials_rotators/npm/stacks/common_stack.py
import subprocess

from aws_cdk import core
from aws_cdk.aws_cloudwatch_actions import SnsAction
from aws_cdk.aws_iam import Effect, PolicyStatement, ServicePrincipal
from aws_cdk.aws_lambda import Code, LayerVersion
from aws_cdk.aws_secretsmanager import Secret
from aws_cdk.aws_sns import Topic
from aws_cdk.aws_sns_subscriptions import EmailSubscription

from lambda_functions.secrets_config_utils import (get_alarm_subscriptions,
                                                   get_secret_arn,
                                                   get_secret_key,
                                                   get_secrets_config)


class CommonStack(core.Stack):
    """Base stack holding resources and helpers shared by all rotator stacks."""

    def __init__(self, scope: core.Construct, construct_id: str, **kwargs) -> None:
        super().__init__(scope, construct_id, **kwargs)
        # Shared lambda layer with the third-party packages the rotators need.
        self.dependencies_lambda_layer = self.create_dependencies_layer()
        # Parsed contents of secrets_config.json, used by the child stacks.
        self.secrets_config = get_secrets_config()

    def create_dependencies_layer(self) -> LayerVersion:
        """Build a lambda layer containing the external packages (pyotp,
        requests) required for secret rotation.

        The packages are pip-installed into <output_dir>/python so the layer
        follows the lambda layer directory convention.
        """
        requirements_file = 'lambda_layers/external_dependencies/requirements.txt'
        output_dir = 'lambda_layers/external_dependencies'
        pip_command = f'pip3 install --upgrade -r {requirements_file} -t {output_dir}/python'
        subprocess.check_call(pip_command.split())
        return LayerVersion(self, 'external-dependencies', code=Code.from_asset(output_dir))

    def grant_secrets_manager_access_to_lambda(self, rotator_lambda):
        """Attach a resource-based policy letting the Secrets Manager service
        invoke the rotation lambda.

        See: https://docs.aws.amazon.com/secretsmanager/latest/userguide/troubleshoot_rotation.html#tshoot-lambda-initialconfig-perms

        Args:
            rotator_lambda (Function): The lambda function used for rotating a secret
        """
        principal = ServicePrincipal(service='secretsmanager.amazonaws.com')
        rotator_lambda.add_permission('invoke_access_to_secrets_manager',
                                      principal=principal)

    def grant_lambda_access_to_rotate_secret(self, rotator_lambda, secret_config):
        """Give the lambda's role the Secrets Manager permissions needed to
        rotate the configured secret.

        See: https://docs.aws.amazon.com/secretsmanager/latest/userguide/troubleshoot_rotation.html#tshoot-lambda-accessdeniedduringrotation

        Args:
            rotator_lambda (Function): The lambda function used for rotating a secret
            secret_config (Dictionary): The configuration for the secret
                specified in secrets_config.json
        """
        target_arn = get_secret_arn(secret_config)
        # Full rotation permissions, scoped to the one secret being rotated.
        rotate_statement = PolicyStatement(
            effect=Effect.ALLOW,
            resources=[target_arn],
            actions=["secretsmanager:DescribeSecret",
                     "secretsmanager:GetSecretValue",
                     "secretsmanager:PutSecretValue",
                     "secretsmanager:UpdateSecretVersionStage"])
        rotator_lambda.add_to_role_policy(rotate_statement)
        # GetRandomPassword is not resource-scoped, hence the wildcard.
        random_statement = PolicyStatement(
            effect=Effect.ALLOW,
            resources=['*'],
            actions=["secretsmanager:GetRandomPassword"])
        rotator_lambda.add_to_role_policy(random_statement)

    def grant_lambda_access_to_secrets(self, rotator_lambda, secret_configs):
        """Give the lambda's role read access to the static secrets it needs
        for authentication during rotation.

        Args:
            rotator_lambda (Function): The lambda function used for rotating a secret
            secret_configs (Dictionary): list of configurations for static
                secrets used for authentication by rotator_lambda
        """
        for config in secret_configs:
            statement = PolicyStatement(
                effect=Effect.ALLOW,
                resources=[get_secret_arn(config)],
                actions=["secretsmanager:GetSecretValue"])
            rotator_lambda.add_to_role_policy(statement)

    def configure_secret_rotation(self, rotator_lambda, secret_config, duration):
        """Attach the rotation lambda and schedule to the configured secret.

        Args:
            rotator_lambda (Function): The lambda function used for rotating a secret
            secret_config (Dictionary): The configuration for the secret
                specified in secrets_config.json
            duration (Duration): The rotation time interval
        """
        arn = get_secret_arn(secret_config)
        key = get_secret_key(secret_config)
        secret = Secret.from_secret_complete_arn(self, key, secret_complete_arn=arn)
        secret.add_rotation_schedule(id=f'{key}-rotator',
                                     automatically_after=duration,
                                     rotation_lambda=rotator_lambda)

    def enable_cloudwatch_alarm_notifications(self, rotator_lambda, secret_id):
        """Alarm on the rotator lambda's error metric and email subscribers.

        Args:
            rotator_lambda (Function): The lambda function used for rotating a secret
            secret_id (string): The identifier corresponding to the secret in
                the secrets_config.json file
        """
        # SNS topic that fans alarm notifications out to the subscribed emails.
        topic = Topic(self, f'{secret_id}_alarm_sns_topic')
        for email in get_alarm_subscriptions(secret_id):
            topic.add_subscription(EmailSubscription(email))

        # Fire on the very first error in any evaluation period.
        alarm = rotator_lambda.metric_errors().create_alarm(
            self, f'{secret_id}_errors_alarm', threshold=1, evaluation_periods=1)
        alarm.add_alarm_action(SnsAction(topic))
zhaozhaojiejie/zz-my-ui
ui/lib/gojs/mixins/finder.js
import {go, toList} from '../utils/lib'
import {download, downloadBlob} from '$ui/utils/download'

// Mixin of data/part lookup and editing helpers for a GoJS diagram component.
// Relies on the host component providing `this.diagram` (a go.Diagram) and
// `this.commit(fn, name)` (a transaction wrapper) — both defined elsewhere.
export default {
  methods: {
    /**
     * Add node(s).
     * @param {Object|Object[]} data node data object or array of data objects
     */
    addNode(data) {
      const model = this.diagram.model
      this.commit(() => {
        Array.isArray(data) ? model.addNodeDataCollection(data) : model.addNodeData(data)
      }, 'addNode')
    },
    /**
     * Add link(s).
     * @param {Object|Object[]} data link data object or array of data objects
     */
    addLink(data) {
      const model = this.diagram.model
      this.commit(() => {
        Array.isArray(data) ? model.addLinkDataCollection(data) : model.addLinkData(data)
      }, 'addLink')
    },
    /**
     * Remove node(s).
     * @param {string|number|function} keyOrFunc node key, or a predicate selecting nodes
     */
    removeNode(keyOrFunc) {
      const nodes = typeof keyOrFunc === 'function' ? this.findNodes(keyOrFunc) : this.findNode(keyOrFunc)
      this.commit(() => {
        this.diagram.model.removeNodeDataCollection([].concat(nodes))
      })
    },
    /**
     * Remove link(s).
     * @param {string|number|function} keyOrFunc link key, or a predicate selecting links
     */
    removeLink(keyOrFunc) {
      const links = typeof keyOrFunc === 'function' ? this.findLinks(keyOrFunc) : this.findLink(keyOrFunc)
      this.commit(() => {
        this.diagram.model.removeLinkDataCollection([].concat(links))
      })
    },
    /**
     * Find a single node.
     * @param {string|number|function} keyOrFunc node key or predicate
     * @param {boolean} [isObject] return the GraphObject instead of the data
     * @returns {Object|go.GraphObject}
     */
    findNode(keyOrFunc, isObject) {
      const model = this.diagram.model
      const nodeDataArray = model.nodeDataArray || []
      const data = typeof keyOrFunc === 'function' ? nodeDataArray.find(keyOrFunc) : model.findNodeDataForKey(keyOrFunc)
      return isObject ? this.diagram.findNodeForData(data) : data
    },
    /**
     * Find a single link.
     * @param {string|number|function} keyOrFunc link key or predicate
     * @param {boolean} [isObject] return the GraphObject instead of the data
     * @returns {Object|go.GraphObject}
     */
    findLink(keyOrFunc, isObject) {
      const model = this.diagram.model
      const linkDataArray = model.linkDataArray || []
      const data = typeof keyOrFunc === 'function' ? linkDataArray.find(keyOrFunc) : model.findLinkDataForKey(keyOrFunc)
      return isObject ? this.diagram.findLinkForData(data) : data
    },
    /**
     * Find multiple nodes.
     * @param {function} filter predicate over node data
     * @param {boolean} [isObject] return GraphObjects instead of data
     * @returns {Object[]|go.GraphObject[]}
     */
    findNodes(filter, isObject) {
      const model = this.diagram.model
      const nodeDataArray = model.nodeDataArray || []
      const dataArray = nodeDataArray.filter(filter)
      return isObject ? dataArray.map(data => this.diagram.findNodeForData(data)) : dataArray
    },
    /**
     * Find multiple links.
     * @param {function} filter predicate over link data
     * @param {boolean} [isObject] return GraphObjects instead of data
     * @returns {Object[]|go.GraphObject[]}
     */
    findLinks(filter, isObject) {
      const model = this.diagram.model
      const linkDataArray = model.linkDataArray || []
      const dataArray = linkDataArray.filter(filter)
      return isObject ? dataArray.map(data => this.diagram.findLinkForData(data)) : dataArray
    },
    /**
     * All node GraphObjects in the diagram.
     * @returns {go.GraphObject[]}
     */
    allNodes() {
      return toList(this.diagram.nodes)
    },
    /**
     * All link GraphObjects in the diagram.
     * @returns {go.GraphObject[]}
     */
    allLinks() {
      return toList(this.diagram.links)
    },
    /**
     * Find a node's child nodes and the links to them.
     * NOTE: returns undefined when the model is neither a GraphLinksModel
     * nor a TreeModel.
     * @param {string|number} key the node's key
     * @param {boolean} [isObject] return GraphObjects instead of data
     * @param {string} [parentKey=parent] property naming the parent in a TreeModel
     * @returns {{nodes, links}}
     */
    findChild(key, isObject = false, parentKey = 'parent') {
      const model = this.diagram.model
      if (model instanceof go.GraphLinksModel) {
        const links = this.findLinks(n => n.from === key)
        const nodes = links.map(link => this.findNode(link.to))
        return isObject ? {
          nodes: nodes.map(data => this.diagram.findNodeForData(data)),
          links: links.map(data => this.diagram.findLinkForData(data))
        } : {nodes, links}
      } else if (model instanceof go.TreeModel) {
        const nodeDataArray = model.nodeDataArray || []
        const nodes = nodeDataArray.filter(item => item[parentKey] === key)
        return {
          nodes: isObject ? nodes.map(data => this.diagram.findNodeForData(data)) : nodes,
          // TreeModel has no link data; only GraphObject links are available.
          links: isObject ? this.allLinks().filter(link => link.fromNode?.key === key) : []
        }
      }
    },
    /**
     * Find the adjacent (directly connected) nodes and links of a node.
     * NOTE: returns undefined when the model is neither a GraphLinksModel
     * nor a TreeModel.
     * @param {string|number} key the node's key
     * @param {boolean} [isObject] return GraphObjects instead of data
     * @param {string} [parentKey=parent] property naming the parent in a TreeModel
     * @returns {{nodes, links}}
     */
    findRelated(key, isObject = false, parentKey = 'parent') {
      const model = this.diagram.model
      if (model instanceof go.GraphLinksModel) {
        const links = this.findLinks(n => n.from === key || n.to === key)
        const nodes = []
        links.forEach(link => {
          // Collect both endpoints of every touching link (includes the node itself).
          let temp = this.findNode(link.to)
          temp && nodes.push(temp)
          temp = this.findNode(link.from)
          temp && nodes.push(temp)
        })
        return isObject ? {
          nodes: nodes.map(data => this.diagram.findNodeForData(data)),
          links: links.map(data => this.diagram.findLinkForData(data))
        } : {nodes, links}
      } else if (model instanceof go.TreeModel) {
        const nodeDataArray = model.nodeDataArray || []
        const currentNode = this.findNode(key)
        const parent = currentNode ? this.findNode(currentNode[parentKey]) : null
        const nodes = nodeDataArray.filter(item => item[parentKey] === key)
        if (parent) {
          nodes.push(parent)
        }
        return {
          nodes: isObject ? nodes.map(data => this.diagram.findNodeForData(data)) : nodes,
          links: isObject ? this.allLinks().filter(link => link.fromNode?.key === key || link.toNode?.key === key) : []
        }
      }
    },
    /**
     * Clear the whole canvas.
     */
    clear() {
      this.diagram.clear()
    },
    // Undo the last transaction; returns true when an undo was performed.
    undo() {
      const manager = this.diagram?.undoManager
      if (manager && manager.canUndo()) {
        manager.undo()
        return true
      }
      return false
    },
    // Redo the last undone transaction; returns true when a redo was performed.
    redo() {
      const manager = this.diagram?.undoManager
      if (manager && manager.canRedo()) {
        manager.redo()
        return true
      }
      return false
    },
    // Select the given parts, or every node and link when parts is omitted.
    select(parts) {
      const diagram = this.diagram
      if (!diagram) return
      if (parts) {
        diagram.selectCollection(parts)
      } else {
        const nodes = toList(diagram.nodes)
        const links = toList(diagram.links)
        const allParts = nodes.concat(links)
        diagram.selectCollection(allParts)
      }
    },
    // Clear the current selection.
    unselect() {
      const diagram = this.diagram
      if (!diagram) return
      diagram.clearSelection()
    },
    // Invert the selection: select exactly the parts not currently selected.
    selectInvert() {
      const diagram = this.diagram
      if (!diagram) return
      const parts = []
      const nodes = diagram.nodes
      const links = diagram.links
      const selection = diagram.selection
      nodes.each(n => {
        if (!selection.has(n)) {
          parts.push(n)
        }
      })
      links.each(n => {
        if (!selection.has(n)) {
          parts.push(n)
        }
      })
      this.select(parts)
    },
    // Hide the given parts (accepts a GoJS iterator or a plain array).
    hide(parts) {
      if (!parts) return
      const list = parts.iterator ? toList(parts) : parts
      this.commit(() => {
        list.forEach(n => {
          n.visible = false
        })
      })
    },
    // Show the given parts, or every node and link when parts is omitted.
    show(parts) {
      const diagram = this.diagram
      if (!diagram) return
      if (parts) {
        const list = parts.iterator ? toList(parts) : parts
        this.commit(() => {
          list.forEach(n => {
            n.visible = true
          })
        })
      } else {
        const nodes = diagram.nodes
        const links = diagram.links
        this.commit(() => {
          nodes.each(n => {
            n.visible = true
          })
          links.each(n => {
            n.visible = true
          })
        })
      }
    },
    // Export the diagram as an image and trigger a browser download.
    toImage(opts) {
      if (!this.diagram) return
      const image = this.diagram.makeImageData(opts)
      download(image, Date.now().toString())
    },
    // Export the model as JSON and trigger a browser download.
    toJson() {
      const model = this.diagram?.model
      if (!model) return
      const json = model.toJson()
      const blob = new Blob([json], {type: 'application/octet-stream'})
      downloadBlob(blob, Date.now().toString() + '.json')
    }
  }
}
bkucera2/cypress
packages/server/test/support/fixtures/projects/plugin-run-events/cypress/integration/run_events_spec_2.js
// Minimal always-passing spec used by the plugin-run-events fixture project.
it('is true', function () {})
QuaMundo/quamundo-legacy
app/models/concerns/benamed.rb
# frozen_string_literal: true

# Mixin for models that carry a human-readable +name+.
# Requires the name to be present and makes URLs read "<id>-<slug>".
module Benamed
  extend ActiveSupport::Concern

  included do
    validates :name, presence: true

    # URL segment: the record id followed by the parameterized name,
    # e.g. "42-my-record". Rails' find() still works because to_i stops
    # at the first non-digit.
    def to_param
      "#{id}-#{name.parameterize}"
    end
  end
end
BlackYoup/gamecq
StopServer/StopServer.cpp
// StopServer.cpp : Defines the entry point for the application. // #include "stdafx.h" #include "Standard/Event.h" #include "Standard/CommandLine.h" int APIENTRY WinMain(HINSTANCE hInstance, HINSTANCE hPrevInstance, LPSTR lpCmdLine, int nCmdShow) { CharString sCmdLine = lpCmdLine; CommandLine cmdLine( sCmdLine ); if ( cmdLine.argumentCount() != 1 ) { MessageBox( NULL, TEXT("Usage: StopServer.exe <PID>"), TEXT("Invalid Command Line"), MB_OK ); return 0; } int pid = strtol( cmdLine.argument( 0 ), NULL, 10 ); if ( pid != 0 ) { Event serverStop( CharString().format("StopProcess%u", pid) ); serverStop.signal(); return 1; } return 0; }
amazinglzy/NRA4JsonStore
src/test/java/jp4js/utils/iter/MultiIterTest.java
package jp4js.utils.iter;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;

import org.junit.Test;

import static org.assertj.core.api.Assertions.assertThat;

/**
 * Verifies that MultiIter performs an ordered k-way merge over several
 * already-sorted input iterators, preserving duplicates.
 */
public class MultiIterTest {
    @Test
    public void basic01_() {
        Iter<Integer> iter1 = new ArrayIter<Integer>(new ArrayList<Integer>(
                Arrays.asList(1, 4, 7)
        ));
        Iter<Integer> iter2 = new ArrayIter<Integer>(new ArrayList<Integer>(
                Arrays.asList(2, 3, 6)
        ));
        Iter<Integer> iter3 = new ArrayIter<Integer>(new ArrayList<Integer>(
                Arrays.asList(2, 5, 9)
        ));
        Iter<Integer> iter4 = new ArrayIter<Integer>(new ArrayList<Integer>(
                Arrays.asList(4, 8, 10)
        ));

        // Natural-order comparator expressed as a method reference.
        Comparator<Integer> byValue = Integer::compareTo;
        Iter<Integer> merged = new MultiIter<>(
                new ArrayList<Iter<Integer>>(Arrays.asList(iter1, iter2, iter3, iter4)),
                byValue
        );

        // The merged stream must yield the union of all inputs in sorted
        // order, duplicates included.
        int[] expected = {1, 2, 2, 3, 4, 4, 5, 6, 7, 8, 9, 10};
        for (int value : expected) {
            assertThat(merged.valid()).isTrue();
            assertThat(merged.read()).isEqualTo(value);
            merged.next();
        }
        assertThat(merged.valid()).isFalse();
    }
}
flesher0813/iotdb
service-rpc/src/main/java/org/apache/iotdb/rpc/RpcUtils.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.iotdb.rpc;

import java.lang.reflect.Proxy;
import java.text.SimpleDateFormat;
import java.time.Instant;
import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.iotdb.service.rpc.thrift.EndPoint;
import org.apache.iotdb.service.rpc.thrift.TSExecuteStatementResp;
import org.apache.iotdb.service.rpc.thrift.TSFetchResultsResp;
import org.apache.iotdb.service.rpc.thrift.TSIService;
import org.apache.iotdb.service.rpc.thrift.TSInsertTabletsReq;
import org.apache.iotdb.service.rpc.thrift.TSStatus;

/**
 * Static helpers shared by the RPC layer: status verification, status/response
 * construction, and timestamp formatting.
 */
public class RpcUtils {

  /**
   * How big should the default read and write buffers be?
   */
  public static final int DEFAULT_BUF_CAPACITY = 64 * 1024;
  /**
   * How big is the largest allowable frame? Defaults to 16MB.
   */
  public static final int DEFAULT_MAX_LENGTH = 16384000;

  private RpcUtils() {
    // util class
  }

  public static final TSStatus SUCCESS_STATUS = new TSStatus(
      TSStatusCode.SUCCESS_STATUS.getStatusCode());

  /**
   * Wraps a thrift client so that all of its calls are serialized through
   * {@link SynchronizedHandler}.
   *
   * @param client the raw (non-thread-safe) thrift client
   * @return a proxy exposing the same interface with synchronized calls
   */
  public static TSIService.Iface newSynchronizedClient(TSIService.Iface client) {
    return (TSIService.Iface) Proxy.newProxyInstance(RpcUtils.class.getClassLoader(),
        new Class[]{TSIService.Iface.class}, new SynchronizedHandler(client));
  }

  /**
   * verify success.
   *
   * @param status -status
   * @throws StatementExecutionException if the status is neither success nor redirection
   */
  public static void verifySuccess(TSStatus status) throws StatementExecutionException {
    // A MULTIPLE_ERROR aggregates per-statement results; check each sub-status.
    if (status.getCode() == TSStatusCode.MULTIPLE_ERROR.getStatusCode()) {
      verifySuccess(status.getSubStatus());
      return;
    }
    // Redirection is not a failure; the caller decides how to follow it.
    if (status.getCode() == TSStatusCode.NEED_REDIRECTION.getStatusCode()) {
      return;
    }
    if (status.code != TSStatusCode.SUCCESS_STATUS.getStatusCode()) {
      throw new StatementExecutionException(status);
    }
  }

  /**
   * Verifies success and, when the server supplied a redirect node, raises a
   * {@link RedirectException} carrying that node.
   */
  public static void verifySuccessWithRedirection(TSStatus status)
      throws StatementExecutionException, RedirectException {
    verifySuccess(status);
    if (status.isSetRedirectNode()) {
      throw new RedirectException(status.getRedirectNode());
    }
  }

  /**
   * Variant of {@link #verifySuccessWithRedirection(TSStatus)} for insertTablets:
   * collects the per-device redirect nodes from the sub-statuses and raises a
   * single {@link RedirectException} with the device-to-endpoint map.
   */
  public static void verifySuccessWithRedirectionForInsertTablets(TSStatus status,
      TSInsertTabletsReq req)
      throws StatementExecutionException, RedirectException {
    verifySuccess(status);
    if (status.getCode() == TSStatusCode.MULTIPLE_ERROR.getStatusCode()) {
      Map<String, EndPoint> deviceEndPointMap = new HashMap<>();
      List<TSStatus> statusSubStatus = status.getSubStatus();
      // Sub-statuses are positionally aligned with the request's device ids.
      for (int i = 0; i < statusSubStatus.size(); i++) {
        TSStatus subStatus = statusSubStatus.get(i);
        if (subStatus.isSetRedirectNode()) {
          deviceEndPointMap.put(req.getDeviceIds().get(i), subStatus.getRedirectNode());
        }
      }
      throw new RedirectException(deviceEndPointMap);
    }
  }

  /**
   * Verifies a batch of statuses, accumulating every non-success,
   * non-redirection message into one {@link BatchExecutionException}.
   */
  public static void verifySuccess(List<TSStatus> statuses) throws BatchExecutionException {
    StringBuilder errMsgs = new StringBuilder();
    for (TSStatus status : statuses) {
      if (status.getCode() != TSStatusCode.SUCCESS_STATUS.getStatusCode()
          && status.getCode() != TSStatusCode.NEED_REDIRECTION.getStatusCode()) {
        errMsgs.append(status.getMessage()).append(";");
      }
    }
    if (errMsgs.length() > 0) {
      throw new BatchExecutionException(statuses, errMsgs.toString());
    }
  }

  /**
   * convert from TSStatusCode to TSStatus according to status code and status message
   */
  public static TSStatus getStatus(TSStatusCode tsStatusCode) {
    return new TSStatus(tsStatusCode.getStatusCode());
  }

  /**
   * Wraps a list of statuses into a single MULTIPLE_ERROR status.
   */
  public static TSStatus getStatus(List<TSStatus> statusList) {
    TSStatus status = new TSStatus(TSStatusCode.MULTIPLE_ERROR.getStatusCode());
    status.setSubStatus(statusList);
    return status;
  }

  /**
   * convert from TSStatusCode to TSStatus, which has message appending with existed status message
   *
   * @param tsStatusCode status type
   * @param message appending message
   */
  public static TSStatus getStatus(TSStatusCode tsStatusCode, String message) {
    TSStatus status = new TSStatus(tsStatusCode.getStatusCode());
    status.setMessage(message);
    return status;
  }

  /**
   * Builds a TSStatus from a raw status code and message.
   */
  public static TSStatus getStatus(int code, String message) {
    TSStatus status = new TSStatus(code);
    status.setMessage(message);
    return status;
  }

  public static TSExecuteStatementResp getTSExecuteStatementResp(TSStatusCode tsStatusCode) {
    TSStatus status = getStatus(tsStatusCode);
    return getTSExecuteStatementResp(status);
  }

  public static TSExecuteStatementResp getTSExecuteStatementResp(TSStatusCode tsStatusCode,
      String message) {
    TSStatus status = getStatus(tsStatusCode, message);
    return getTSExecuteStatementResp(status);
  }

  /**
   * Builds an execute-statement response carrying a defensive copy of the status.
   */
  public static TSExecuteStatementResp getTSExecuteStatementResp(TSStatus status) {
    TSExecuteStatementResp resp = new TSExecuteStatementResp();
    TSStatus tsStatus = new TSStatus(status);
    resp.setStatus(tsStatus);
    return resp;
  }

  public static TSFetchResultsResp getTSFetchResultsResp(TSStatusCode tsStatusCode) {
    TSStatus status = getStatus(tsStatusCode);
    return getTSFetchResultsResp(status);
  }

  public static TSFetchResultsResp getTSFetchResultsResp(TSStatusCode tsStatusCode,
      String appendMessage) {
    TSStatus status = getStatus(tsStatusCode, appendMessage);
    return getTSFetchResultsResp(status);
  }

  /**
   * Builds a fetch-results response carrying a defensive copy of the status.
   */
  public static TSFetchResultsResp getTSFetchResultsResp(TSStatus status) {
    TSFetchResultsResp resp = new TSFetchResultsResp();
    TSStatus tsStatus = new TSStatus(status);
    resp.setStatus(tsStatus);
    return resp;
  }

  public static final String DEFAULT_TIME_FORMAT = "default";
  public static final String DEFAULT_TIMESTAMP_PRECISION = "ms";

  /**
   * Validates and normalizes a time-format string.
   *
   * "long"/"number"/"default"/"iso8601" (case-insensitive) are accepted and
   * returned lower-cased; anything else must be a legal SimpleDateFormat
   * pattern and is returned unchanged.
   *
   * @throws IllegalArgumentException if the pattern is not a legal date format
   */
  public static String setTimeFormat(String newTimeFormat) {
    String timeFormat;
    switch (newTimeFormat.trim().toLowerCase()) {
      case "long":
      case "number":
      case DEFAULT_TIME_FORMAT:
      case "iso8601":
        timeFormat = newTimeFormat.trim().toLowerCase();
        break;
      default:
        // use java default SimpleDateFormat to check whether input time format is legal
        // if illegal, it will throw an exception
        new SimpleDateFormat(newTimeFormat.trim());
        timeFormat = newTimeFormat;
        break;
    }
    return timeFormat;
  }

  /**
   * Renders a timestamp according to the chosen time format.
   *
   * @param timeFormat    "long"/"number" for the raw value, "default"/"iso8601"
   *                      for ISO-8601 with fractional seconds, otherwise a
   *                      DateTimeFormatter pattern
   * @param timePrecision "ms", "us" or anything else for ns (see
   *                      {@link #parseLongToDateWithPrecision})
   * @param timestamp     the epoch timestamp in the given precision
   * @param zoneId        time zone used for rendering
   */
  public static String formatDatetime(String timeFormat, String timePrecision, long timestamp,
      ZoneId zoneId) {
    switch (timeFormat) {
      case "long":
      case "number":
        return Long.toString(timestamp);
      case DEFAULT_TIME_FORMAT:
      case "iso8601":
        return parseLongToDateWithPrecision(
            DateTimeFormatter.ISO_OFFSET_DATE_TIME, timestamp, zoneId, timePrecision);
      default:
        return ZonedDateTime.ofInstant(Instant.ofEpochMilli(timestamp), zoneId)
            .format(DateTimeFormatter.ofPattern(timeFormat));
    }
  }

  /**
   * Formats an epoch timestamp of the given precision as an ISO-style string
   * with the full sub-second fraction spliced in after the seconds field.
   *
   * The formatted "yyyy-MM-ddTHH:mm:ss" prefix is assumed to occupy exactly 19
   * characters (true for ISO_OFFSET_DATE_TIME); the fraction is inserted at
   * position 19, before the zone offset.
   *
   * @param formatter          formatter producing the base date-time string
   * @param timestamp          epoch timestamp in the given precision
   * @param zoneid             time zone used for rendering
   * @param timestampPrecision "ms" for milliseconds, "us" for microseconds,
   *                           anything else is treated as nanoseconds
   */
  public static String parseLongToDateWithPrecision(DateTimeFormatter formatter,
      long timestamp, ZoneId zoneid, String timestampPrecision) {
    // The three precisions differ only in divisor and fraction width;
    // select them once instead of triplicating the divide/pad/splice logic.
    final long divisor;
    final int fractionWidth;
    if (timestampPrecision.equals("ms")) {
      divisor = 1000L;
      fractionWidth = 3;
    } else if (timestampPrecision.equals("us")) {
      divisor = 1000_000L;
      fractionWidth = 6;
    } else {
      divisor = 1000_000_000L;
      fractionWidth = 9;
    }
    long integerofDate = timestamp / divisor;
    String digits = zeroPad(timestamp % divisor, fractionWidth);
    String datetime = ZonedDateTime
        .ofInstant(Instant.ofEpochSecond(integerofDate), zoneid).format(formatter);
    return datetime.substring(0, 19) + "." + digits + datetime.substring(19);
  }

  /**
   * Left-pads the decimal representation of {@code value} with zeros up to
   * {@code width} characters; values already at least that wide are returned
   * unchanged (mirrors the original per-precision padding loops).
   */
  private static String zeroPad(long value, int width) {
    StringBuilder digits = new StringBuilder(Long.toString(value));
    while (digits.length() < width) {
      digits.insert(0, "0");
    }
    return digits.toString();
  }
}
mapmeld/GeoGit
src/core/src/main/java/org/geogit/api/porcelain/FetchOp.java
/* Copyright (c) 2013 OpenPlans. All rights reserved. * This code is licensed under the BSD New License, available at the root * application directory. */ package org.geogit.api.porcelain; import java.io.IOException; import java.util.ArrayList; import java.util.List; import org.geogit.api.AbstractGeoGitOp; import org.geogit.api.GlobalInjectorBuilder; import org.geogit.api.ObjectId; import org.geogit.api.Ref; import org.geogit.api.Remote; import org.geogit.api.SymRef; import org.geogit.api.plumbing.LsRemote; import org.geogit.api.plumbing.UpdateRef; import org.geogit.api.plumbing.UpdateSymRef; import org.geogit.api.porcelain.ConfigOp.ConfigAction; import org.geogit.api.porcelain.ConfigOp.ConfigScope; import org.geogit.api.porcelain.FetchResult.ChangedRef; import org.geogit.api.porcelain.FetchResult.ChangedRef.ChangeTypes; import org.geogit.remote.IRemoteRepo; import org.geogit.remote.RemoteUtils; import org.geogit.repository.Repository; import org.opengis.util.ProgressListener; import com.google.common.base.Optional; import com.google.common.base.Preconditions; import com.google.common.base.Supplier; import com.google.common.base.Suppliers; import com.google.common.base.Throwables; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Lists; import com.google.inject.Inject; /** * Fetches named heads or tags from one or more other repositories, along with the objects necessary * to complete them. * */ public class FetchOp extends AbstractGeoGitOp<FetchResult> { private boolean all; private boolean prune; private boolean fullDepth = false; private List<Remote> remotes = new ArrayList<Remote>(); private Repository localRepository; private Optional<Integer> depth = Optional.absent(); /** * Constructs a new {@code FetchOp}. */ @Inject public FetchOp(Repository localRepository) { this.localRepository = localRepository; } /** * @param all if {@code true}, fetch from all remotes. 
* @return {@code this} */ public FetchOp setAll(final boolean all) { this.all = all; return this; } /** * @param prune if {@code true}, remote tracking branches that no longer exist will be removed * locally. * @return {@code this} */ public FetchOp setPrune(final boolean prune) { this.prune = prune; return this; } /** * If no depth is specified, fetch will pull all history from the specified ref(s). If the * repository is shallow, it will maintain the existing depth. * * @param depth maximum commit depth to fetch * @return {@code this} */ public FetchOp setDepth(final int depth) { if (depth > 0) { this.depth = Optional.of(depth); } return this; } /** * If full depth is set on a shallow clone, then the full history will be fetched. * * @param fulldepth whether or not to fetch the full history * @return {@code this} */ public FetchOp setFullDepth(boolean fullDepth) { this.fullDepth = fullDepth; return this; } /** * @param remoteName the name or URL of a remote repository to fetch from * @return {@code this} */ public FetchOp addRemote(final String remoteName) { Preconditions.checkNotNull(remoteName); return addRemote(command(RemoteResolve.class).setName(remoteName)); } /** * @param remoteSupplier the remote repository to fetch from * @return {@code this} */ public FetchOp addRemote(Supplier<Optional<Remote>> remoteSupplier) { Preconditions.checkNotNull(remoteSupplier); Optional<Remote> remote = remoteSupplier.get(); Preconditions.checkState(remote.isPresent(), "Remote could not be resolved."); remotes.add(remote.get()); return this; } /** * Executes the fetch operation. * * @return {@code null} * @see org.geogit.api.AbstractGeoGitOp#call() */ public FetchResult call() { if (all) { // Add all remotes to list. 
ImmutableList<Remote> localRemotes = command(RemoteListOp.class).call(); for (Remote remote : localRemotes) { if (!remotes.contains(remote)) { remotes.add(remote); } } } else if (remotes.size() == 0) { // If no remotes are specified, default to the origin remote addRemote("origin"); } getProgressListener().started(); Optional<Integer> repoDepth = localRepository.getDepth(); if (repoDepth.isPresent()) { if (fullDepth) { depth = Optional.of(Integer.MAX_VALUE); } if (depth.isPresent()) { if (depth.get() > repoDepth.get()) { command(ConfigOp.class).setAction(ConfigAction.CONFIG_SET) .setScope(ConfigScope.LOCAL).setName(Repository.DEPTH_CONFIG_KEY) .setValue(depth.get().toString()).call(); repoDepth = depth; } } } else if (depth.isPresent() || fullDepth) { // Ignore depth, this is a full repository depth = Optional.absent(); fullDepth = false; } FetchResult result = new FetchResult(); for (Remote remote : remotes) { ProgressListener subProgress = this.subProgress(100.f / remotes.size()); subProgress.started(); final ImmutableSet<Ref> remoteRemoteRefs = command(LsRemote.class).setRemote( Suppliers.ofInstance(Optional.of(remote))).call(); final ImmutableSet<Ref> localRemoteRefs = command(LsRemote.class) .retrieveLocalRefs(true).setRemote(Suppliers.ofInstance(Optional.of(remote))) .call(); // If we have specified a depth to pull, we may have more history to pull from existing // refs. 
List<ChangedRef> needUpdate = findOutdatedRefs(remote, remoteRemoteRefs, localRemoteRefs, depth); if (prune) { // Delete local refs that aren't in the remote List<Ref> locals = new ArrayList<Ref>(); for (Ref remoteRef : remoteRemoteRefs) { Optional<Ref> localRef = findLocal(remoteRef, localRemoteRefs); if (localRef.isPresent()) { locals.add(localRef.get()); } } for (Ref localRef : localRemoteRefs) { if (!locals.contains(localRef)) { // Delete the ref ChangedRef changedRef = new ChangedRef(localRef, null, ChangeTypes.REMOVED_REF); needUpdate.add(changedRef); command(UpdateRef.class).setDelete(true).setName(localRef.getName()).call(); } } } Optional<IRemoteRepo> remoteRepo = getRemoteRepo(remote); Preconditions.checkState(remoteRepo.isPresent(), "Failed to connect to the remote."); try { remoteRepo.get().open(); } catch (IOException e) { Throwables.propagate(e); } int refCount = 0; for (ChangedRef ref : needUpdate) { if (ref.getType() != ChangeTypes.REMOVED_REF) { refCount++; subProgress.progress((refCount * 100.f) / needUpdate.size()); Optional<Integer> newFetchLimit = depth; // If we haven't specified a depth, but this is a shallow repository, set the // fetch limit to the current repository depth. if (!newFetchLimit.isPresent() && repoDepth.isPresent() && ref.getType() == ChangeTypes.ADDED_REF) { newFetchLimit = repoDepth; } // Fetch updated data from this ref remoteRepo.get().fetchNewData(ref.getNewRef(), newFetchLimit); if (repoDepth.isPresent()) { // Update the repository depth if it is deeper than before. 
int newDepth = localRepository.getGraphDatabase().getDepth( ref.getNewRef().getObjectId()); if (newDepth > repoDepth.get()) { command(ConfigOp.class).setAction(ConfigAction.CONFIG_SET) .setScope(ConfigScope.LOCAL) .setName(Repository.DEPTH_CONFIG_KEY) .setValue(Integer.toString(newDepth)).call(); repoDepth = Optional.of(newDepth); } } // Update the ref Ref updatedRef = updateLocalRef(ref.getNewRef(), remote, localRemoteRefs); ref.setNewRef(updatedRef); } } if (needUpdate.size() > 0) { result.getChangedRefs().put(remote.getFetchURL(), needUpdate); } // Update HEAD ref if (!remote.getMapped()) { Ref remoteHead = remoteRepo.get().headRef(); updateLocalRef(remoteHead, remote, localRemoteRefs); } try { remoteRepo.get().close(); } catch (IOException e) { Throwables.propagate(e); } subProgress.complete(); } if (fullDepth) { // The full history was fetched, this is no longer a shallow clone command(ConfigOp.class).setAction(ConfigAction.CONFIG_UNSET) .setScope(ConfigScope.LOCAL).setName(Repository.DEPTH_CONFIG_KEY).call(); } getProgressListener().complete(); return result; } /** * @param remote the remote to get * @return an interface for the remote repository */ public Optional<IRemoteRepo> getRemoteRepo(Remote remote) { return RemoteUtils .newRemote(GlobalInjectorBuilder.builder.build(), remote, localRepository); } private Ref updateLocalRef(Ref remoteRef, Remote remote, ImmutableSet<Ref> localRemoteRefs) { final String refName = Ref.REMOTES_PREFIX + remote.getName() + "/" + remoteRef.localName(); Ref updatedRef = remoteRef; if (remoteRef instanceof SymRef) { String targetBranch = Ref.localName(((SymRef) remoteRef).getTarget()); String newTarget = Ref.REMOTES_PREFIX + remote.getName() + "/" + targetBranch; command(UpdateSymRef.class).setName(refName).setNewValue(newTarget).call(); } else { if (remote.getMapped() && !localRepository.commitExists(remoteRef.getObjectId())) { ObjectId mappedId = localRepository.getGraphDatabase().getMapping( remoteRef.getObjectId()); 
command(UpdateRef.class).setName(refName).setNewValue(mappedId).call(); updatedRef = new Ref(remoteRef.getName(), mappedId, remoteRef.getType()); } else { command(UpdateRef.class).setName(refName).setNewValue(remoteRef.getObjectId()) .call(); } } return updatedRef; } /** * Filters the remote references for the given remote that are not present or outdated in the * local repository */ private List<ChangedRef> findOutdatedRefs(Remote remote, ImmutableSet<Ref> remoteRefs, ImmutableSet<Ref> localRemoteRefs, Optional<Integer> depth) { List<ChangedRef> changedRefs = Lists.newLinkedList(); for (Ref remoteRef : remoteRefs) {// refs/heads/xxx or refs/tags/yyy, though we don't handle // tags yet if (remote.getMapped() && !remoteRef.localName().equals(Ref.localName(remote.getMappedBranch()))) { // for a mapped remote, we are only interested in the branch we are mapped to continue; } Optional<Ref> local = findLocal(remoteRef, localRemoteRefs); if (local.isPresent()) { if (!local.get().getObjectId().equals(remoteRef.getObjectId())) { ChangedRef changedRef = new ChangedRef(local.get(), remoteRef, ChangeTypes.CHANGED_REF); changedRefs.add(changedRef); } else if (depth.isPresent()) { int commitDepth = localRepository.getGraphDatabase().getDepth( local.get().getObjectId()); if (depth.get() > commitDepth) { ChangedRef changedRef = new ChangedRef(local.get(), remoteRef, ChangeTypes.DEEPENED_REF); changedRefs.add(changedRef); } } } else { ChangedRef changedRef = new ChangedRef(null, remoteRef, ChangeTypes.ADDED_REF); changedRefs.add(changedRef); } } return changedRefs; } /** * Finds the corresponding local reference in {@code localRemoteRefs} for the given remote ref * * @param remoteRef a ref in the {@code refs/heads} or {@code refs/tags} namespace as given by * {@link LsRemote} when querying a remote repository * @param localRemoteRefs the list of locally known references of the given remote in the * {@code refs/remotes/<remote name>/} namespace */ private Optional<Ref> 
findLocal(Ref remoteRef, ImmutableSet<Ref> localRemoteRefs) { for (Ref localRef : localRemoteRefs) { if (localRef.localName().equals(remoteRef.localName())) { return Optional.of(localRef); } } return Optional.absent(); } }
braymar/afl
qemu_mode/qemu-2.10.0/net/filter-replay.c
/*
 * filter-replay.c
 *
 * Copyright (c) 2010-2016 Institute for System Programming
 * of the Russian Academy of Sciences.
 *
 * This work is licensed under the terms of the GNU GPL, version 2 or later.
 * See the COPYING file in the top-level directory.
 *
 */

#include "qemu/osdep.h"
#include "clients.h"
#include "qapi/error.h"
#include "qemu-common.h"
#include "qemu/error-report.h"
#include "qemu/iov.h"
#include "qemu/log.h"
#include "qemu/timer.h"
#include "qapi/visitor.h"
#include "net/filter.h"
#include "sysemu/replay.h"

#define TYPE_FILTER_REPLAY "filter-replay"

/* QOM cast macro: checked downcast from Object* to NetFilterReplayState*. */
#define FILTER_REPLAY(obj) \
    OBJECT_CHECK(NetFilterReplayState, (obj), TYPE_FILTER_REPLAY)

/*
 * Per-instance state of the replay net filter.
 * 'nfs' must stay the first member so the QOM cast above is valid
 * (the filter state embeds its NetFilterState parent by value).
 */
struct NetFilterReplayState {
    NetFilterState nfs;
    ReplayNetState *rns;   /* handle registered with the replay module */
};
typedef struct NetFilterReplayState NetFilterReplayState;

/*
 * Packet hook installed on the netdev queue.
 * Behavior depends on the global replay_mode:
 *  - RECORD: packets arriving from the guest-facing netdev are logged via
 *    replay_net_packet_event() and consumed (their full size is returned);
 *    packets from any other sender pass through (return 0).
 *  - PLAY: every packet is consumed; the replay module re-injects the
 *    recorded packets itself.
 *  - otherwise: no filtering, everything passes through.
 * Returning iov_size(...) tells the net layer the packet was handled;
 * returning 0 lets it continue down the filter chain.
 */
static ssize_t filter_replay_receive_iov(NetFilterState *nf,
                                         NetClientState *sndr,
                                         unsigned flags,
                                         const struct iovec *iov,
                                         int iovcnt, NetPacketSent *sent_cb)
{
    NetFilterReplayState *nfrs = FILTER_REPLAY(nf);
    switch (replay_mode) {
    case REPLAY_MODE_RECORD:
        if (nf->netdev == sndr) {
            replay_net_packet_event(nfrs->rns, flags, iov, iovcnt);
            return iov_size(iov, iovcnt);
        }
        return 0;
    case REPLAY_MODE_PLAY:
        /* Drop all packets in replay mode.
           Packets from the log will be injected by the replay module. */
        return iov_size(iov, iovcnt);
    default:
        /* Pass all the packets. */
        return 0;
    }
}

/* QOM instance_init: register this filter with the replay subsystem. */
static void filter_replay_instance_init(Object *obj)
{
    NetFilterReplayState *nfrs = FILTER_REPLAY(obj);
    nfrs->rns = replay_register_net(&nfrs->nfs);
}

/* QOM instance_finalize: undo the registration done in instance_init. */
static void filter_replay_instance_finalize(Object *obj)
{
    NetFilterReplayState *nfrs = FILTER_REPLAY(obj);
    replay_unregister_net(nfrs->rns);
}

/* QOM class_init: wire the packet hook into the NetFilter class vtable. */
static void filter_replay_class_init(ObjectClass *oc, void *data)
{
    NetFilterClass *nfc = NETFILTER_CLASS(oc);
    nfc->receive_iov = filter_replay_receive_iov;
}

static const TypeInfo filter_replay_info = {
    .name = TYPE_FILTER_REPLAY,
    .parent = TYPE_NETFILTER,
    .class_init = filter_replay_class_init,
    .instance_init = filter_replay_instance_init,
    .instance_finalize = filter_replay_instance_finalize,
    .instance_size = sizeof(NetFilterReplayState),
};

static void filter_replay_register_types(void)
{
    type_register_static(&filter_replay_info);
}

type_init(filter_replay_register_types);
estuaryoss/seeder
tools/PlanStateComparator.go
package tools

import (
	"reflect"
	"seeder/models"
)

// PlanStateComparator partitions the deployments of a plan into those that
// differ from the recorded state (PlannedChanges) and those that match it
// (NoChanges).
type PlanStateComparator struct {
	PlanDeployments  []*models.ServerDeployment // desired deployments
	StateDeployments []*models.ServerDeployment // deployments recorded in state
	PlannedChanges   []*models.ServerDeployment // accumulated by GetPlannedChanges
	NoChanges        []*models.ServerDeployment // accumulated by GetNoChanges
}

// NewPlanStateComparator builds a comparator over the given plan and state
// slices, with empty result accumulators.
func NewPlanStateComparator(plan []*models.ServerDeployment, state []*models.ServerDeployment) *PlanStateComparator {
	return &PlanStateComparator{
		PlanDeployments:  plan,
		StateDeployments: state,
		PlannedChanges:   make([]*models.ServerDeployment, 0),
		NoChanges:        make([]*models.ServerDeployment, 0),
	}
}

// GetPlannedChanges returns the plan deployments that are NOT found in the
// state. NOTE(review): results are appended to PlannedChanges without
// resetting it, so calling this method twice duplicates entries — presumably
// it is intended to be called once per comparator; confirm with callers.
func (planStateComparator *PlanStateComparator) GetPlannedChanges() []*models.ServerDeployment {
	for _, planDeployment := range planStateComparator.PlanDeployments {
		if !planStateComparator.isDeploymentFound(planDeployment) {
			planStateComparator.PlannedChanges = append(planStateComparator.PlannedChanges, planDeployment)
		}
	}
	return planStateComparator.PlannedChanges
}

// GetNoChanges returns the plan deployments that ARE found in the state.
// Same append-without-reset caveat as GetPlannedChanges.
func (planStateComparator *PlanStateComparator) GetNoChanges() []*models.ServerDeployment {
	for _, planDeployment := range planStateComparator.PlanDeployments {
		if planStateComparator.isDeploymentFound(planDeployment) {
			planStateComparator.NoChanges = append(planStateComparator.NoChanges, planDeployment)
		}
	}
	return planStateComparator.NoChanges
}

// GetPlan exposes the raw plan deployments.
func (planStateComparator *PlanStateComparator) GetPlan() []*models.ServerDeployment {
	return planStateComparator.PlanDeployments
}

// isDeploymentFound reports whether an equal deployment exists in the state.
// Side effects on the *plan* deployment (intentional per the visible code,
// but worth confirming):
//   - RecreateDeployment is set true up front and cleared only on a match;
//   - Deployer and Discovery are overwritten from EACH state deployment
//     visited, so on a miss they end up holding the values of the LAST
//     state entry — TODO confirm this is the intended carry-over.
func (planStateComparator *PlanStateComparator) isDeploymentFound(deployment *models.ServerDeployment) bool {
	deployment.RecreateDeployment = true
	for _, stateDeployment := range planStateComparator.StateDeployments {
		deployment.Deployer = stateDeployment.Deployer
		deployment.Discovery = stateDeployment.Discovery
		if planStateComparator.isDeploymentEqual(deployment, stateDeployment) {
			deployment.RecreateDeployment = false
			return true
		}
	}
	return false
}

// isDeploymentEqual compares a plan deployment against one state deployment.
// Equality requires: same Id, equal metadata, and equal container counts.
// As a side effect it flags RecreateDeployment when the container counts
// differ (the flag may later be cleared by isDeploymentFound on a match
// with another state entry).
func (planStateComparator *PlanStateComparator) isDeploymentEqual(plan *models.ServerDeployment, state *models.ServerDeployment) bool {
	if len(plan.Containers) != len(state.Containers) {
		plan.RecreateDeployment = true
	}
	if plan.Id == state.Id && planStateComparator.isMetadataEqual(plan.Metadata, state.Metadata) &&
		len(plan.Containers) == len(state.Containers) {
		return true
	}
	return false
}

// isMetadataEqual compares metadata by Name and deep-equality of the Labels
// map (reflect.DeepEqual treats nil and empty maps as different).
func (planStateComparator *PlanStateComparator) isMetadataEqual(planMetadata *models.XMetadata, stateMetadata *models.XMetadata) bool {
	if planMetadata.Name == stateMetadata.Name && reflect.DeepEqual(planMetadata.Labels, stateMetadata.Labels) {
		return true
	}
	return false
}
Elizabeth-Warren/supportal-frontend
constants/LeadEventCategories.js
<gh_stars>10-100 const LeadEventCategories = Object.freeze({ SUCCESSFUL: 'SUCCESSFUL', UNAVAILABLE: 'UNAVAILABLE', }); export default LeadEventCategories;
eontool/electron-starter-app
compiled/app.js
"use strict"; //Common libraries const jQuery = require("jquery"); const angular = require("angular"); require("angular-ui-router"); window.$ = window.jQuery = jQuery; //Constants const app_constant_1 = require("./constants/app.constant"); //Configuration const routes_config_1 = require("./configuration/routes.config"); const transitions_config_1 = require("./configuration/transitions.config"); const interceptors_config_1 = require("./configuration/interceptors.config"); //Controllers const main_controller_1 = require("./controllers/main.controller"); const auth_controller_1 = require("./controllers/auth.controller"); //Services const animation_service_1 = require("./services/animation.service"); const template_service_1 = require("./services/template.service"); //Factories const interceptor_factory_1 = require("./factories/interceptor.factory"); angular .module('MainApp', ['ui.router', 'templates']) .constant('Constants', app_constant_1.default()) .config(routes_config_1.default) .config(interceptors_config_1.default) .run(transitions_config_1.default) .service('Animation', animation_service_1.default) .service('TemplateService', template_service_1.default) .controller('MainController', main_controller_1.default) .controller('AuthCtrl', auth_controller_1.default) .factory('GenericInterceptor', interceptor_factory_1.default.factory);
rohan2453/ZooRestaurant
src/main/java/edu/nwmissouri/zoo04lab/Elephant.java
<filename>src/main/java/edu/nwmissouri/zoo04lab/Elephant.java /* * To change this license header, choose License Headers in Project Properties. * To change this template file, choose Tools | Templates * and open the template in the editor. */ package edu.nwmissouri.zoo04lab; enum SuperPlanet{ EARTH, JUPITER, VENUS, NEPTUNE, URANUS, } /** * * @author Homakesavadurgaprasad OMTRI (S544929) */ public class Elephant extends Animal { /** * This method is used to get the string from the parent class * * @param name Elephant */ public Elephant(String name) { super(name); } /** * This is used to print name of the Animal * */ @Override public void speak() { System.out.printf(" I'm %s.I'm an Elephant!", this.name); } /** * This is used to print Animal food * */ @Override public void move() { System.out.println(" I could not able to swim in water but would like to walk!. "); } public void profess() { double a = 2.5; int b = 5; double c = getElephantAddition(a, b); System.out.printf("I know ElephantAddition! %4.2f plus %d is %4.2f \n", a, b, c); } public double getElephantAddition(double valueOne, int valueTwo) { return valueOne + valueTwo; } public static void main(String[] args) { Elephant Tommy = new Elephant("Tommy"); Tommy.speak(); Tommy.move(); Tommy.profess(); System.out.println("My Super Planet is :"+SuperPlanet.EARTH); } }
ekmixon/open-smart-grid-platform
osgp/platform/osgp-domain-core/src/main/java/org/opensmartgridplatform/domain/core/valueobjects/smartmetering/PowerQualityObject.java
<reponame>ekmixon/open-smart-grid-platform /* * Copyright 2021 <NAME>. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file * except in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 */ package org.opensmartgridplatform.domain.core.valueobjects.smartmetering; import java.io.Serializable; public class PowerQualityObject implements Serializable { private static final long serialVersionUID = 991045734132231909L; private final String name; private final String unit; public PowerQualityObject(final String name, final String unit) { this.name = name; this.unit = unit; } public String getName() { return this.name; } public String getUnit() { return this.unit; } }
arsysop/loft-rdm
bundles/ru.arsysop.loft.rgm.synopsis.model/src-gen/ru/arsysop/loft/rgm/synopsis/model/impl/NamespaceSynopsisImpl.java
/*******************************************************************************
 * Copyright (c) 2021 ArSysOp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Contributors:
 *     (ArSysOp) - initial API and implementation
 *******************************************************************************/
package ru.arsysop.loft.rgm.synopsis.model.impl;

import java.util.Collection;

import org.eclipse.emf.common.notify.NotificationChain;
import org.eclipse.emf.common.util.EList;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.InternalEObject;
import org.eclipse.emf.ecore.util.EObjectContainmentEList;
import org.eclipse.emf.ecore.util.InternalEList;

import ru.arsysop.loft.rgm.synopsis.model.api.DefinitionSynopsis;
import ru.arsysop.loft.rgm.synopsis.model.api.NamespaceSynopsis;
import ru.arsysop.loft.rgm.synopsis.model.meta.SynopsisPackage;

/**
 * <!-- begin-user-doc -->
 * An implementation of the model object '<em><b>Namespace</b></em>'.
 * NOTE: EMF-generated class — do not hand-edit code marked {@code @generated};
 * change the Ecore model and regenerate instead. Text inside
 * begin-user-doc/end-user-doc regions survives regeneration.
 * <!-- end-user-doc -->
 * <p>
 * The following features are implemented:
 * </p>
 * <ul>
 *   <li>{@link ru.arsysop.loft.rgm.synopsis.model.impl.NamespaceSynopsisImpl#getDefinitions <em>Definitions</em>}</li>
 * </ul>
 *
 * @generated
 */
public class NamespaceSynopsisImpl extends DefinitionSynopsisImpl implements NamespaceSynopsis {
	/**
	 * The cached value of the '{@link #getDefinitions() <em>Definitions</em>}' containment reference list.
	 * <!-- begin-user-doc -->
	 * Lazily created by {@link #getDefinitions()}; containment means contained
	 * definitions are owned (and removed) with this namespace.
	 * <!-- end-user-doc -->
	 * @see #getDefinitions()
	 * @generated
	 * @ordered
	 */
	protected EList<DefinitionSynopsis> definitions;

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected NamespaceSynopsisImpl() {
		super();
	}

	/**
	 * <!-- begin-user-doc -->
	 * Identifies this object's metamodel class for the EMF runtime.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	protected EClass eStaticClass() {
		return SynopsisPackage.eINSTANCE.getNamespaceSynopsis();
	}

	/**
	 * <!-- begin-user-doc -->
	 * Returns the live containment list of definitions, creating it on first
	 * access (standard EMF lazy-init pattern).
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public EList<DefinitionSynopsis> getDefinitions() {
		if (definitions == null) {
			definitions = new EObjectContainmentEList<DefinitionSynopsis>(DefinitionSynopsis.class, this,
					SynopsisPackage.NAMESPACE_SYNOPSIS__DEFINITIONS);
		}
		return definitions;
	}

	/**
	 * <!-- begin-user-doc -->
	 * Reflective removal hook: detaches a contained definition.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
		switch (featureID) {
		case SynopsisPackage.NAMESPACE_SYNOPSIS__DEFINITIONS:
			return ((InternalEList<?>) getDefinitions()).basicRemove(otherEnd, msgs);
		default:
			return super.eInverseRemove(otherEnd, featureID, msgs);
		}
	}

	/**
	 * <!-- begin-user-doc -->
	 * Reflective getter used by the EMF runtime and editors.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public Object eGet(int featureID, boolean resolve, boolean coreType) {
		switch (featureID) {
		case SynopsisPackage.NAMESPACE_SYNOPSIS__DEFINITIONS:
			return getDefinitions();
		default:
			return super.eGet(featureID, resolve, coreType);
		}
	}

	/**
	 * <!-- begin-user-doc -->
	 * Reflective setter: replaces the whole definitions list.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@SuppressWarnings("unchecked")
	@Override
	public void eSet(int featureID, Object newValue) {
		switch (featureID) {
		case SynopsisPackage.NAMESPACE_SYNOPSIS__DEFINITIONS:
			getDefinitions().clear();
			getDefinitions().addAll((Collection<? extends DefinitionSynopsis>) newValue);
			return;
		default:
			super.eSet(featureID, newValue);
			return;
		}
	}

	/**
	 * <!-- begin-user-doc -->
	 * Reflective unset: clears the definitions list.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public void eUnset(int featureID) {
		switch (featureID) {
		case SynopsisPackage.NAMESPACE_SYNOPSIS__DEFINITIONS:
			getDefinitions().clear();
			return;
		default:
			super.eUnset(featureID);
			return;
		}
	}

	/**
	 * <!-- begin-user-doc -->
	 * Reflective "is set" check: true when the list exists and is non-empty.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public boolean eIsSet(int featureID) {
		switch (featureID) {
		case SynopsisPackage.NAMESPACE_SYNOPSIS__DEFINITIONS:
			return definitions != null && !definitions.isEmpty();
		default:
			return super.eIsSet(featureID);
		}
	}

} //NamespaceImpl
pipecraft/pipes
pipes-core/src/main/java/org/pipecraft/infra/monitoring/JsonMonitorableWrapper.java
<filename>pipes-core/src/main/java/org/pipecraft/infra/monitoring/JsonMonitorableWrapper.java<gh_stars>1-10 package org.pipecraft.infra.monitoring; import java.util.HashMap; import java.util.Map; import net.minidev.json.JSONObject; /** * A simple JsonMonitorable implementation that wraps a set of child monitorables and adds own metrics. * * @author <NAME> */ public class JsonMonitorableWrapper implements JsonMonitorable { private final Map<String, ? extends JsonMonitorable> children; private final JSONObject ownMetrics; public JsonMonitorableWrapper(JSONObject ownMetrics, Map<String, ? extends JsonMonitorable> children) { this.ownMetrics = ownMetrics; this.children = children; } public JsonMonitorableWrapper(Map<String, ? extends JsonMonitorable> children) { this(new JSONObject(), children); } public JsonMonitorableWrapper(JSONObject ownMetrics) { this(ownMetrics, new HashMap<>()); } @Override public JSONObject getOwnMetrics() { return new JSONObject(ownMetrics); } @Override public Map<String, ? extends JsonMonitorable> getChildren() { return children; } }
hacfins/-el-tree-select
public/assets/element-plus-1.2.0-beta.2/lib/components/menu/src/menu-item-group.vue_vue&type=script&lang.js
<filename>public/assets/element-plus-1.2.0-beta.2/lib/components/menu/src/menu-item-group.vue_vue&type=script&lang.js 'use strict'; Object.defineProperty(exports, '__esModule', { value: true }); var vue = require('vue'); var error = require('../../../utils/error.js'); var menuItemGroup = require('./menu-item-group.js'); const COMPONENT_NAME = "ElMenuItemGroup"; var script = vue.defineComponent({ name: COMPONENT_NAME, props: menuItemGroup.menuItemGroupProps, setup() { const instance = vue.getCurrentInstance(); const menu = vue.inject("rootMenu"); if (!menu) error.throwError(COMPONENT_NAME, "can not inject root menu"); const levelPadding = vue.computed(() => { if (menu.props.collapse) return 20; let padding = 20; let parent = instance.parent; while (parent && parent.type.name !== "ElMenu") { if (parent.type.name === "ElSubMenu") { padding += 20; } parent = parent.parent; } return padding; }); return { levelPadding }; } }); exports["default"] = script; //# sourceMappingURL=menu-item-group.vue_vue&type=script&lang.js.map
findopendata/findopendata
tests/test_avro.py
"""Tests for findopendata.parsers.avro: JSON->Avro record conversion and
round-tripping Avro back to JSON with field order preserved."""
import io
import unittest
from collections import OrderedDict

import fastavro

from findopendata.parsers.avro import JSON2AvroRecords, avro2json

# Flat records; note the last two entries introduce/omit "amount" so the
# inferred schema must union all keys seen across records.
records = [
    {"username": "javasucks", "email": "<EMAIL>"},
    {"username": "moonshoot", "email": "<EMAIL>"},
    {"username": "twilight", "email": "<EMAIL>"},
    {"username": "birdeye", "email": "<EMAIL>", "amount": 0},
    {"username": "birdeye", "amount": 1000},
]

# Same records plus a nested "location" object, to exercise nested schema
# inference.
record_nested = [
    {
        "username": "javasucks",
        "email": "<EMAIL>",
        "location": {
            "latitude": 12.22,
            "longitude": -84.23,
        },
    },
    {
        "username": "moonshoot",
        "email": "<EMAIL>",
        "location": {
            "latitude": 11.22,
            "longitude": -54.23,
        },
    },
    {
        "username": "twilight",
        "email": "<EMAIL>",
        "location": {
            "latitude": 19.22,
            "longitude": -80.23,
        },
    },
    {
        "username": "birdeye",
        "email": "<EMAIL>",
        "amount": 0,
        "location": {
            "latitude": 120.22,
            "longitude": 23.23,
        },
    },
    {
        "username": "birdeye",
        "amount": 1000,
        "location": {
            "latitude": 22.22,
            "longitude": 30.23,
        },
    },
]

# Explicit field ordering passed to JSON2AvroRecords; fields not listed here
# (e.g. "location") are expected to be appended after the listed ones.
field_names = ["email", "username", "amount"]
field_names_nested = ["email", "username", "amount", "location"]


class TestJSON2AvroRecords(unittest.TestCase):
    """Schema inference and field ordering of JSON2AvroRecords."""

    def test_basic(self):
        # All 5 records survive conversion; 3 distinct keys -> 3 schema fields.
        test_records = JSON2AvroRecords((r for r in records))
        self.assertEqual(len(list(test_records.get())), 5)
        schema = test_records.schema
        self.assertEqual(len(schema["fields"]), 3)

    def test_field_order(self):
        # Explicit field_names dictates schema field order exactly.
        test_records = JSON2AvroRecords((r for r in records),
                                        field_names=field_names)
        schema = test_records.schema
        self.assertEqual([f["name"] for f in schema["fields"]], field_names)

    def test_nested(self):
        # The unlisted "location" field is appended after the requested ones.
        test_records = JSON2AvroRecords((r for r in record_nested),
                                        field_names=field_names)
        schema = test_records.schema
        self.assertEqual([f["name"] for f in schema["fields"]],
                         field_names_nested)


class TestAvro2JSONRecords(unittest.TestCase):
    """Round trip: write Avro with fastavro, read back with avro2json."""

    def test_avro2json(self):
        test_records = JSON2AvroRecords((r for r in records),
                                        field_names=field_names)
        buf = io.BytesIO(b'')
        fastavro.writer(buf, test_records.schema, test_records.get())
        buf.seek(0)
        for record in avro2json(buf):
            # Each record must be an OrderedDict preserving schema field order.
            self.assertTrue(isinstance(record, OrderedDict))
            self.assertEqual(list(record.keys()), field_names)


if __name__ == "__main__":
    unittest.main()
croz-ltd/nrich
nrich-search/src/test/java/net/croz/nrich/search/converter/DefaultStringToTypeConverterTest.java
/*
 *  Copyright 2020-2022 <NAME>.o.o, the original author or authors.
 *
 *  Licensed under the Apache License, Version 2.0 (the "License");
 *  you may not use this file except in compliance with the License.
 *  You may obtain a copy of the License at
 *
 *      https://www.apache.org/licenses/LICENSE-2.0
 *
 *  Unless required by applicable law or agreed to in writing, software
 *  distributed under the License is distributed on an "AS IS" BASIS,
 *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  See the License for the specific language governing permissions and
 *  limitations under the License.
 *
 */

package net.croz.nrich.search.converter;

import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;

import java.math.BigDecimal;
import java.time.Instant;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.OffsetDateTime;
import java.time.OffsetTime;
import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.util.Arrays;
import java.util.Date;
import java.util.TimeZone;
import java.util.stream.Stream;

import static net.croz.nrich.search.converter.testutil.ConverterGeneratingUtil.dateOf;
import static net.croz.nrich.search.converter.testutil.ConverterGeneratingUtil.instantOf;
import static net.croz.nrich.search.converter.testutil.ConverterGeneratingUtil.localDateOf;
import static net.croz.nrich.search.converter.testutil.ConverterGeneratingUtil.localDateTimeOf;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.params.provider.Arguments.arguments;

/**
 * Parameterized tests pinning DefaultStringToTypeConverter behavior for
 * booleans, numbers, enums, and the date/time hierarchy. Unparseable input
 * is expected to convert to null rather than throw.
 */
class DefaultStringToTypeConverterTest {

    // Converter under test: date patterns, number formats, and the regexes
    // recognized as boolean true/false (case-insensitive, whitespace-tolerant).
    private final DefaultStringToTypeConverter defaultStringToTypeConverter = new DefaultStringToTypeConverter(
        Arrays.asList("dd.MM.yyyy.", "dd.MM.yyyy.'T'HH:mm", "dd.MM.yyyy.'T'HH:mm'Z'", "dd.MM.yyyy.'T'HH:mmXXX", "HH:mmXXX"),
        Arrays.asList("#0.00", "#0,00"),
        "^(?i)\\s*(true|yes)\\s*$",
        "^(?i)\\s*(false|no)\\s*$"
    );

    static {
        // Pin the JVM zone so date/time expectations are deterministic on CI.
        TimeZone.setDefault(TimeZone.getTimeZone("UTC"));
    }

    /**
     * @param stringValue     raw input string (may be null)
     * @param typeToConvertTo requested target type
     * @param expectedValue   expected conversion result (null = not convertible)
     */
    @MethodSource("shouldConvertStringValueToRequiredValueMethodSource")
    @ParameterizedTest
    void shouldConvertStringValueToRequiredValue(String stringValue, Class<?> typeToConvertTo, Object expectedValue) {
        // when
        Object convertedValue = defaultStringToTypeConverter.convert(stringValue, typeToConvertTo);

        // then
        assertThat(convertedValue).isEqualTo(expectedValue);
    }

    private static Stream<Arguments> shouldConvertStringValueToRequiredValueMethodSource() {
        ZoneId defaultZone = ZoneId.systemDefault();

        return Stream.of(
            arguments(null, Boolean.class, null),
            arguments(null, DefaultStringToTypeConverterTest.class, null),
            arguments("true", Boolean.class, Boolean.TRUE),
            arguments("yes", Boolean.class, Boolean.TRUE),
            arguments("no", Boolean.class, Boolean.FALSE),
            arguments("1", Long.class, 1L),
            arguments("D", Long.class, null),
            arguments("5", Integer.class, 5),
            arguments("5", Short.class, Short.valueOf("5")),
            arguments("ONE", Value.class, Value.ONE),
            arguments("01.01.1970.", Date.class, dateOf("01.01.1970.")),
            arguments("not a date", Date.class, null),
            arguments("01.01.2020.T11:11", Instant.class, instantOf("01.01.2020.T11:11")),
            arguments("01.01.1970.", LocalDate.class, localDateOf("01.01.1970.")),
            arguments("01.01.2020.T11:11", LocalDateTime.class, localDateTimeOf("01.01.2020.T11:11")),
            arguments("01.01.2020.T11:11Z", OffsetDateTime.class, instantOf("01.01.2020.T11:11").atZone(defaultZone).toOffsetDateTime()),
            arguments("11:11Z", OffsetTime.class, instantOf("01.01.2020.T11:11").atZone(defaultZone).toOffsetDateTime().toOffsetTime()),
            arguments("01.01.2020.T11:11Z", ZonedDateTime.class, instantOf("01.01.2020.T11:11").atZone(defaultZone)),
            arguments("1.1", BigDecimal.class, new BigDecimal("1.1")),
            // NOTE(review): Float.class deliberately expects a Double result —
            // this pins the converter's current widening behavior; confirm it
            // is intended rather than a converter bug before "fixing".
            arguments("1.1", Float.class, Double.valueOf("1.1")),
            arguments("1.1", Double.class, Double.valueOf("1.1")),
            arguments("nn", Double.class, null)
        );
    }

    // Minimal enum target used by the enum-conversion case above.
    enum Value {
        ONE
    }
}
ponder-lab/dari
h2/src/main/java/com/psddev/dari/h2/SearchUpdateTrigger.java
package com.psddev.dari.h2;

import com.psddev.dari.db.Query;
import com.psddev.dari.util.ObjectUtils;
import org.h2.api.Trigger;
import org.jooq.DSLContext;
import org.jooq.Field;
import org.jooq.Record;
import org.jooq.SQLDialect;
import org.jooq.Table;
import org.jooq.impl.DSL;

import java.nio.charset.StandardCharsets;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.List;
import java.util.Map;
import java.util.UUID;

/**
 * H2 database trigger that maintains the {@code RecordSearch} side table:
 * for each record write it stores one searchable text blob per field plus an
 * aggregate row under {@link Query#ANY_KEY}, and on delete it removes the
 * record's search rows.
 *
 * Row layout assumption (from the indexing below): newRow[0]/oldRow[0] is the
 * record UUID and newRow[2] is the record's JSON data as UTF-8 bytes — TODO
 * confirm against the table definition that installs this trigger.
 */
public class SearchUpdateTrigger implements Trigger {

    protected static final Table<Record> TABLE = DSL.table(DSL.name("RecordSearch"));
    protected static final Field<UUID> ID_FIELD = DSL.field(DSL.name("id"), UUID.class);
    protected static final Field<String> FIELD_NAME_FIELD = DSL.field(DSL.name("fieldName"), String.class);
    protected static final Field<String> VALUE_FIELD = DSL.field(DSL.name("value"), String.class);

    /** No per-trigger initialization needed. */
    @Override
    public void init(Connection connection, String schemaName, String triggerName, String tableName, boolean before, int type) {
    }

    /**
     * Fired by H2 for each row change. {@code newRow == null} means DELETE;
     * otherwise INSERT/UPDATE. Uses jOOQ over the connection H2 hands us
     * (DSL.using(connection, ...) does not take ownership of the connection).
     */
    @Override
    public void fire(Connection connection, Object[] oldRow, Object[] newRow) throws SQLException {
        try (DSLContext context = DSL.using(connection, SQLDialect.H2)) {

            // DELETE?
            if (newRow == null) {
                // Removes every RecordSearch row for this record id,
                // including the ANY_KEY aggregate row.
                context.deleteFrom(TABLE)
                        .where(ID_FIELD.eq((UUID) oldRow[0]))
                        .execute();

            // INSERT or UPDATE.
            } else {
                @SuppressWarnings("unchecked")
                Map<String, Object> data = (Map<String, Object>) ObjectUtils.fromJson(new String((byte[]) newRow[2], StandardCharsets.UTF_8));
                // Accumulates all per-field text for the ANY_KEY row.
                StringBuilder any = new StringBuilder();

                for (Map.Entry<String, Object> entry : data.entrySet()) {
                    StringBuilder search = new StringBuilder();
                    appendToSearch(search, entry.getValue());
                    any.append(search);

                    // Upsert keyed on (id, fieldName) so UPDATEs overwrite.
                    context.mergeInto(TABLE)
                            .columns(ID_FIELD, FIELD_NAME_FIELD, VALUE_FIELD)
                            .key(ID_FIELD, FIELD_NAME_FIELD)
                            .values((UUID) newRow[0], entry.getKey(), search.toString())
                            .execute();
                }

                // Aggregate row matching "any field" queries.
                context.mergeInto(TABLE)
                        .columns(ID_FIELD, FIELD_NAME_FIELD, VALUE_FIELD)
                        .key(ID_FIELD, FIELD_NAME_FIELD)
                        .values((UUID) newRow[0], Query.ANY_KEY, any.toString())
                        .execute();
            }
        }
    }

    /**
     * Recursively flattens a JSON value into space-separated text:
     * lists/map-values recurse, scalars are appended with a trailing space,
     * nulls are skipped. Map key text is intentionally not indexed here.
     */
    private void appendToSearch(StringBuilder search, Object value) {
        if (value != null) {
            if (value instanceof List) {
                ((List<?>) value).forEach(v -> appendToSearch(search, v));
            } else if (value instanceof Map) {
                ((Map<?, ?>) value).values().forEach(v -> appendToSearch(search, v));
            } else {
                search.append(value);
                search.append(' ');
            }
        }
    }

    /** Nothing to release. */
    @Override
    public void close() {
    }

    /** Nothing to release when the trigger is dropped. */
    @Override
    public void remove() {
    }
}
HANS-2002/Btech-first-year-questions-C
class8/2.c
<reponame>HANS-2002/Btech-first-year-questions-C #include<stdio.h> int main() { //Q2.WAP to print your name 5 times using while loop. int i=1; while(i<=5) { printf("HP \n"); i++; } return 0; }
vanduc1102/caption
main/data/extensions.js
const extensions = [ "3g2", "3gp", "3gp2", "3gpp", "60d", "ajp", "asf", "asx", "avchd", "avi", "bik", "bix", "box", "cam", "dat", "divx", "dmf", "dv", "dvr-ms", "evo", "flc", "fli", "flic", "flv", "flx", "gvi", "gvp", "h264", "m1v", "m2p", "m2ts", "m2v", "m4e", "m4v", "mjp", "mjpeg", "mjpg", "mkv", "moov", "mov", "movhd", "movie", "movx", "mp4", "mpe", "mpeg", "mpg", "mpv", "mpv2", "mxf", "nsv", "nut", "ogg", "ogm", "omf", "ps", "qt", "ram", "rm", "rmvb", "swf", "ts", "vfw", "vid", "video", "viv", "vivo", "vob", "vro", "wm", "wmv", "wmx", "wrap", "wvx", "wx", "x264", "xvid", ]; module.exports = extensions;
PresidentWarfield/SpiCall_Artemide_Exodus
spicall_artemide_spartacus_exodus/com/google/gson/b/a/d.java
<gh_stars>1-10 package com.google.gson.b.a; import com.google.gson.a.b; import com.google.gson.b.c; import com.google.gson.b.h; import com.google.gson.c.a; import com.google.gson.f; import com.google.gson.k; import com.google.gson.q; import com.google.gson.s; import com.google.gson.t; public final class d implements t { private final c a; public d(c paramc) { this.a = paramc; } s<?> a(c paramc, f paramf, a<?> parama, b paramb) { Object localObject = paramc.a(a.b(paramb.a())).a(); if ((localObject instanceof s)) { paramc = (s)localObject; } else if ((localObject instanceof t)) { paramc = ((t)localObject).a(paramf, parama); } else { boolean bool = localObject instanceof q; if ((!bool) && (!(localObject instanceof k))) { paramc = new StringBuilder(); paramc.append("Invalid attempt to bind an instance of "); paramc.append(localObject.getClass().getName()); paramc.append(" as a @JsonAdapter for "); paramc.append(parama.toString()); paramc.append(". @JsonAdapter value must be a TypeAdapter, TypeAdapterFactory,"); paramc.append(" JsonSerializer or JsonDeserializer."); throw new IllegalArgumentException(paramc.toString()); } k localk = null; if (bool) { paramc = (q)localObject; } else { paramc = null; } if ((localObject instanceof k)) { localk = (k)localObject; } paramc = new l(paramc, localk, paramf, parama, null); } paramf = paramc; if (paramc != null) { paramf = paramc; if (paramb.b()) { paramf = paramc.a(); } } return paramf; } public <T> s<T> a(f paramf, a<T> parama) { b localb = (b)parama.a().getAnnotation(b.class); if (localb == null) { return null; } return a(this.a, paramf, parama, localb); } } /* Location: ~/com/google/gson/b/a/d.class * * Reversed by: J */
lcmftianci/licodeanalysis
vosvideoserver/VosVideo.Communication/WebsocketClient.cpp
<filename>vosvideoserver/VosVideo.Communication/WebsocketClient.cpp #include "stdafx.h" #include "WebsocketClient.h" using namespace vosvideo::communication; WebsocketClient::WebsocketClient(std::shared_ptr<WebsocketClientEngine> websocketClientEngine) : engine_(websocketClientEngine) { } WebsocketClient::~WebsocketClient(void) { } boost::signals2::connection WebsocketClient::ConnectToConnectionProblemSignal(boost::signals2::signal<void()>::slot_function_type subscriber) { return engine_->ConnectToConnectionProblemSignal(subscriber); } void WebsocketClient::Connect(std::wstring const & url) const { engine_->Connect(url); } void WebsocketClient::Send(const std::string &msg) { engine_->Send(msg); } void WebsocketClient::Close() { throw std::exception("not implemented yet"); }
Scorpionchiques/qmlcore-android
app/src/main/java/com/pureqml/android/runtime/LocalStorage.java
package com.pureqml.android.runtime;

import android.content.Context;
import android.util.Log;

import com.eclipsesource.v8.Releasable;
import com.eclipsesource.v8.V8Array;
import com.eclipsesource.v8.V8Function;
import com.eclipsesource.v8.V8Object;
import com.pureqml.android.IExecutionEnvironment;

import java.io.FileInputStream;
import java.io.FileOutputStream;

/**
 * JS "localStorage" backend: persists each key as a private file
 * "&lt;key&gt;.storage" in the app's internal storage and reports results
 * back to the V8 runtime through the supplied callbacks.
 */
public final class LocalStorage extends BaseObject {
    public static final String TAG = "localstorage";

    // Upper bound on a stored value; anything beyond this is silently
    // truncated on read.
    final int MaxStorageSize = 128 * 128 * 1024;

    public LocalStorage(IExecutionEnvironment env) {
        super(env);
        Log.i(TAG, "local storage created");
    }

    /**
     * Reads the value stored under {@code name} and invokes {@code callback}
     * with it, or {@code error} with the exception text on failure.
     */
    public void get(String name, V8Function callback, V8Function error, V8Object origin) {
        Log.i(TAG, "getting value " + name);
        V8Array args = new V8Array(_env.getRuntime());
        Object ret = null;
        // try-with-resources: the original leaked the stream on every call.
        try (FileInputStream file = _env.getContext().openFileInput(name + ".storage")) {
            Log.d(TAG, "opened file " + name + " for reading...");
            byte[] data = new byte[MaxStorageSize];
            int r = file.read(data);
            // read() returns -1 for an empty file; treat that as an empty
            // value instead of letting new String(data, 0, -1, ...) throw.
            String stringData = new String(data, 0, Math.max(r, 0), "UTF-8");
            args.push(stringData);
            ret = callback.call(origin, args);
        } catch (Exception ex) {
            Log.w(TAG, "can't open file " + name + " for reading");
            args.push(ex.toString());
            ret = error.call(origin, args); //indicate error
        } finally {
            if (ret instanceof Releasable)
                ((Releasable)ret).release();
            args.close();
        }
    }

    /**
     * Writes {@code value} under {@code name}; invokes {@code error} on
     * failure. This API has no success callback.
     */
    public void set(String name, String value, V8Function error, V8Object origin) {
        Log.i(TAG, "setting value " + name);
        V8Array args = new V8Array(_env.getRuntime());
        Object ret = null;
        // try-with-resources: the original never closed (or flushed) the
        // stream, so writes could be lost and the descriptor leaked.
        try (FileOutputStream file = _env.getContext().openFileOutput(name + ".storage", Context.MODE_PRIVATE)) {
            Log.i(TAG, "opened file for writing...");
            file.write(value.getBytes("UTF-8"));
        } catch (Exception ex) {
            Log.e(TAG, "can't open file for writing");
            args.push(ex.toString());
            ret = error.call(origin, args); //indicate error
        } finally {
            if (ret instanceof Releasable)
                ((Releasable)ret).release();
            args.close();
        }
    }

    /**
     * Deletes the file backing {@code name}; invokes {@code error} on failure.
     */
    public void erase(String name, V8Function error, V8Object origin) {
        Log.i(TAG, "erasing value " + name);
        V8Array args = new V8Array(_env.getRuntime());
        Object ret = null;
        try {
            // NOTE(review): deleteFile() reports a missing file by returning
            // false, not by throwing; that case is treated as success here,
            // matching the original behavior.
            _env.getContext().deleteFile(name + ".storage");
            Log.i(TAG, "file deleted...");
        } catch (Exception ex) {
            Log.e(TAG, "can't delete file");
            args.push(ex.toString());
            ret = error.call(origin, args); //indicate error
        } finally {
            if (ret instanceof Releasable)
                ((Releasable)ret).release();
            args.close();
        }
    }
}
Nuvoloso/storelandia_open_source
nuvo/map_mfl.c
<gh_stars>0 /* Copyright 2019 <NAME> * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #include "nuvo_pr.h" #include "nuvo_pr_sync.h" #include "manifest.h" #include "nuvo_vol_series.h" #include "map_priv.h" #include "map_replay.h" #include "nuvo_range_lock.h" #include "lun.h" #include "resilience.h" #include "map_free_lun.h" #include <stdlib.h> void nuvo_map_mfl_req_init(struct nuvo_map_free_lun_request *mfl_req, struct nuvo_lun *lun) { mfl_req->lun = lun; mfl_req->offset = 0; mfl_req->free_offset = 0; mfl_req->map_mfl_cnt = 0; mfl_req->map_load_cnt = 0; mfl_req->state = MFL_IN_PROGRESS; mfl_req->work_state = MFL_WORK_LOAD_MAPS; mfl_req->dirty_cnt = 0; NUVO_PRINT("mfl work begin on lun(%d) lun_state:%d", lun->snap_id, lun->lun_state); } bool nuvo_map_mfl_is_paused(struct nuvo_map_free_lun_request *mfl_req) { NUVO_ASSERT_MUTEX_HELD(&mfl_req->mutex); return (mfl_req->state == MFL_PAUSED); } void nuvo_map_mfl_pause(struct nuvo_map_free_lun_request *mfl_req) { NUVO_ASSERT_MUTEX_HELD(&mfl_req->mutex); mfl_req->state = MFL_PAUSED; } static int nuvo_map_mfl_dirty_cnt_threshold = NUVO_MAP_MFL_DIRTY_CNT_THRESHOLD; void nuvo_map_mfl_set_dirty_cnt_threshold(int threshold) { NUVO_PRINT("setting global mfl dirty cnt threshold:%d", threshold); nuvo_map_mfl_dirty_cnt_threshold = threshold; } bool nuvo_map_mfl_need_pausing(struct nuvo_map_free_lun_request *mfl_req) { if (mfl_req->dirty_cnt > nuvo_map_mfl_dirty_cnt_threshold) { NUVO_PRINT("pause mfl:%p work_state:%d lun(%d) vol:%p dirty_cnt:%d 
threshold:%d vol uuid:" NUVO_LOG_UUID_FMT, mfl_req, mfl_req->work_state, mfl_req->lun->snap_id, mfl_req->lun->vol, mfl_req->dirty_cnt, nuvo_map_mfl_dirty_cnt_threshold, NUVO_LOG_UUID(mfl_req->lun->vol->vs_uuid)); return (true); } // Test the pointers along the way because they are NULL in unit test :() if (mfl_req->lun && mfl_req->lun->vol && nuvo_mfst_slog_filling(mfl_req->lun->vol)) { NUVO_PRINT("pause mfl:%p work_state:%d lun(%d) vol:%p slog full vol uuid:" NUVO_LOG_UUID_FMT, mfl_req, mfl_req->work_state, mfl_req->lun->snap_id, mfl_req->lun->vol, NUVO_LOG_UUID(mfl_req->lun->vol->vs_uuid)); return (true); } return (false); } void nuvo_map_mfl_init(struct nuvo_map_free_lun_request *mfl_req) { nuvo_mutex_init(&mfl_req->mutex); nuvo_cond_init(&mfl_req->cond); mfl_req->state = MFL_HALTED; mfl_req->work_state = MFL_WORK_NONE; } void nuvo_map_mfl_start(struct nuvo_map_free_lun_request *mfl_req) { nuvo_mutex_lock(&mfl_req->mutex); mfl_req->state = MFL_NONE; nuvo_mutex_unlock(&mfl_req->mutex); struct nuvo_space_vol *space = nuvo_containing_object(mfl_req, struct nuvo_space_vol, mfl_req); nuvo_mutex_lock(&space->space_vol_mutex); nuvo_vol_new_needs_work_mfl(space); nuvo_mutex_unlock(&space->space_vol_mutex); } void nuvo_map_mfl_stop(struct nuvo_map_free_lun_request *mfl_req) { nuvo_mutex_lock(&mfl_req->mutex); while (mfl_req->state != MFL_HALTED) { //if paused, lets not bother if (nuvo_map_mfl_is_paused(mfl_req) || (mfl_req->state == MFL_NONE)) { mfl_req->state = MFL_HALTED; break; } // if running, halt mfl nuvo_map_mfl_trigger_halting(mfl_req); nuvo_map_mfl_wait_for_halt(mfl_req); } nuvo_mutex_unlock(&mfl_req->mutex); } void nuvo_map_mfl_kick(struct nuvo_map_free_lun_request *mfl_req) { bool kick = false; // Note : dont need the lock here. // If we read the state here as MFL_NONE, the mfl is not in progress/hasnt begun. 
// If another mfl begin raced with us, the only concern here is that we missed // seeing a PAUSED state(which is unlikely, given the time from NONE->PAUSED) // And in any case, if we miss seeing a PAUSED state, the task would get kicked again at the // end of next CP if (mfl_req->state == MFL_NONE) { return; } nuvo_mutex_lock(&mfl_req->mutex); mfl_req->dirty_cnt = 0; if (mfl_req->state == MFL_PAUSED) { // assert since you cant pause in free entries. NUVO_ASSERT(mfl_req->work_state == MFL_WORK_LOAD_MAPS); mfl_req->state = MFL_IN_PROGRESS; kick = true; } nuvo_mutex_unlock(&mfl_req->mutex); if (kick) { NUVO_PRINT("kick mfl:%p work_state:%d lun(%d) vol:%p vol uuid:" NUVO_LOG_UUID_FMT, mfl_req, mfl_req->work_state, mfl_req->lun->snap_id, mfl_req->lun->vol, NUVO_LOG_UUID(mfl_req->lun->vol->vs_uuid)); nuvo_mutex_lock(&mfl_req->lun->vol->log_volume.space.space_vol_mutex); nuvo_vol_needs_work_mfl(&mfl_req->lun->vol->log_volume.space); nuvo_mutex_unlock(&mfl_req->lun->vol->log_volume.space.space_vol_mutex); } } void nuvo_map_mfl_inc_dirty_cnt(struct nuvo_map_free_lun_request *mfl_req) { nuvo_mutex_lock(&mfl_req->mutex); mfl_req->dirty_cnt++; nuvo_mutex_unlock(&mfl_req->mutex); } void nuvo_map_mfl_req_reset(struct nuvo_map_free_lun_request *mfl_req) { nuvo_mutex_lock(&mfl_req->mutex); NUVO_ASSERT(mfl_req->map_mfl_cnt == mfl_req->map_load_cnt); mfl_req->lun = NULL; mfl_req->offset = 0; mfl_req->free_offset = 0; mfl_req->map_mfl_cnt = 0; mfl_req->map_load_cnt = 0; // if we are halting, let's not change the state // the mfl engine will handle a pending halt after mfl done if (mfl_req->state == MFL_IN_PROGRESS) { mfl_req->state = MFL_NONE; } mfl_req->work_state = MFL_WORK_NONE; mfl_req->dirty_cnt = 0; nuvo_mutex_unlock(&mfl_req->mutex); } void nuvo_map_mfl_batch_done(struct nuvo_parallel_op *par_ops) { NUVO_ASSERT(!par_ops->status); // TODO error handle->abort mfl? 
// TODO if in errror mark mfl_req in error // and let the next state handle it struct nuvo_map_free_lun_request *mfl_req = nuvo_containing_object(par_ops, struct nuvo_map_free_lun_request, par_ops); NUVO_LOG(map, 80, "mfl batch done lun:%d lun_state:%d offset:%lu free_offset:%lu", mfl_req->lun->snap_id, mfl_req->lun->lun_state, mfl_req->offset, mfl_req->free_offset); nuvo_parallel_op_destroy(par_ops); nuvo_mutex_lock(&mfl_req->lun->vol->log_volume.space.space_vol_mutex); mfl_req->work_state = MFL_WORK_FREE_ENTRIES; nuvo_vol_needs_work_mfl(&mfl_req->lun->vol->log_volume.space); nuvo_mutex_unlock(&mfl_req->lun->vol->log_volume.space.space_vol_mutex); } void nuvo_map_mfl_fault_in_cb(struct nuvo_map_request *map_req) { struct nuvo_map_free_lun_request *mfl = map_req->tag.ptr; NUVO_ASSERT(map_req->first_map->pinned > 0); struct nuvo_map_track *map = map_req->first_map; NUVO_LOG_COND(map, 80, (!map->base_offset), "FAULT-IN map map :%p state:%d shadow_link:%p mfl:%d media_addr:(%lu:%lu) offset:%lu level:%d map->is_dirty:%d", map, map->state, map->shadow_link, map->mfl, map->map_entry.media_addr.parcel_index, map->map_entry.media_addr.block_offset, map->base_offset, map->level, map->is_dirty); nuvo_parallel_op_done(&mfl->par_ops, map_req->status); } bool nuvo_map_mfl_is_halting(struct nuvo_map_free_lun_request *mfl_req) { NUVO_ASSERT_MUTEX_HELD(&mfl_req->mutex); return ((mfl_req->state == MFL_HALTING)); } bool nuvo_map_mfl_is_halted(struct nuvo_map_free_lun_request *mfl_req) { NUVO_ASSERT_MUTEX_HELD(&mfl_req->mutex); return ((mfl_req->state == MFL_HALTED)); } bool nuvo_map_mfl_is_done(struct nuvo_map_free_lun_request *mfl_req) { NUVO_ASSERT(mfl_req->offset == mfl_req->free_offset); return (mfl_req->offset >= (mfl_req->lun->size / NUVO_BLOCK_SIZE)); } void nuvo_map_mfl_trigger_halting(struct nuvo_map_free_lun_request *mfl_req) { NUVO_ASSERT_MUTEX_HELD(&mfl_req->mutex); NUVO_ASSERT(mfl_req->state == MFL_IN_PROGRESS); // must be in progress mfl_req->state = MFL_HALTING; } 
void nuvo_map_mfl_wait_for_halt(struct nuvo_map_free_lun_request *mfl_req) { nuvo_cond_wait(&mfl_req->cond, &mfl_req->mutex); } void nuvo_map_mfl_halt(struct nuvo_map_free_lun_request *mfl_req) { NUVO_ASSERT_MUTEX_HELD(&mfl_req->mutex); struct nuvo_lun *lun = mfl_req->lun; struct nuvo_space_vol *space_vol = nuvo_containing_object(mfl_req, struct nuvo_space_vol, mfl_req); if (lun) { nuvo_mutex_lock(&lun->mutex); nuvo_lun_unpin(lun); nuvo_mutex_unlock(&lun->mutex); } mfl_req->state = MFL_HALTED; mfl_req->work_state = MFL_WORK_NONE; NUVO_LOG(space, 25, "mfl halted, wake up the waiter vol uuid:"NUVO_LOG_UUID_FMT, NUVO_LOG_UUID(nuvo_containing_object(space_vol, struct nuvo_vol, log_volume.space)->vs_uuid)); nuvo_cond_signal(&mfl_req->cond); } void nuvo_map_mfl_done(struct nuvo_map_free_lun_request *mfl_req) { struct nuvo_lun *lun = mfl_req->lun; NUVO_LOG(map, 0, "mfl L0 done moving to lun to DELETING_DRAIN lun(%d) state:%d lun->mfl_state:%d offset:%lu", lun->snap_id, lun->lun_state, lun->mfl_state, mfl_req->offset); NUVO_ASSERT(mfl_req->work_state == MFL_WORK_LOAD_MAPS); nuvo_map_mfl_req_reset(mfl_req); nuvo_mutex_lock(&lun->mutex); lun->mfl_state = NUVO_LUN_MFL_CP_PENDING; nuvo_lun_unpin(lun); // done with L0 punching, roll up pending. // Glossary MFL means "map free lun" aka hole punching work. // // Right now, we are done with L0 maps. And now we go from DELETING-> DELETING_DRAIN. // We are yet to free L>0 map blocks. // "Deleting Drain" ensures that no more new GCs would come in. // Ongoing gcs can handle MFLed or to be MFLed blocks. // MFLed blocks are returned success to GC. // Future MFL blocks are just dirted by GC // But since MFL has already dirtied all the L0 maps // L>0 maps must necessarily (including the maps that are marked for rewrite // by GC) should get MFL-ed in the next CP. // CP would roll up the deleting on the map tree. // Deleting drain ->deleted happens when roll up is done. 
// Setting the state to DELETING drain right after L0 maps are punched, helps space thread // to decide not to pick up the lun again // The downside is that GC will not do any new work on the intemediate maps // But gc would expect the maps to be free/written out by the next cp. // Since we trigger cp after the deleting drain, and since gc would wait // for the next cp to finish, the expectation that maps would be deleted by the next cp // is valid. nuvo_return_t rc = nuvo_lun_state_transition(lun, NUVO_LUN_STATE_DELETING_DRAIN, NUVO_LUN_EXPORT_UNEXPORTED); NUVO_ASSERT(rc == 0); nuvo_mutex_unlock(&lun->mutex); //trigger a CP , cp would roll up the upper level maps. struct nuvo_space_vol *space_vol = &lun->vol->log_volume.space; nuvo_space_trigger_cp(space_vol); // We are done with this lun. // We need to check whether we need to do more luns // So, requeue work to the space thread, who would check for // mfl work for more luns nuvo_mutex_lock(&space_vol->space_vol_mutex); nuvo_vol_new_needs_work_mfl(space_vol); nuvo_mutex_unlock(&space_vol->space_vol_mutex); } void nuvo_map_mfl_work(struct nuvo_map_free_lun_request *req) { //do the hole punching for the maps we loaded if (req->work_state == MFL_WORK_FREE_ENTRIES) { nuvo_map_mfl_free_entries(req); req->work_state = MFL_WORK_LOAD_MAPS; } // we can only halt, pause after calling the "free_entries" above // so that we dont halt/pause holding map pins NUVO_ASSERT(req->work_state == MFL_WORK_LOAD_MAPS); // if we are done , mfl state changes from PROGRESS -> NONE // If a PROGRESS -> HALTING happens in the middle, mfl_done // doesnt touch the state // and HALTING is handled subsequently if (nuvo_map_mfl_is_done(req)) { nuvo_map_mfl_done(req); } // if we are are told to halt, halt nuvo_mutex_lock(&req->mutex); if (nuvo_map_mfl_is_halting(req)) { nuvo_map_mfl_halt(req); } if (nuvo_map_mfl_is_halted(req)) { goto _out; } if (nuvo_map_mfl_need_pausing(req)) { // pause can be only done before loading maps 
nuvo_map_mfl_pause(req); goto _out; } if (!nuvo_map_mfl_is_paused(req) && (req->work_state == MFL_WORK_LOAD_MAPS)) { //now go and load the next set of maps nuvo_mutex_unlock(&req->mutex); nuvo_map_mfl_load_maps(req); return; } _out: nuvo_mutex_unlock(&req->mutex); return; } void nuvo_map_mfl_load_maps(struct nuvo_map_free_lun_request *mfl_req) { //assert that the map is in i/o done //assert that the offset is a multiple of radix etc nuvo_mutex_lock(&mfl_req->mutex); struct nuvo_parallel_op *par_ops = &mfl_req->par_ops; nuvo_return_t rc = nuvo_parallel_op_init(par_ops); NUVO_ASSERT(!rc); par_ops->callback = nuvo_map_mfl_batch_done; uint64_t lun_offset_max = mfl_req->lun->size / NUVO_BLOCK_SIZE; // load the maps. // the parallel op cb gets called when we are done loading // which would switch us to the "free entries" phase where // we do the free work. // Note: if we are halting, we stop loading maps. // the state machine in mfl_work would handle halting eventually for (unsigned i = 0; (i < NUVO_MFL_BATCH_SIZE && mfl_req->offset < lun_offset_max && !nuvo_map_mfl_is_halting(mfl_req)); i++, mfl_req->offset += NUVO_MAP_RADIX) { struct nuvo_map_request *map_req = &mfl_req->map_reqs[i]; nuvo_map_request_init(map_req, mfl_req->lun, mfl_req->offset, 1); nuvo_map_reserve_sync(map_req); NUVO_ASSERT(!map_req->status); map_req->tag.ptr = mfl_req; map_req->callback = nuvo_map_mfl_fault_in_cb; mfl_req->map_load_cnt++; nuvo_parallel_op_submitting(par_ops); nuvo_mutex_unlock(&mfl_req->mutex); nuvo_map_fault_in(map_req); NUVO_ASSERT(!map_req->status); nuvo_mutex_lock(&mfl_req->mutex); } nuvo_mutex_unlock(&mfl_req->mutex); nuvo_parallel_op_finalize(par_ops); } void nuvo_map_mfl_free_entries(struct nuvo_map_free_lun_request *mfl_req) { NUVO_ASSERT(mfl_req->offset); //if we have something to work, mfl must have loaded some maps NUVO_ASSERT(mfl_req->free_offset <= mfl_req->offset); for (unsigned int i = 0; (i < NUVO_MFL_BATCH_SIZE && mfl_req->free_offset < mfl_req->offset); i++, 
mfl_req->free_offset += NUVO_MAP_RADIX) { struct nuvo_map_request *map_req = &mfl_req->map_reqs[i]; // hold the vol lock across map lock so that volume cp_gen doesnt change // during map_commit_lock struct nuvo_vol *vol = map_req->lun->vol; nuvo_mutex_lock(&vol->mutex); map_req->cp_commit_gen = vol->log_volume.map_state.checkpoint_gen; nuvo_map_commit_lock(map_req); nuvo_mutex_unlock(&vol->mutex); NUVO_ASSERT(map_req->first_map == map_req->last_map); NUVO_ASSERT(map_req->first_map->level == 0); // assert that the map we are working on is the map we intend to work on NUVO_ASSERT(map_req->first_map->base_offset == mfl_req->free_offset); bool is_dirty = map_mfl_free_entries(map_req->first_map); if (is_dirty) { nuvo_map_mfl_inc_dirty_cnt(mfl_req); } mfl_req->map_mfl_cnt++; nuvo_map_commit_unlock(map_req); } NUVO_ASSERT(mfl_req->offset == mfl_req->free_offset); }
dyzmapl/BumpTop
trunk/win/Source/Includes/QtIncludes/src/3rdparty/webkit/WebCore/ForwardingHeaders/runtime/PrototypeFunction.h
<reponame>dyzmapl/BumpTop #ifndef WebCore_FWD_PrototypeFunction_h #define WebCore_FWD_PrototypeFunction_h #include <JavaScriptCore/PrototypeFunction.h> #endif
albertobarri/idk
tests/nbody-java/java_nio_charset_CharsetEncoder.h
<gh_stars>10-100 #ifndef __JAVA_NIO_CHARSET_CHARSETENCODER__ #define __JAVA_NIO_CHARSET_CHARSETENCODER__ #include "xmlvm.h" // Preprocessor constants for interfaces: #define XMLVM_ITABLE_SIZE_java_nio_charset_CharsetEncoder 0 // Implemented interfaces: // Super Class: #include "java_lang_Object.h" // Circular references: #ifndef XMLVM_FORWARD_DECL_java_lang_CharSequence #define XMLVM_FORWARD_DECL_java_lang_CharSequence XMLVM_FORWARD_DECL(java_lang_CharSequence) #endif #ifndef XMLVM_FORWARD_DECL_java_lang_Class #define XMLVM_FORWARD_DECL_java_lang_Class XMLVM_FORWARD_DECL(java_lang_Class) #endif #ifndef XMLVM_FORWARD_DECL_java_lang_String #define XMLVM_FORWARD_DECL_java_lang_String XMLVM_FORWARD_DECL(java_lang_String) #endif #ifndef XMLVM_FORWARD_DECL_java_nio_ByteBuffer #define XMLVM_FORWARD_DECL_java_nio_ByteBuffer XMLVM_FORWARD_DECL(java_nio_ByteBuffer) #endif #ifndef XMLVM_FORWARD_DECL_java_nio_CharBuffer #define XMLVM_FORWARD_DECL_java_nio_CharBuffer XMLVM_FORWARD_DECL(java_nio_CharBuffer) #endif #ifndef XMLVM_FORWARD_DECL_java_nio_charset_Charset #define XMLVM_FORWARD_DECL_java_nio_charset_Charset XMLVM_FORWARD_DECL(java_nio_charset_Charset) #endif #ifndef XMLVM_FORWARD_DECL_java_nio_charset_CharsetDecoder #define XMLVM_FORWARD_DECL_java_nio_charset_CharsetDecoder XMLVM_FORWARD_DECL(java_nio_charset_CharsetDecoder) #endif #ifndef XMLVM_FORWARD_DECL_java_nio_charset_CoderResult #define XMLVM_FORWARD_DECL_java_nio_charset_CoderResult XMLVM_FORWARD_DECL(java_nio_charset_CoderResult) #endif #ifndef XMLVM_FORWARD_DECL_java_nio_charset_CodingErrorAction #define XMLVM_FORWARD_DECL_java_nio_charset_CodingErrorAction XMLVM_FORWARD_DECL(java_nio_charset_CodingErrorAction) #endif // Class declarations for java.nio.charset.CharsetEncoder XMLVM_DEFINE_CLASS(java_nio_charset_CharsetEncoder, 7, XMLVM_ITABLE_SIZE_java_nio_charset_CharsetEncoder) extern JAVA_OBJECT __CLASS_java_nio_charset_CharsetEncoder; extern JAVA_OBJECT __CLASS_java_nio_charset_CharsetEncoder_1ARRAY; 
extern JAVA_OBJECT __CLASS_java_nio_charset_CharsetEncoder_2ARRAY; extern JAVA_OBJECT __CLASS_java_nio_charset_CharsetEncoder_3ARRAY; //XMLVM_BEGIN_DECLARATIONS #define __ADDITIONAL_INSTANCE_FIELDS_java_nio_charset_CharsetEncoder //XMLVM_END_DECLARATIONS #define __INSTANCE_FIELDS_java_nio_charset_CharsetEncoder \ __INSTANCE_FIELDS_java_lang_Object; \ struct { \ JAVA_OBJECT cs_; \ JAVA_FLOAT averBytes_; \ JAVA_FLOAT maxBytes_; \ JAVA_OBJECT replace_; \ JAVA_INT status_; \ JAVA_BOOLEAN finished_; \ JAVA_OBJECT malformAction_; \ JAVA_OBJECT unmapAction_; \ JAVA_OBJECT decoder_; \ __ADDITIONAL_INSTANCE_FIELDS_java_nio_charset_CharsetEncoder \ } java_nio_charset_CharsetEncoder struct java_nio_charset_CharsetEncoder { __TIB_DEFINITION_java_nio_charset_CharsetEncoder* tib; struct { __INSTANCE_FIELDS_java_nio_charset_CharsetEncoder; } fields; }; #ifndef XMLVM_FORWARD_DECL_java_nio_charset_CharsetEncoder #define XMLVM_FORWARD_DECL_java_nio_charset_CharsetEncoder typedef struct java_nio_charset_CharsetEncoder java_nio_charset_CharsetEncoder; #endif #define XMLVM_VTABLE_SIZE_java_nio_charset_CharsetEncoder 7 #define XMLVM_VTABLE_IDX_java_nio_charset_CharsetEncoder_encodeLoop___java_nio_CharBuffer_java_nio_ByteBuffer 6 void __INIT_java_nio_charset_CharsetEncoder(); void __INIT_IMPL_java_nio_charset_CharsetEncoder(); void __DELETE_java_nio_charset_CharsetEncoder(void* me, void* client_data); void __INIT_INSTANCE_MEMBERS_java_nio_charset_CharsetEncoder(JAVA_OBJECT me, int derivedClassWillRegisterFinalizer); JAVA_OBJECT __NEW_java_nio_charset_CharsetEncoder(); JAVA_OBJECT __NEW_INSTANCE_java_nio_charset_CharsetEncoder(); JAVA_INT java_nio_charset_CharsetEncoder_GET_READY(); void java_nio_charset_CharsetEncoder_PUT_READY(JAVA_INT v); JAVA_INT java_nio_charset_CharsetEncoder_GET_ONGOING(); void java_nio_charset_CharsetEncoder_PUT_ONGOING(JAVA_INT v); JAVA_INT java_nio_charset_CharsetEncoder_GET_END(); void java_nio_charset_CharsetEncoder_PUT_END(JAVA_INT v); JAVA_INT 
java_nio_charset_CharsetEncoder_GET_FLUSH(); void java_nio_charset_CharsetEncoder_PUT_FLUSH(JAVA_INT v); JAVA_INT java_nio_charset_CharsetEncoder_GET_INIT(); void java_nio_charset_CharsetEncoder_PUT_INIT(JAVA_INT v); void java_nio_charset_CharsetEncoder___INIT____java_nio_charset_Charset_float_float(JAVA_OBJECT me, JAVA_OBJECT n1, JAVA_FLOAT n2, JAVA_FLOAT n3); void java_nio_charset_CharsetEncoder___INIT____java_nio_charset_Charset_float_float_byte_1ARRAY(JAVA_OBJECT me, JAVA_OBJECT n1, JAVA_FLOAT n2, JAVA_FLOAT n3, JAVA_OBJECT n4); JAVA_FLOAT java_nio_charset_CharsetEncoder_averageBytesPerChar__(JAVA_OBJECT me); JAVA_BOOLEAN java_nio_charset_CharsetEncoder_canEncode___char(JAVA_OBJECT me, JAVA_CHAR n1); JAVA_BOOLEAN java_nio_charset_CharsetEncoder_implCanEncode___java_nio_CharBuffer(JAVA_OBJECT me, JAVA_OBJECT n1); JAVA_BOOLEAN java_nio_charset_CharsetEncoder_canEncode___java_lang_CharSequence(JAVA_OBJECT me, JAVA_OBJECT n1); JAVA_OBJECT java_nio_charset_CharsetEncoder_charset__(JAVA_OBJECT me); JAVA_OBJECT java_nio_charset_CharsetEncoder_encode___java_nio_CharBuffer(JAVA_OBJECT me, JAVA_OBJECT n1); void java_nio_charset_CharsetEncoder_checkCoderResult___java_nio_charset_CoderResult(JAVA_OBJECT me, JAVA_OBJECT n1); JAVA_OBJECT java_nio_charset_CharsetEncoder_allocateMore___java_nio_ByteBuffer(JAVA_OBJECT me, JAVA_OBJECT n1); JAVA_OBJECT java_nio_charset_CharsetEncoder_encode___java_nio_CharBuffer_java_nio_ByteBuffer_boolean(JAVA_OBJECT me, JAVA_OBJECT n1, JAVA_OBJECT n2, JAVA_BOOLEAN n3); // Vtable index: 6 JAVA_OBJECT java_nio_charset_CharsetEncoder_encodeLoop___java_nio_CharBuffer_java_nio_ByteBuffer(JAVA_OBJECT me, JAVA_OBJECT n1, JAVA_OBJECT n2); JAVA_OBJECT java_nio_charset_CharsetEncoder_flush___java_nio_ByteBuffer(JAVA_OBJECT me, JAVA_OBJECT n1); JAVA_OBJECT java_nio_charset_CharsetEncoder_implFlush___java_nio_ByteBuffer(JAVA_OBJECT me, JAVA_OBJECT n1); void 
java_nio_charset_CharsetEncoder_implOnMalformedInput___java_nio_charset_CodingErrorAction(JAVA_OBJECT me, JAVA_OBJECT n1); void java_nio_charset_CharsetEncoder_implOnUnmappableCharacter___java_nio_charset_CodingErrorAction(JAVA_OBJECT me, JAVA_OBJECT n1); void java_nio_charset_CharsetEncoder_implReplaceWith___byte_1ARRAY(JAVA_OBJECT me, JAVA_OBJECT n1); void java_nio_charset_CharsetEncoder_implReset__(JAVA_OBJECT me); JAVA_BOOLEAN java_nio_charset_CharsetEncoder_isLegalReplacement___byte_1ARRAY(JAVA_OBJECT me, JAVA_OBJECT n1); JAVA_OBJECT java_nio_charset_CharsetEncoder_malformedInputAction__(JAVA_OBJECT me); JAVA_FLOAT java_nio_charset_CharsetEncoder_maxBytesPerChar__(JAVA_OBJECT me); JAVA_OBJECT java_nio_charset_CharsetEncoder_onMalformedInput___java_nio_charset_CodingErrorAction(JAVA_OBJECT me, JAVA_OBJECT n1); JAVA_OBJECT java_nio_charset_CharsetEncoder_onUnmappableCharacter___java_nio_charset_CodingErrorAction(JAVA_OBJECT me, JAVA_OBJECT n1); JAVA_OBJECT java_nio_charset_CharsetEncoder_replacement__(JAVA_OBJECT me); JAVA_OBJECT java_nio_charset_CharsetEncoder_replaceWith___byte_1ARRAY(JAVA_OBJECT me, JAVA_OBJECT n1); JAVA_OBJECT java_nio_charset_CharsetEncoder_reset__(JAVA_OBJECT me); JAVA_OBJECT java_nio_charset_CharsetEncoder_unmappableCharacterAction__(JAVA_OBJECT me); #endif
Pivotal-Field-Engineering/spring-cloud
spring-cloud-cloudfoundry-connector/src/main/java/org/springframework/cloud/cloudfoundry/MysqlServiceInfoCreator.java
package org.springframework.cloud.cloudfoundry; import org.springframework.cloud.service.common.MysqlServiceInfo; /** * * @author <NAME> * */ public class MysqlServiceInfoCreator extends RelationalServiceInfoCreator<MysqlServiceInfo> { public MysqlServiceInfoCreator() { // the literal in the tag is CloudFoundry-specific super(new Tags("mysql"), MysqlServiceInfo.URI_SCHEME); } @Override public MysqlServiceInfo createServiceInfo(String id, String url) { return new MysqlServiceInfo(id, url); } }
bm2-lab/CRISPR-off-target-data-imbalance
scripts_for_improve_CRISTA/detailed/op8b_build_stu_div_cls.py
<reponame>bm2-lab/CRISPR-off-target-data-imbalance import numpy as np from sklearn.ensemble import RandomForestClassifier from sklearn.externals import joblib stu_common = joblib.load('data/stu_common_div_tr.pkl') stu_unique = joblib.load('data/stu_unique_div_tr.pkl') params = joblib.load('params.pkl') del params['criterion'] dt_common = dict.fromkeys(stu_common.keys()) dt_unique = dict.fromkeys(stu_unique.keys()) for study in stu_common: xtr_lst, ytr_lst, xte, yte = stu_common[study] ypred_lst = [] print(f'{study} begins:') for i in range(100): print(f'{study}: {i+1} / 100') xtr = xtr_lst[i] ytr = ytr_lst[i] ytr[ytr > 0] = 1 rf = RandomForestClassifier(**params) rf.fit(xtr, ytr) ypred = rf.predict(xte) ypred_lst.append(ypred) ypred_m = np.mean(np.vstack(ypred_lst), axis=0) dt_common[study] = (yte, ypred_m) joblib.dump(dt_common, 'result/stu_common_div_cls_res.pkl') for study in stu_unique: xtr_lst, ytr_lst, xte, yte = stu_unique[study] ypred_lst = [] print(f'{study} begins:') for i in range(100): print(f'{study}: {i+1} / 100') xtr = xtr_lst[i] ytr = ytr_lst[i] ytr[ytr > 0] = 1 rf = RandomForestClassifier(**params) rf.fit(xtr, ytr) ypred = rf.predict(xte) ypred_lst.append(ypred) ypred_m = np.mean(np.vstack(ypred_lst), axis=0) dt_unique[study] = (yte, ypred_m) joblib.dump(dt_unique, 'result/stu_unique_div_cls_res.pkl')
jbschroder/pymgrit
examples/example_dahlquist.py
""" Use PyMGRIT's routines simple_setup_problem() and Mgrit() to generate a multigrid hierarchy and MGRIT solver and run the solver routine mgrit.solve(). """ from pymgrit.dahlquist.dahlquist import Dahlquist from pymgrit.core.simple_setup_problem import simple_setup_problem from pymgrit.core.mgrit import Mgrit def main(): # Create Dahlquist's test problem with 101 time steps in the interval [0, 5] dahlquist = Dahlquist(t_start=0, t_stop=5, nt=101) # Construct a two-level multigrid hierarchy for the test problem using a coarsening factor of 2 dahlquist_multilevel_structure = simple_setup_problem(problem=dahlquist, level=2, coarsening=2) # Set up the MGRIT solver for the test problem and set the solver tolerance to 1e-10 mgrit = Mgrit(problem=dahlquist_multilevel_structure, tol=1e-10) # Solve the test problem info = mgrit.solve() if __name__ == '__main__': main()
alexsandertech/minimalist-minesweeper-javascrip
js/modules/main-loop-modules/listenerButtons.js
import {
    addListenerClickLEFT,
    addListenerClickRIGHT,
    removeListenerClickLEFT,
    removeListenerClickRIGHT,
} from "../general-modules/listenerClick.js";

/** CSS selector for the board cell at row `i`, column `j`. */
function cellSelector(i, j) {
    return ".cell-" + i + "-" + j;
}

/**
 * Attach click handlers to the control buttons and to every non-null cell.
 * @param {Object} board - expects board.column, board.row and board.view (2D array).
 */
export async function addListenerButtons(board) {
    console.log("  >> Initializing addListenerButtons");

    addListenerClickLEFT(".btn-back");
    addListenerClickLEFT(".btn-restart");
    await addListenerCells(board.column, board.row, board.view);

    console.log("  << Finalizing addListenerButtons");
}

/**
 * Detach the click handlers installed by addListenerButtons().
 * @param {Object} board - expects board.column, board.row and board.view (2D array).
 */
export async function removeListenerButtons(board) {
    console.log("  >> Initializing removeListenerButtons");

    removeListenerClickLEFT(".btn-back");
    removeListenerClickLEFT(".btn-restart");
    await removeListenerCells(board.column, board.row, board.view);

    console.log("  << Finalizing removeListenerButtons");
}

/** Add left/right click handlers to every cell present in `view`. */
async function addListenerCells(column, row, view) {
    for (let i = 0; i < row; i++) {
        for (let j = 0; j < column; j++) {
            if (view[i][j] != null) {
                addListenerClickLEFT(cellSelector(i, j));
                addListenerClickRIGHT(cellSelector(i, j));
            }
        }
    }
}

/** Remove left/right click handlers from every cell present in `view`. */
async function removeListenerCells(column, row, view) {
    for (let i = 0; i < row; i++) {
        for (let j = 0; j < column; j++) {
            if (view[i][j] != null) {
                removeListenerClickLEFT(cellSelector(i, j));
                removeListenerClickRIGHT(cellSelector(i, j));
            }
        }
    }
}
edugonza/PADAS
src/org/processmining/database/redologs/common/IdBatchDispenser.java
<gh_stars>1-10 package org.processmining.database.redologs.common; public class IdBatchDispenser { private int startId = 0; private int batchSize = 100; public IdBatchDispenser(int startId, int batchSize) { this.startId = startId; this.batchSize = batchSize; } public IdDispenser getDispenser() { IdDispenser idd = new IdDispenser(this); return idd; } protected synchronized int[] getBatch() { int[] b = new int[] {startId,startId+batchSize}; startId += batchSize + 1; return b; } }
icaas/DataX
common/src/test/java/com/alibaba/datax/common/exception/FakeErrorCode.java
package com.alibaba.datax.common.exception;

import com.alibaba.datax.common.spi.ErrorCode;

/**
 * Error codes that exist only to exercise the {@link ErrorCode} contract in
 * tests; never used by production code.
 */
public enum FakeErrorCode implements ErrorCode {

	/** Plain ASCII test code. */
	FAKE_ERROR_CODE_ONLY_FOR_TEST_00("FakeErrorCode-00", "only a test, FakeErrorCode."),

	/** Test code whose description contains non-ASCII (Chinese) text. */
	FAKE_ERROR_CODE_ONLY_FOR_TEST_01("FakeErrorCode-01", "only a test, FakeErrorCode,测试中文.");

	private final String code;
	private final String description;

	private FakeErrorCode(String code, String description) {
		this.code = code;
		this.description = description;
	}

	@Override
	public String getCode() {
		return code;
	}

	@Override
	public String getDescription() {
		return description;
	}

	@Override
	public String toString() {
		return String.format("Code:[%s], Describe:[%s]", code, description);
	}
}
julianmichael/qasrl-roles
qasrl-roles/modeling/src-js/package.scala
package qasrl.roles.modeling

/** JS-platform extension points for the modeling package — presumably mixed
  * into a cross-built package object; confirm against the shared sources.
  */
trait PackagePlatformExtensions {
  /** Platform-specific TFIDF extension point; empty on the JS platform. */
  trait TFIDFPlatformExtensions
}
Rafsanjani/media
libraries/transformer/src/main/java/androidx/media3/transformer/VideoEncoderSettings.java
<gh_stars>100-1000 /* * Copyright 2022 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package androidx.media3.transformer; import static androidx.media3.common.util.Assertions.checkArgument; import static java.lang.annotation.ElementType.TYPE_USE; import android.annotation.SuppressLint; import android.media.MediaCodecInfo; import android.media.MediaFormat; import androidx.annotation.IntDef; import androidx.annotation.Nullable; import androidx.annotation.VisibleForTesting; import androidx.media3.common.Format; import androidx.media3.common.util.UnstableApi; import java.lang.annotation.Documented; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; /** Represents the video encoder settings. */ @UnstableApi public final class VideoEncoderSettings { /** A value for various fields to indicate that the field's value is unknown or not applicable. */ public static final int NO_VALUE = Format.NO_VALUE; /** The default encoding color profile. */ public static final int DEFAULT_COLOR_PROFILE = MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface; /** The default I-frame interval in seconds. */ public static final float DEFAULT_I_FRAME_INTERVAL_SECONDS = 1.0f; /** A default {@link VideoEncoderSettings}. 
*/ public static final VideoEncoderSettings DEFAULT = new Builder().build(); /** * The allowed values for {@code bitrateMode}, one of * * <ul> * <li>Constant quality: {@link MediaCodecInfo.EncoderCapabilities#BITRATE_MODE_CQ}. * <li>Variable bitrate: {@link MediaCodecInfo.EncoderCapabilities#BITRATE_MODE_VBR}. * <li>Constant bitrate: {@link MediaCodecInfo.EncoderCapabilities#BITRATE_MODE_CBR}. * <li>Constant bitrate with frame drops: {@link * MediaCodecInfo.EncoderCapabilities#BITRATE_MODE_CBR_FD}, available from API31. * </ul> */ @SuppressLint("InlinedApi") @Documented @Retention(RetentionPolicy.SOURCE) @Target(TYPE_USE) @IntDef({ MediaCodecInfo.EncoderCapabilities.BITRATE_MODE_CQ, MediaCodecInfo.EncoderCapabilities.BITRATE_MODE_VBR, MediaCodecInfo.EncoderCapabilities.BITRATE_MODE_CBR, MediaCodecInfo.EncoderCapabilities.BITRATE_MODE_CBR_FD }) public @interface BitrateMode {} /** Builds {@link VideoEncoderSettings} instances. */ public static final class Builder { private int bitrate; private @BitrateMode int bitrateMode; private int profile; private int level; private int colorProfile; private float iFrameIntervalSeconds; private int operatingRate; private int priority; /** Creates a new instance. 
*/ public Builder() { this.bitrate = NO_VALUE; this.bitrateMode = MediaCodecInfo.EncoderCapabilities.BITRATE_MODE_VBR; this.profile = NO_VALUE; this.level = NO_VALUE; this.colorProfile = DEFAULT_COLOR_PROFILE; this.iFrameIntervalSeconds = DEFAULT_I_FRAME_INTERVAL_SECONDS; this.operatingRate = NO_VALUE; this.priority = NO_VALUE; } private Builder(VideoEncoderSettings videoEncoderSettings) { this.bitrate = videoEncoderSettings.bitrate; this.bitrateMode = videoEncoderSettings.bitrateMode; this.profile = videoEncoderSettings.profile; this.level = videoEncoderSettings.level; this.colorProfile = videoEncoderSettings.colorProfile; this.iFrameIntervalSeconds = videoEncoderSettings.iFrameIntervalSeconds; this.operatingRate = videoEncoderSettings.operatingRate; this.priority = videoEncoderSettings.priority; } /** * Sets {@link VideoEncoderSettings#bitrate}. The default value is {@link #NO_VALUE}. * * @param bitrate The {@link VideoEncoderSettings#bitrate}. * @return This builder. */ public Builder setBitrate(int bitrate) { this.bitrate = bitrate; return this; } /** * Sets {@link VideoEncoderSettings#bitrateMode}. The default value is {@code * MediaCodecInfo.EncoderCapabilities.BITRATE_MODE_VBR}. * * <p>Only {@link MediaCodecInfo.EncoderCapabilities#BITRATE_MODE_VBR} and {@link * MediaCodecInfo.EncoderCapabilities#BITRATE_MODE_CBR} are allowed. * * @param bitrateMode The {@link VideoEncoderSettings#bitrateMode}. * @return This builder. */ public Builder setBitrateMode(@BitrateMode int bitrateMode) { checkArgument( bitrateMode == MediaCodecInfo.EncoderCapabilities.BITRATE_MODE_VBR || bitrateMode == MediaCodecInfo.EncoderCapabilities.BITRATE_MODE_CBR); this.bitrateMode = bitrateMode; return this; } /** * Sets {@link VideoEncoderSettings#profile} and {@link VideoEncoderSettings#level}. The default * values are both {@link #NO_VALUE}. * * <p>The value must be one of the values defined in {@link MediaCodecInfo.CodecProfileLevel}, * or {@link #NO_VALUE}. 
* * <p>Profile and level settings will be ignored when using {@link DefaultEncoderFactory} and * encoding to H264. * * @param encodingProfile The {@link VideoEncoderSettings#profile}. * @param encodingLevel The {@link VideoEncoderSettings#level}. * @return This builder. */ public Builder setEncodingProfileLevel(int encodingProfile, int encodingLevel) { this.profile = encodingProfile; this.level = encodingLevel; return this; } /** * Sets {@link VideoEncoderSettings#colorProfile}. The default value is {@link * #DEFAULT_COLOR_PROFILE}. * * <p>The value must be one of the {@code COLOR_*} constants defined in {@link * MediaCodecInfo.CodecCapabilities}. * * @param colorProfile The {@link VideoEncoderSettings#colorProfile}. * @return This builder. */ public Builder setColorProfile(int colorProfile) { this.colorProfile = colorProfile; return this; } /** * Sets {@link VideoEncoderSettings#iFrameIntervalSeconds}. The default value is {@link * #DEFAULT_I_FRAME_INTERVAL_SECONDS}. * * @param iFrameIntervalSeconds The {@link VideoEncoderSettings#iFrameIntervalSeconds}. * @return This builder. */ public Builder setiFrameIntervalSeconds(float iFrameIntervalSeconds) { this.iFrameIntervalSeconds = iFrameIntervalSeconds; return this; } /** * Sets encoding operating rate and priority. The default values are {@link #NO_VALUE}. * * @param operatingRate The {@link MediaFormat#KEY_OPERATING_RATE operating rate}. * @param priority The {@link MediaFormat#KEY_PRIORITY priority}. * @return This builder. */ @VisibleForTesting public Builder setEncoderPerformanceParameters(int operatingRate, int priority) { this.operatingRate = operatingRate; this.priority = priority; return this; } /** Builds the instance. */ public VideoEncoderSettings build() { return new VideoEncoderSettings( bitrate, bitrateMode, profile, level, colorProfile, iFrameIntervalSeconds, operatingRate, priority); } } /** The encoding bitrate. */ public final int bitrate; /** One of {@linkplain BitrateMode the allowed modes}. 
*/ public final @BitrateMode int bitrateMode; /** The encoding profile. */ public final int profile; /** The encoding level. */ public final int level; /** The encoding color profile. */ public final int colorProfile; /** The encoding I-Frame interval in seconds. */ public final float iFrameIntervalSeconds; /** The encoder {@link MediaFormat#KEY_OPERATING_RATE operating rate}. */ public final int operatingRate; /** The encoder {@link MediaFormat#KEY_PRIORITY priority}. */ public final int priority; private VideoEncoderSettings( int bitrate, int bitrateMode, int profile, int level, int colorProfile, float iFrameIntervalSeconds, int operatingRate, int priority) { this.bitrate = bitrate; this.bitrateMode = bitrateMode; this.profile = profile; this.level = level; this.colorProfile = colorProfile; this.iFrameIntervalSeconds = iFrameIntervalSeconds; this.operatingRate = operatingRate; this.priority = priority; } /** * Returns a {@link VideoEncoderSettings.Builder} initialized with the values of this instance. */ public Builder buildUpon() { return new Builder(this); } @Override public boolean equals(@Nullable Object o) { if (this == o) { return true; } if (!(o instanceof VideoEncoderSettings)) { return false; } VideoEncoderSettings that = (VideoEncoderSettings) o; return bitrate == that.bitrate && bitrateMode == that.bitrateMode && profile == that.profile && level == that.level && colorProfile == that.colorProfile && iFrameIntervalSeconds == that.iFrameIntervalSeconds && operatingRate == that.operatingRate && priority == that.priority; } @Override public int hashCode() { int result = 7; result = 31 * result + bitrate; result = 31 * result + bitrateMode; result = 31 * result + profile; result = 31 * result + level; result = 31 * result + colorProfile; result = 31 * result + Float.floatToIntBits(iFrameIntervalSeconds); result = 31 * result + operatingRate; result = 31 * result + priority; return result; } }
muratcoskunn/dcl350-2022-mar-10-main
hr-domain/src/com/example/hr/domain/FullName.java
package com.example.hr.domain;

import java.util.Objects;

import com.example.ddd.annotations.ValueObject;

/**
 * Immutable value object representing a person's full name. Instances are
 * created via the validating factory {@link #of(String, String)}; equality is
 * based on both name components.
 */
@ValueObject
public final class FullName {
	private final String firstName;
	private final String lastName;

	/**
	 * Creates a validated {@code FullName}.
	 *
	 * @param firstName at least 3 alphabetic characters
	 * @param lastName  at least 2 alphabetic characters
	 * @return the validated value object
	 * @throws NullPointerException     if either argument is {@code null}
	 * @throws IllegalArgumentException if either argument fails validation
	 */
	public static FullName of(String firstName, String lastName) {
		Objects.requireNonNull(firstName);
		Objects.requireNonNull(lastName);
		// Accept both letter cases: the previous pattern ^[a-z]{3,}$ rejected
		// capitalized names such as "John" even though the error message
		// promises "alphabet chars".
		if (!firstName.matches("^[a-zA-Z]{3,}$"))
			throw new IllegalArgumentException("Firstname must have at least 3 alphabet chars.");
		if (!lastName.matches("^[a-zA-Z]{2,}$"))
			throw new IllegalArgumentException("Lastname must have at least 2 alphabet chars.");
		return new FullName(firstName, lastName);
	}

	// Private: all construction funnels through the validating factory above.
	private FullName(String firstName, String lastName) {
		this.firstName = firstName;
		this.lastName = lastName;
	}

	public String getFirstName() {
		return firstName;
	}

	public String getLastName() {
		return lastName;
	}

	@Override
	public int hashCode() {
		return Objects.hash(firstName, lastName);
	}

	@Override
	public boolean equals(Object obj) {
		if (this == obj)
			return true;
		if (obj == null)
			return false;
		if (getClass() != obj.getClass())
			return false;
		FullName other = (FullName) obj;
		return Objects.equals(firstName, other.firstName) && Objects.equals(lastName, other.lastName);
	}

	@Override
	public String toString() {
		return "FullName [firstName=" + firstName + ", lastName=" + lastName + "]";
	}
}
mforbes/readthedocs.org
readthedocs/oauth/migrations/0013_create_new_table_for_remote_repository_normalization.py
# Generated by Django 2.2.17 on 2020-12-21 18:16
from django.conf import settings
# Required: field definitions below reference django.core.validators.URLValidator;
# without this import, loading the migration raises AttributeError.
import django.core.validators
from django.db import migrations, models
import django.db.models.deletion
import django_extensions.db.fields


class Migration(migrations.Migration):
    """Create the new (normalized) RemoteRepository tables alongside the old ones."""

    dependencies = [
        ('socialaccount', '0003_extra_data_default_dict'),
        ('projects', '0067_change_max_length_feature_id'),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('oauth', '0012_create_new_table_for_remote_organization_normalization'),
    ]

    operations = [
        migrations.CreateModel(
            name='RemoteRepository',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created', django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, verbose_name='created')),
                ('modified', django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified')),
                ('name', models.CharField(max_length=255, verbose_name='Name')),
                ('full_name', models.CharField(db_index=True, max_length=255, verbose_name='Full Name')),
                ('description', models.TextField(blank=True, help_text='Description of the project', null=True, verbose_name='Description')),
                ('avatar_url', models.URLField(blank=True, null=True, verbose_name='Owner avatar image URL')),
                ('ssh_url', models.URLField(blank=True, max_length=512, validators=[django.core.validators.URLValidator(schemes=['ssh'])], verbose_name='SSH URL')),
                ('clone_url', models.URLField(blank=True, max_length=512, validators=[django.core.validators.URLValidator(schemes=['http', 'https', 'ssh', 'git', 'svn'])], verbose_name='Repository clone URL')),
                ('html_url', models.URLField(blank=True, null=True, verbose_name='HTML URL')),
                ('private', models.BooleanField(default=False, verbose_name='Private repository')),
                ('vcs', models.CharField(blank=True, choices=[('git', 'Git'), ('svn', 'Subversion'), ('hg', 'Mercurial'), ('bzr', 'Bazaar')], max_length=200, verbose_name='vcs')),
                ('default_branch', models.CharField(blank=True, max_length=150, null=True, verbose_name='Default branch of the repository')),
                ('remote_id', models.CharField(db_index=True, max_length=128)),
                ('vcs_provider', models.CharField(choices=[('github', 'GitHub'), ('gitlab', 'GitLab'), ('bitbucket', 'Bitbucket')], max_length=32, verbose_name='VCS provider')),
                ('organization', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='repositories', to='oauth.RemoteOrganization', verbose_name='Organization')),
                ('project', models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='remote_repository', to='projects.Project')),
            ],
            options={
                'verbose_name_plural': 'remote repositories',
                # New table coexists with the legacy one during the migration window.
                'db_table': 'oauth_remoterepository_2020',
                'ordering': ['full_name'],
            },
        ),
        migrations.CreateModel(
            name='RemoteRepositoryRelation',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created', django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, verbose_name='created')),
                ('modified', django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified')),
                ('admin', models.BooleanField(default=False, verbose_name='Has admin privilege')),
                ('account', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='remote_repository_relations', to='socialaccount.SocialAccount', verbose_name='Connected account')),
                ('remote_repository', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='remote_repository_relations', to='oauth.RemoteRepository')),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='remote_repository_relations', to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'unique_together': {('remote_repository', 'account')},
            },
        ),
        migrations.AddField(
            model_name='remoterepository',
            name='users',
            field=models.ManyToManyField(related_name='oauth_repositories', through='oauth.RemoteRepositoryRelation', to=settings.AUTH_USER_MODEL, verbose_name='Users'),
        ),
        migrations.AlterUniqueTogether(
            name='remoterepository',
            unique_together={('remote_id', 'vcs_provider')},
        ),
    ]
rjw57/tiw-computer
emulator/src/devices/bus/einstein/pipe/tk02.cpp
// license: GPL-2.0+
// copyright-holders: <NAME>, <NAME>
/***************************************************************************

    TK02 80 Column Monochrome Unit

    Expansion board for the Tatung Einstein, attached via the Tatung pipe.
    Provides an 80-column text display driven by an MC6845 CRTC with 2 KiB
    of character RAM and a 8 KiB character-generator ROM.

***************************************************************************/

#include "emu.h"
#include "tk02.h"
#include "screen.h"


//**************************************************************************
//  DEVICE DEFINITIONS
//**************************************************************************

DEFINE_DEVICE_TYPE(TK02_80COL, tk02_device, "tk02", "TK02 80 Column Monochrome Unit")

//-------------------------------------------------
//  device_address_map
//-------------------------------------------------

// I/O map for the board; the RAM window (0x00-0x07) cannot be expressed
// here (see comment below), so it is installed by hand in device_reset().
ADDRESS_MAP_START(tk02_device::map)
//  AM_RANGE(0x00, 0x07) AM_SELECT(0xff00) AM_READWRITE(ram_r, ram_w) // no AM_SELECT (or AM_MASK) support here
	AM_RANGE(0x08, 0x08) AM_MIRROR(0xff00) AM_DEVWRITE("crtc", mc6845_device, address_w)
	AM_RANGE(0x09, 0x09) AM_MIRROR(0xff00) AM_DEVWRITE("crtc", mc6845_device, register_w)
	AM_RANGE(0x0c, 0x0c) AM_MIRROR(0xff00) AM_READ(status_r)
ADDRESS_MAP_END

//-------------------------------------------------
//  rom_region - device-specific ROM region
//-------------------------------------------------

ROM_START( tk02 )
	ROM_REGION(0x2000, "gfx", 0)
	ROM_LOAD("tk02-v100.bin", 0x0000, 0x2000, CRC(ad3c4346) SHA1(cd57e630371b4d0314e3f15693753fb195c7257d))
ROM_END

const tiny_rom_entry *tk02_device::device_rom_region() const
{
	return ROM_NAME( tk02 );
}

//-------------------------------------------------
//  input_ports - device-specific input ports
//-------------------------------------------------

// Configuration links (solder jumpers) M001-M004; read back via status_r
// and the character-ROM bank select in crtc_update_row.
static INPUT_PORTS_START( tk02_links )
	PORT_START("M001")
	PORT_DIPUNUSED_DIPLOC(0x01, 0x01, "M001:1")
	PORT_START("M002")
	PORT_DIPNAME(0x01, 0x00, "TV Standard")
	PORT_DIPLOCATION("M002:1")
	PORT_DIPSETTING(0x00, "625 lines/50 Hz")
	PORT_DIPSETTING(0x01, "525 lines/60 Hz")
	PORT_START("M003")
	PORT_DIPNAME(0x01, 0x00, "Startup Mode")
	PORT_DIPLOCATION("M003:1")
	PORT_DIPSETTING(0x00, "Normal")
	PORT_DIPSETTING(0x01, "Automatic 80 Column")
	PORT_START("M004")
	PORT_DIPNAME(0x01, 0x00, "Character Set")
	PORT_DIPLOCATION("M004:1")
	PORT_DIPSETTING(0x00, "Modified")
	PORT_DIPSETTING(0x01, "Normal")
INPUT_PORTS_END

ioport_constructor tk02_device::device_input_ports() const
{
	return INPUT_PORTS_NAME( tk02_links );
}

//-------------------------------------------------
//  gfx_layout - only for the char viewer
//-------------------------------------------------

// 8x10 cells: rows 0-7 come from the first half of the ROM, rows 8-9 from
// the second half (offsets 0x800/0x801) — matches the addressing used in
// crtc_update_row.
static const gfx_layout tk02_charlayout =
{
	8, 10,
	256,
	1,
	{ 0 },
	{ 0, 1, 2, 3, 4, 5, 6, 7 },
	{ 0*8, 1*8, 2*8, 3*8, 4*8, 5*8, 6*8, 7*8, 0x800*8, 0x801*8 },
	8*8
};

static GFXDECODE_START( tk02 )
	GFXDECODE_ENTRY("gfx", 0x0000, tk02_charlayout, 0, 1)
	GFXDECODE_ENTRY("gfx", 0x1000, tk02_charlayout, 0, 1)
GFXDECODE_END

//-------------------------------------------------
//  device_add_mconfig - add device configuration
//-------------------------------------------------

MACHINE_CONFIG_START(tk02_device::device_add_mconfig)
	MCFG_SCREEN_ADD_MONOCHROME("mono", RASTER, rgb_t::green())
	MCFG_SCREEN_RAW_PARAMS(XTAL(8'000'000) * 2, 1024, 0, 640, 312, 0, 250)
	MCFG_SCREEN_UPDATE_DEVICE("crtc", mc6845_device, screen_update)
	MCFG_PALETTE_ADD_MONOCHROME("palette")
	MCFG_GFXDECODE_ADD("gfxdecode", "palette", tk02)
	MCFG_MC6845_ADD("crtc", MC6845, "mono", XTAL(8'000'000) / 4)
	MCFG_MC6845_SHOW_BORDER_AREA(false)
	MCFG_MC6845_CHAR_WIDTH(8)
	MCFG_MC6845_UPDATE_ROW_CB(tk02_device, crtc_update_row)
	MCFG_MC6845_OUT_DE_CB(WRITELINE(tk02_device, de_w))
	MCFG_TATUNG_PIPE_ADD("pipe")
MACHINE_CONFIG_END


//**************************************************************************
//  LIVE DEVICE
//**************************************************************************

//-------------------------------------------------
//  tk02_device - constructor
//-------------------------------------------------

tk02_device::tk02_device(const machine_config &mconfig, const char *tag, device_t *owner, uint32_t clock) :
	device_t(mconfig, TK02_80COL, tag, owner, clock),
	device_tatung_pipe_interface(mconfig, *this),
	m_pipe(*this, "pipe"),
	m_crtc(*this, "crtc"),
	m_palette(*this, "palette"),
	m_gfx(*this, "gfx"),
	m_links(*this, "M00%u", 1),
	m_de(0)
{
}

//-------------------------------------------------
//  device_start - device-specific startup
//-------------------------------------------------

void tk02_device::device_start()
{
	// setup ram (2 KiB video RAM, initialized to 0xff)
	m_ram = std::make_unique<uint8_t[]>(0x800);
	memset(m_ram.get(), 0xff, 0x800);

	// register for save states
	save_pointer(NAME(m_ram.get()), 0x800);
	save_item(NAME(m_de));
}

//-------------------------------------------------
//  device_reset - device-specific reset
//-------------------------------------------------

void tk02_device::device_reset()
{
	m_pipe->set_io_space(&io_space());
	m_pipe->set_program_space(&program_space());

	io_space().install_device(0x40, 0x4f, *this, &tk02_device::map);
	// Install the RAM window by hand with the mirror mask, standing in for
	// the AM_SELECT entry that the address map cannot express (see map).
	io_space().install_readwrite_handler(0x40, 0x47, 0, 0, 0xff00,
		read8_delegate(FUNC(tk02_device::ram_r), this),
		write8_delegate(FUNC(tk02_device::ram_w), this));
}


//**************************************************************************
//  IMPLEMENTATION
//**************************************************************************

// Render one scanline of 8-pixel character cells; the cursor cell is
// drawn inverted.
MC6845_UPDATE_ROW( tk02_device::crtc_update_row )
{
	const pen_t *pen = m_palette->pens();

	for (int i = 0; i < x_count; i++)
	{
		uint8_t code = m_ram[(ma + i) & 0x07ff];

		// 12-------------- link M004
		// --11------------ ra3
		// ----109876543--- data from ram
		// -------------210 ra0..2
		uint8_t data = m_gfx->as_u8((m_links[3]->read() << 12) | ((ra & 0x08) << 8) | (code << 3) | (ra & 0x07));

		if (i == cursor_x)
			data ^= 0xff;

		bitmap.pix32(y, i * 8 + 0) = pen[BIT(data, 7)];
		bitmap.pix32(y, i * 8 + 1) = pen[BIT(data, 6)];
		bitmap.pix32(y, i * 8 + 2) = pen[BIT(data, 5)];
		bitmap.pix32(y, i * 8 + 3) = pen[BIT(data, 4)];
		bitmap.pix32(y, i * 8 + 4) = pen[BIT(data, 3)];
		bitmap.pix32(y, i * 8 + 5) = pen[BIT(data, 2)];
		bitmap.pix32(y, i * 8 + 6) = pen[BIT(data, 1)];
		bitmap.pix32(y, i * 8 + 7) = pen[BIT(data, 0)];
	}
}

// Latch the CRTC display-enable line; reported to software via status_r.
WRITE_LINE_MEMBER( tk02_device::de_w )
{
	m_de = state;
}

// lower 3 bits of address define a 256-byte "row"
// upper 8 bits define the offset in the row
READ8_MEMBER( tk02_device::ram_r )
{
	return m_ram[((offset & 0x07) << 8) | ((offset >> 8) & 0xff)];
}

WRITE8_MEMBER( tk02_device::ram_w )
{
	m_ram[((offset & 0x07) << 8) | ((offset >> 8) & 0xff)] = data;
}

// Status port: configuration links M001-M003 plus the CRTC display-enable.
READ8_MEMBER( tk02_device::status_r )
{
	// 7654---- unused
	// ----3--- link M001
	// -----2-- link M002
	// ------1- link M003
	// -------0 mc6845 display enabled

	uint8_t data = 0xf0;

	data |= m_links[0]->read() << 3;
	data |= m_links[1]->read() << 2;
	data |= m_links[2]->read() << 1;
	data |= m_de << 0;

	return data;
}
LeeWongSnail/SourceCode_ReadingNote
xnu-2782.1.97/bsd/kern/pthread_shims.c
/*
 * Copyright (c) 2012 Apple Inc. All rights reserved.
 *
 * @APPLE_OSREFERENCE_LICENSE_HEADER_START@
 *
 * This file contains Original Code and/or Modifications of Original Code
 * as defined in and that are subject to the Apple Public Source License
 * Version 2.0 (the 'License'). You may not use this file except in
 * compliance with the License. The rights granted to you under the License
 * may not be used to create, or enable the creation or redistribution of,
 * unlawful or unlicensed copies of an Apple operating system, or to
 * circumvent, violate, or enable the circumvention or violation of, any
 * terms of an Apple operating system software license agreement.
 *
 * Please obtain a copy of the License at
 * http://www.opensource.apple.com/apsl/ and read it before using this file.
 *
 * The Original Code and all software distributed under the License are
 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
 * Please see the License for the specific language governing rights and
 * limitations under the License.
 *
 * @APPLE_OSREFERENCE_LICENSE_HEADER_END@
 */

/*
 * Shim layer between the BSD kernel proper and the pthread kernel extension
 * (pthread.kext). Syscall entry points forward into the kext through the
 * pthread_functions table; conversely, kernel internals the kext needs are
 * exported to it as accessor/helper functions.
 */

#define PTHREAD_INTERNAL 1

#include <kern/debug.h>
#include <kern/mach_param.h>
#include <kern/sched_prim.h>
#include <kern/task.h>
#include <kern/thread.h>
#include <kern/affinity.h>
#include <kern/zalloc.h>
#include <machine/machine_routines.h>
#include <mach/task.h>
#include <mach/thread_act.h>
#include <sys/param.h>
#include <sys/pthread_shims.h>
#include <sys/proc_internal.h>
#include <sys/sysproto.h>
#include <sys/systm.h>
#include <vm/vm_map.h>
#include <vm/vm_protos.h>

/* version number of the in-kernel shims given to pthread.kext */
#define PTHREAD_SHIMS_VERSION 1

/* on arm, the callbacks function has two #ifdef arm pointers */
#define PTHREAD_CALLBACK_MEMBER ml_get_max_cpus

/* compile time asserts to check the length of structures in pthread_shims.h */
/* (array size becomes -1, a compile error, if the struct layout drifts) */
char pthread_functions_size_compile_assert[(sizeof(struct pthread_functions_s) - offsetof(struct pthread_functions_s, psynch_rw_yieldwrlock) - sizeof(void*)) == (sizeof(void*) * 100) ? 1 : -1];
char pthread_callbacks_size_compile_assert[(sizeof(struct pthread_callbacks_s) - offsetof(struct pthread_callbacks_s, PTHREAD_CALLBACK_MEMBER) - sizeof(void*)) == (sizeof(void*) * 100) ? 1 : -1];

/* old pthread code had definitions for these as they don't exist in headers */
extern kern_return_t mach_port_deallocate(ipc_space_t, mach_port_name_t);
extern kern_return_t semaphore_signal_internal_trap(mach_port_name_t);

/* Generates a trivial getter/setter pair for one struct member. */
#define PTHREAD_STRUCT_ACCESSOR(get, set, rettype, structtype, member) \
	static rettype \
	get(structtype x) { \
		return (x)->member; \
	} \
	static void \
	set(structtype x, rettype y) { \
		(x)->member = y; \
	}

/* Accessors over struct proc / struct uthread fields handed to the kext. */
PTHREAD_STRUCT_ACCESSOR(proc_get_threadstart, proc_set_threadstart, user_addr_t, struct proc*, p_threadstart);
PTHREAD_STRUCT_ACCESSOR(proc_get_pthsize, proc_set_pthsize, int, struct proc*, p_pthsize);
PTHREAD_STRUCT_ACCESSOR(proc_get_wqthread, proc_set_wqthread, user_addr_t, struct proc*, p_wqthread);
PTHREAD_STRUCT_ACCESSOR(proc_get_targconc, proc_set_targconc, user_addr_t, struct proc*, p_targconc);
PTHREAD_STRUCT_ACCESSOR(proc_get_stack_addr_hint, proc_set_stack_addr_hint, user_addr_t, struct proc *, p_stack_addr_hint);
PTHREAD_STRUCT_ACCESSOR(proc_get_dispatchqueue_offset, proc_set_dispatchqueue_offset, uint64_t, struct proc*, p_dispatchqueue_offset);
PTHREAD_STRUCT_ACCESSOR(proc_get_dispatchqueue_serialno_offset, proc_set_dispatchqueue_serialno_offset, uint64_t, struct proc*, p_dispatchqueue_serialno_offset);
PTHREAD_STRUCT_ACCESSOR(proc_get_pthread_tsd_offset, proc_set_pthread_tsd_offset, uint32_t, struct proc *, p_pth_tsd_offset);
PTHREAD_STRUCT_ACCESSOR(proc_get_wqptr, proc_set_wqptr, void*, struct proc*, p_wqptr);
PTHREAD_STRUCT_ACCESSOR(proc_get_wqsize, proc_set_wqsize, int, struct proc*, p_wqsize);
PTHREAD_STRUCT_ACCESSOR(proc_get_pthhash, proc_set_pthhash, void*, struct proc*, p_pthhash);

PTHREAD_STRUCT_ACCESSOR(uthread_get_threadlist, uthread_set_threadlist, void*, struct uthread*, uu_threadlist);
PTHREAD_STRUCT_ACCESSOR(uthread_get_sigmask, uthread_set_sigmask, sigset_t, struct uthread*, uu_sigmask);
PTHREAD_STRUCT_ACCESSOR(uthread_get_returnval, uthread_set_returnval, int, struct uthread*, uu_rval[0]);

/* Return to user space without coming back through the syscall path. */
static void
pthread_returning_to_userspace(void)
{
	thread_exception_return();
}

/* Kernel-wide per-task thread limit, exported to the kext. */
static uint32_t
get_task_threadmax(void) {
	return task_threadmax;
}

static task_t
proc_get_task(struct proc *p) {
	return p->task;
}

static lck_spin_t*
proc_get_wqlockptr(struct proc *p) {
	return &(p->p_wqlock);
}

static boolean_t*
proc_get_wqinitingptr(struct proc *p) {
	return &(p->p_wqiniting);
}

/* Non-zero when the process has registered with bsdthread_register(). */
static uint64_t
proc_get_register(struct proc *p) {
	return (p->p_lflag & P_LREGISTER);
}

static void
proc_set_register(struct proc *p) {
	proc_setregister(p);
}

static void*
uthread_get_uukwe(struct uthread *t)
{
	return &t->uu_kevent.uu_kwe;
}

/* True only when a cancel is pending and cancellation is enabled. */
static int
uthread_is_cancelled(struct uthread *t)
{
	return (t->uu_flag & (UT_CANCELDISABLE | UT_CANCEL | UT_CANCELED)) == UT_CANCEL;
}

static vm_map_t
_current_map(void)
{
	return current_map();
}

static boolean_t
qos_main_thread_active(void)
{
	return TRUE;
}


static int proc_usynch_get_requested_thread_qos(struct uthread *uth)
{
	task_t		task = current_task();
	thread_t	thread = uth ? uth->uu_thread : current_thread();
	int			requested_qos;

	requested_qos = proc_get_task_policy(task, thread, TASK_POLICY_ATTRIBUTE, TASK_POLICY_QOS);

	/*
	 * For the purposes of userspace synchronization, it doesn't make sense to place an override of UNSPECIFIED
	 * on another thread, if the current thread doesn't have any QoS set. In these cases, upgrade to
	 * THREAD_QOS_USER_INTERACTIVE.
	 */
	if (requested_qos == THREAD_QOS_UNSPECIFIED) {
		requested_qos = THREAD_QOS_USER_INTERACTIVE;
	}

	return requested_qos;
}

static boolean_t proc_usynch_thread_qos_add_override(struct uthread *uth, uint64_t tid, int override_qos, boolean_t first_override_for_resource)
{
	task_t task = current_task();
	thread_t thread = uth ? uth->uu_thread : THREAD_NULL;

	return proc_thread_qos_add_override(task, thread, tid, override_qos, first_override_for_resource);
}

static boolean_t proc_usynch_thread_qos_remove_override(struct uthread *uth, uint64_t tid)
{
	task_t task = current_task();
	thread_t thread = uth ? uth->uu_thread : THREAD_NULL;

	return proc_thread_qos_remove_override(task, thread, tid);
}

/* kernel (core) to kext shims */

void
pthread_init(void)
{
	if (!pthread_functions) {
		panic("pthread kernel extension not loaded (function table is NULL).");
	}
	pthread_functions->pthread_init();
}

int
fill_procworkqueue(proc_t p, struct proc_workqueueinfo * pwqinfo)
{
	return pthread_functions->fill_procworkqueue(p, pwqinfo);
}

void
workqueue_init_lock(proc_t p)
{
	pthread_functions->workqueue_init_lock(p);
}

void
workqueue_destroy_lock(proc_t p)
{
	pthread_functions->workqueue_destroy_lock(p);
}

void
workqueue_exit(struct proc *p)
{
	pthread_functions->workqueue_exit(p);
}

void
workqueue_mark_exiting(struct proc *p)
{
	pthread_functions->workqueue_mark_exiting(p);
}

void
workqueue_thread_yielded(void)
{
	pthread_functions->workqueue_thread_yielded();
}

/* May be absent in older kexts; returns NULL in that case. */
sched_call_t
workqueue_get_sched_callback(void)
{
	if (pthread_functions->workqueue_get_sched_callback) {
		return pthread_functions->workqueue_get_sched_callback();
	}
	return NULL;
}

void
pth_proc_hashinit(proc_t p)
{
	pthread_functions->pth_proc_hashinit(p);
}

void
pth_proc_hashdelete(proc_t p)
{
	pthread_functions->pth_proc_hashdelete(p);
}

/* syscall shims */
int
bsdthread_create(struct proc *p, struct bsdthread_create_args *uap, user_addr_t *retval)
{
	return pthread_functions->bsdthread_create(p, uap->func, uap->func_arg, uap->stack, uap->pthread, uap->flags, retval);
}

int
bsdthread_register(struct proc *p, struct bsdthread_register_args *uap, __unused int32_t *retval)
{
	/* Newer kexts (version >= 1) accept the TSD offset as well. */
	if (pthread_functions->version >= 1) {
		return pthread_functions->bsdthread_register2(p, uap->threadstart, uap->wqthread,
							      uap->flags, uap->stack_addr_hint,
							      uap->targetconc_ptr, uap->dispatchqueue_offset,
							      uap->tsd_offset, retval);
	} else {
		return pthread_functions->bsdthread_register(p, uap->threadstart, uap->wqthread,
							     uap->flags, uap->stack_addr_hint,
							     uap->targetconc_ptr, uap->dispatchqueue_offset,
							     retval);
	}
}

int
bsdthread_terminate(struct proc *p, struct bsdthread_terminate_args *uap, int32_t *retval)
{
	return pthread_functions->bsdthread_terminate(p, uap->stackaddr, uap->freesize, uap->port, uap->sem, retval);
}

int
bsdthread_ctl(struct proc *p, struct bsdthread_ctl_args *uap, int *retval)
{
	return pthread_functions->bsdthread_ctl(p, uap->cmd, uap->arg1, uap->arg2, uap->arg3, retval);
}

int
thread_selfid(struct proc *p, __unused struct thread_selfid_args *uap, uint64_t *retval)
{
	return pthread_functions->thread_selfid(p, retval);
}

int
workq_kernreturn(struct proc *p, struct workq_kernreturn_args *uap, int32_t *retval)
{
	return pthread_functions->workq_kernreturn(p, uap->options, uap->item, uap->affinity, uap->prio, retval);
}

int
workq_open(struct proc *p, __unused struct workq_open_args *uap, int32_t *retval)
{
	return pthread_functions->workq_open(p, retval);
}

/* pthread synchroniser syscalls */

int
psynch_mutexwait(proc_t p, struct psynch_mutexwait_args *uap, uint32_t *retval)
{
	return pthread_functions->psynch_mutexwait(p, uap->mutex, uap->mgen, uap->ugen, uap->tid, uap->flags, retval);
}

int
psynch_mutexdrop(proc_t p, struct psynch_mutexdrop_args *uap, uint32_t *retval)
{
	return pthread_functions->psynch_mutexdrop(p, uap->mutex, uap->mgen, uap->ugen, uap->tid, uap->flags, retval);
}

int
psynch_cvbroad(proc_t p, struct psynch_cvbroad_args *uap, uint32_t *retval)
{
	return pthread_functions->psynch_cvbroad(p, uap->cv, uap->cvlsgen, uap->cvudgen, uap->flags, uap->mutex, uap->mugen, uap->tid, retval);
}

int
psynch_cvsignal(proc_t p, struct psynch_cvsignal_args *uap, uint32_t *retval)
{
	return pthread_functions->psynch_cvsignal(p, uap->cv, uap->cvlsgen, uap->cvugen, uap->thread_port, uap->mutex, uap->mugen, uap->tid, uap->flags, retval);
}

int
psynch_cvwait(proc_t p, struct psynch_cvwait_args * uap, uint32_t * retval)
{
	return pthread_functions->psynch_cvwait(p, uap->cv, uap->cvlsgen, uap->cvugen, uap->mutex, uap->mugen, uap->flags, uap->sec, uap->nsec, retval);
}

int
psynch_cvclrprepost(proc_t p, struct psynch_cvclrprepost_args * uap, int *retval)
{
	return pthread_functions->psynch_cvclrprepost(p, uap->cv, uap->cvgen, uap->cvugen, uap->cvsgen, uap->prepocnt, uap->preposeq, uap->flags, retval);
}

int
psynch_rw_longrdlock(proc_t p, struct psynch_rw_longrdlock_args * uap, uint32_t *retval)
{
	return pthread_functions->psynch_rw_longrdlock(p, uap->rwlock, uap->lgenval, uap->ugenval, uap->rw_wc, uap->flags, retval);
}

int
psynch_rw_rdlock(proc_t p, struct psynch_rw_rdlock_args * uap, uint32_t * retval)
{
	return pthread_functions->psynch_rw_rdlock(p, uap->rwlock, uap->lgenval, uap->ugenval, uap->rw_wc, uap->flags, retval);
}

int
psynch_rw_unlock(proc_t p, struct psynch_rw_unlock_args *uap, uint32_t *retval)
{
	return pthread_functions->psynch_rw_unlock(p, uap->rwlock, uap->lgenval, uap->ugenval, uap->rw_wc, uap->flags, retval);
}

/* Not implemented; kept only as a syscall slot. */
int
psynch_rw_unlock2(__unused proc_t p, __unused struct psynch_rw_unlock2_args *uap, __unused uint32_t *retval)
{
	return ENOTSUP;
}

int
psynch_rw_wrlock(proc_t p, struct psynch_rw_wrlock_args *uap, uint32_t *retval)
{
	return pthread_functions->psynch_rw_wrlock(p, uap->rwlock, uap->lgenval, uap->ugenval, uap->rw_wc, uap->flags, retval);
}

int
psynch_rw_yieldwrlock(proc_t p, struct psynch_rw_yieldwrlock_args *uap, uint32_t *retval)
{
	return pthread_functions->psynch_rw_yieldwrlock(p, uap->rwlock, uap->lgenval, uap->ugenval, uap->rw_wc, uap->flags, retval);
}

/* No-op stubs: upgrade/downgrade are not forwarded to the kext. */
int
psynch_rw_upgrade(__unused proc_t p, __unused struct psynch_rw_upgrade_args * uap, __unused uint32_t *retval)
{
	return 0;
}

int
psynch_rw_downgrade(__unused proc_t p, __unused struct psynch_rw_downgrade_args * uap, __unused int *retval)
{
	return 0;
}

/*
 * The callbacks structure (defined in pthread_shims.h) contains a collection
 * of kernel functions that were not deemed sensible to expose as a KPI to all
 * kernel extensions. So the kext is given them in the form of a structure of
 * function pointers.
*/ static struct pthread_callbacks_s pthread_callbacks = { .version = PTHREAD_SHIMS_VERSION, .config_thread_max = CONFIG_THREAD_MAX, .get_task_threadmax = get_task_threadmax, .proc_get_threadstart = proc_get_threadstart, .proc_set_threadstart = proc_set_threadstart, .proc_get_pthsize = proc_get_pthsize, .proc_set_pthsize = proc_set_pthsize, .proc_get_wqthread = proc_get_wqthread, .proc_set_wqthread = proc_set_wqthread, .proc_get_targconc = proc_get_targconc, .proc_set_targconc = proc_set_targconc, .proc_get_dispatchqueue_offset = proc_get_dispatchqueue_offset, .proc_set_dispatchqueue_offset = proc_set_dispatchqueue_offset, .proc_get_wqptr = proc_get_wqptr, .proc_set_wqptr = proc_set_wqptr, .proc_get_wqsize = proc_get_wqsize, .proc_set_wqsize = proc_set_wqsize, .proc_get_wqlockptr = proc_get_wqlockptr, .proc_get_wqinitingptr = proc_get_wqinitingptr, .proc_get_pthhash = proc_get_pthhash, .proc_set_pthhash = proc_set_pthhash, .proc_get_task = proc_get_task, .proc_lock = proc_lock, .proc_unlock = proc_unlock, .proc_get_register = proc_get_register, .proc_set_register = proc_set_register, /* kernel IPI interfaces */ .ipc_port_copyout_send = ipc_port_copyout_send, .task_get_ipcspace = get_task_ipcspace, .vm_map_page_info = vm_map_page_info, .vm_map_switch = vm_map_switch, .thread_set_wq_state32 = thread_set_wq_state32, .thread_set_wq_state64 = thread_set_wq_state64, .uthread_get_threadlist = uthread_get_threadlist, .uthread_set_threadlist = uthread_set_threadlist, .uthread_get_sigmask = uthread_get_sigmask, .uthread_set_sigmask = uthread_set_sigmask, .uthread_get_uukwe = uthread_get_uukwe, .uthread_get_returnval = uthread_get_returnval, .uthread_set_returnval = uthread_set_returnval, .uthread_is_cancelled = uthread_is_cancelled, .thread_exception_return = pthread_returning_to_userspace, .thread_bootstrap_return = thread_bootstrap_return, .unix_syscall_return = unix_syscall_return, .absolutetime_to_microtime = absolutetime_to_microtime, .proc_restore_workq_bgthreadpolicy 
= proc_restore_workq_bgthreadpolicy, .proc_apply_workq_bgthreadpolicy = proc_apply_workq_bgthreadpolicy, .get_bsdthread_info = (void*)get_bsdthread_info, .thread_sched_call = thread_sched_call, .thread_static_param = thread_static_param, .thread_create_workq = thread_create_workq, .thread_policy_set_internal = thread_policy_set_internal, .thread_policy_get = thread_policy_get, .thread_set_voucher_name = thread_set_voucher_name, .thread_affinity_set = thread_affinity_set, .zalloc = zalloc, .zfree = zfree, .zinit = zinit, .__pthread_testcancel = __pthread_testcancel, .mach_port_deallocate = mach_port_deallocate, .semaphore_signal_internal_trap = semaphore_signal_internal_trap, .current_map = _current_map, .thread_create = thread_create, .thread_resume = thread_resume, .convert_thread_to_port = convert_thread_to_port, .ml_get_max_cpus = (void*)ml_get_max_cpus, .proc_get_dispatchqueue_serialno_offset = proc_get_dispatchqueue_serialno_offset, .proc_set_dispatchqueue_serialno_offset = proc_set_dispatchqueue_serialno_offset, .proc_get_stack_addr_hint = proc_get_stack_addr_hint, .proc_set_stack_addr_hint = proc_set_stack_addr_hint, .proc_get_pthread_tsd_offset = proc_get_pthread_tsd_offset, .proc_set_pthread_tsd_offset = proc_set_pthread_tsd_offset, .thread_set_tsd_base = thread_set_tsd_base, .proc_usynch_get_requested_thread_qos = proc_usynch_get_requested_thread_qos, .proc_usynch_thread_qos_add_override = proc_usynch_thread_qos_add_override, .proc_usynch_thread_qos_remove_override = proc_usynch_thread_qos_remove_override, .qos_main_thread_active = qos_main_thread_active, }; pthread_callbacks_t pthread_kern = &pthread_callbacks; pthread_functions_t pthread_functions = NULL; /* * pthread_kext_register is called by pthread.kext upon load, it has to provide * us with a function pointer table of pthread internal calls. In return, this * file provides it with a table of function pointers it needs. 
*/
/*
 * Registration entry point called by pthread.kext when it loads.
 * The kext hands us its table of pthread-internal functions (fns) and we
 * hand it back our table of kernel-internal helpers (pthread_callbacks).
 * Panics on double registration or on a missing callbacks out-pointer,
 * since either indicates a broken boot sequence.
 */
void pthread_kext_register(pthread_functions_t fns, pthread_callbacks_t *callbacks)
{
	/* Registering twice would silently clobber the live function table. */
	if (pthread_functions != NULL) {
		panic("Re-initialisation of pthread kext callbacks.");
	}

	if (callbacks != NULL) {
		/* Export the kernel-side helper table to the kext. */
		*callbacks = &pthread_callbacks;
	} else {
		panic("pthread_kext_register called without callbacks pointer.");
	}

	/* Capture the kext's function table for the shims in this file. */
	if (fns) {
		pthread_functions = fns;
	}
}
zwliew/ctci
solutions/ctci/solution.py
def solve(head, k):
    """Placeholder for a Cracking-the-Coding-Interview linked-list exercise.

    Args:
        head: Head node of a singly linked list — presumably a node object
            with ``next``/``val`` attributes; TODO confirm against the
            exercise's test harness.
        k: Integer parameter of the exercise (e.g. "k-th to last element").

    NOTE(review): intentionally unimplemented — currently returns None.
    """
    pass
MOAMaster/AudioPlugSharp-SamplePlugins
vst3sdk/doc/vstexamples/pitchnames_2source_2version_8h.js
var pitchnames_2source_2version_8h = [ [ "MAJOR_VERSION_STR", "pitchnames_2source_2version_8h.html#adb2701b32a110347dbec4c7d92b3cce2", null ], [ "MAJOR_VERSION_INT", "pitchnames_2source_2version_8h.html#a1b1d372b4f38ad0455414c7bcfb7150d", null ], [ "SUB_VERSION_STR", "pitchnames_2source_2version_8h.html#a8e969cd9064a794d4f76d0192491e634", null ], [ "SUB_VERSION_INT", "pitchnames_2source_2version_8h.html#ad82cb859636c1a68bbea1968d405ad7b", null ], [ "RELEASE_NUMBER_STR", "pitchnames_2source_2version_8h.html#a1d8d20b64fc672d1f458dc8dd40c4b9d", null ], [ "RELEASE_NUMBER_INT", "pitchnames_2source_2version_8h.html#a7e0ef6f4f5763c59157b59e105f86374", null ], [ "BUILD_NUMBER_STR", "pitchnames_2source_2version_8h.html#a376e4da35d8404a22dd75e5d188d178e", null ], [ "BUILD_NUMBER_INT", "pitchnames_2source_2version_8h.html#adfb14851d45258eee920f43b201279e2", null ], [ "FULL_VERSION_STR", "pitchnames_2source_2version_8h.html#ab61e5bc2342cce876090e4b50ca2f60f", null ], [ "VERSION_STR", "pitchnames_2source_2version_8h.html#acfc1668731750cb1ee8974b8a7c133ef", null ], [ "stringOriginalFilename", "pitchnames_2source_2version_8h.html#adb58dbe9424bfe0a2e594a5e6399c3e2", null ], [ "stringFileDescription", "pitchnames_2source_2version_8h.html#a9278205f41909b317803db604ed1075d", null ], [ "stringCompanyName", "pitchnames_2source_2version_8h.html#a959cc5c14daa39d176d88fffd211aa6c", null ], [ "stringLegalCopyright", "pitchnames_2source_2version_8h.html#a7b9ca76cb4cc59f027ddabbe5f3f2118", null ], [ "stringLegalTrademarks", "pitchnames_2source_2version_8h.html#aa863825ce130d257c1ba798e4e639d67", null ] ];
Valentin9003/Java
Databases Frameworks - Hibernate & Spring Data/Exercise - Spring Data Intro/users-system/src/main/java/users/system/service/impl/TownServiceImpl.java
package users.system.service.impl;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import users.system.repositories.TownRepo;
import users.system.service.interfaces.TownService;

import javax.transaction.Transactional;

/**
 * Transactional Spring service implementing {@link TownService}.
 * Currently only wires the repository dependency; no service methods are
 * declared in this chunk — presumably they live on the interface and are
 * added later. TODO confirm against TownService.
 */
@Service
@Transactional
public class TownServiceImpl implements TownService {

    // Repository injected via the constructor; final so the dependency
    // cannot be rebound after construction.
    private final TownRepo townRepo;

    @Autowired
    public TownServiceImpl(TownRepo townRepo) {
        this.townRepo = townRepo;
    }
}
moraispgsi/wire
old/flow/output-node.go
package flow import ( "strconv" "io" ) type OutputNode struct { Node getReader func(context *Context) io.Reader } func NewOutputNode(graph *Graph, getReader func(context *Context) io.Reader) *OutputNode { node := NewVoidNode(false) node = OutputNode { node, } graph.AddNode(&node) node.AddAcceptCondition(func (idNode1 int64, idNode2 int64) bool { var node2 interface{} node2, err := graph.GetNode(idNode2) if err != nil { return false } _, ok := node2.(*InputNode) //Type assertion return ok }) node.getReader = getReader return &node } func (node *OutputNode) GetReader(context *Context) io.Reader { return node.getReader(context) }
skochinsky/efiwrapper
libefiwrapper/ewvar.c
<filename>libefiwrapper/ewvar.c /* * Copyright (c) 2016, Intel Corporation * All rights reserved. * * Author: <NAME> <<EMAIL>> * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer * in the documentation and/or other materials provided with the * distribution. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED * OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ #include "ewvar.h" #include "lib.h" static ewvar_t *EFI_VARS; static ewvar_storage_t *storage; ewvar_t *ewvar_new(CHAR16 *name, EFI_GUID *guid, UINT32 attr, UINTN size, VOID *data) { EFI_STATUS ret; ewvar_t *var; var = calloc(1, sizeof(*var)); if (!var) return NULL; var->name = str16dup(name); if (!var->name) goto err; memcpy(&var->guid, guid, sizeof(var->guid)); var->attributes = attr; var->size = size; var->data = malloc(size); if (!var->data) goto err; memcpy(var->data, data, size); if (attr & EFI_VARIABLE_NON_VOLATILE && storage && storage->save) { ret = storage->save(var); if (EFI_ERROR(ret)) goto err; } return var; err: ewvar_free(var); return NULL; } void ewvar_free(ewvar_t *var) { if (var->data) free(var->data); if (var->name) free(var->name); free(var); } void ewvar_free_all(void) { ewvar_t *var, *next; for (var = EFI_VARS; var; var = next) { next = var->next; ewvar_free(var); } EFI_VARS = NULL; } void ewvar_add(ewvar_t *var) { var->next = EFI_VARS; EFI_VARS = var; } ewvar_t *ewvar_get(const CHAR16 *name, EFI_GUID *guid, ewvar_t **prev_p) { ewvar_t *var, *prev = NULL; for (var = EFI_VARS; var; var = var->next) { if (!str16cmp(name, var->name) && !guidcmp(&var->guid, guid)) break; prev = var; } if (var && prev_p) *prev_p = prev; return var; } ewvar_t *ewvar_get_first(void) { return EFI_VARS; } EFI_STATUS ewvar_del(ewvar_t *var, ewvar_t *prev) { EFI_STATUS ret; if (!var) return EFI_NOT_FOUND; if (var->attributes & EFI_VARIABLE_NON_VOLATILE && storage && storage->delete) { ret = storage->delete(var); if (EFI_ERROR(ret)) return ret; } if (prev) prev->next = var->next; else EFI_VARS = var->next; ewvar_free(var); return EFI_SUCCESS; } EFI_STATUS ewvar_update(ewvar_t *var, UINTN size, VOID *data) { if (var->attributes & EFI_VARIABLE_APPEND_WRITE) { var->data = realloc(var->data, var->size + size); if (!var->data) return EFI_OUT_OF_RESOURCES; memcpy((char *)var->data + var->size, data, size); var->size += size; } else { var->data = realloc(var->data, size); if 
(!var->data) return EFI_OUT_OF_RESOURCES; var->size = size; if (!var->data) return EFI_OUT_OF_RESOURCES; memcpy(var->data, data, size); } if (var->attributes & EFI_VARIABLE_NON_VOLATILE && storage && storage->save) return storage->save(var); return EFI_SUCCESS; } EFI_STATUS ewvar_register_storage(ewvar_storage_t *s) { if (!s) return EFI_INVALID_PARAMETER; storage = s; if (s->load) return s->load(); return EFI_SUCCESS; } EFI_STATUS ewvar_unregister_storage(void) { storage = NULL; return EFI_SUCCESS; }
dma-graveyard/MaritimeCloudPortalTestbed
src/main/java/net/maritimecloud/identityregistry/command/user/ResetPasswordSaga.java
<reponame>dma-graveyard/MaritimeCloudPortalTestbed<gh_stars>1-10 /* Copyright 2014 Danish Maritime Authority. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package net.maritimecloud.identityregistry.command.user; import javax.annotation.Resource; import net.maritimecloud.identityregistry.command.api.ChangeUserPassword; import net.maritimecloud.identityregistry.command.api.ResetPasswordKeyGenerated; import net.maritimecloud.identityregistry.command.api.UserPasswordChanged; import net.maritimecloud.portal.application.ApplicationServiceRegistry; import net.maritimecloud.common.eventsourcing.axon.NoReplayedEvents; import net.maritimecloud.portal.infrastructure.mail.MailService; import org.axonframework.commandhandling.gateway.CommandGateway; import org.axonframework.saga.annotation.AbstractAnnotatedSaga; import org.axonframework.saga.annotation.EndSaga; import org.axonframework.saga.annotation.SagaEventHandler; import org.axonframework.saga.annotation.StartSaga; /** * * @author <NAME> */ @NoReplayedEvents public class ResetPasswordSaga extends AbstractAnnotatedSaga { @Resource private transient CommandGateway commandGateway; private MailService mailService() { return ApplicationServiceRegistry.mailService(); } public CommandGateway getCommandGateway() { return commandGateway; } public void setCommandGateway(CommandGateway commandGateway) { this.commandGateway = commandGateway; } @StartSaga @SagaEventHandler(associationProperty = "userId") public void 
handle(ResetPasswordKeyGenerated event) { System.out.println("User " + event.getUsername() + " has requested to reset password using " + event.getEmailAddress() + "."); // compose and send out welcome and confirm email System.out.println("Sending out reset password instruction email with the reset password key: " + event.getResetPasswordKey()); mailService().sendResetPasswordMessage(event.getEmailAddress(), event.getUsername(), event.getResetPasswordKey()); // HACK: FIXME: TODO: // auto-confirm users that fulfil some criteria autoResetTestUsersPassword_HACK(event.getUserId(), event.getEmailAddress(), event.getResetPasswordKey()); } private String autoResetTestUsersPassword_HACK(UserId userId, String emailAddress, String resetPasswordKey) { // HACK: FIXME: TODO: // auto generate ResetPasswordCommand in odrer to auto-reset users password in test and demo without reading mails if (emailAddress.endsWith("@auto.demo.dma.dk")) { System.out.println("HACK for auto.demo.dma.dk dmoain: auto-reset password to 'reset' for user " + userId); commandGateway.send(new ChangeUserPassword(userId, resetPasswordKey, "reset")); } return resetPasswordKey; } @EndSaga @SagaEventHandler(associationProperty = "userId") public void handle(UserPasswordChanged event) { System.out.println("User " + event.getUsername() + " password has been changed."); //System.out.println("Sending out a 'Users password was reset' notification email to: " + event.getUsername()); } // FIXME TODO: add en expire saga trigger to end unanswered saga instances!!! //@EndSaga //... }
jasarsoft/fit-src
pr1/vjezbe/Zadatak01/Source.cpp
<reponame>jasarsoft/fit-src<filename>pr1/vjezbe/Zadatak01/Source.cpp<gh_stars>0 //prvi program #include <iostream> using namespace std; int main() { cout << "STO JE DANAS LIJEP DAN"; system("pause"); return 0; }
hehuiran/peach
app/src/main/java/me/jessyan/peach/shop/home/mvp/contract/SearchGoodsContract.java
package me.jessyan.peach.shop.home.mvp.contract;

import com.jess.arms.mvp.IModel;
import com.jess.arms.mvp.IView;

import java.util.List;

import io.reactivex.Observable;
import me.jessyan.peach.shop.entity.search.SearchOptionalBean;
import me.jessyan.peach.shop.entity.search.SearchRecordBean;

/**
 * ================================================
 * Description: MVP contract for the goods-search screen (View + Model pair).
 * <p>
 * Created by MVPArmsTemplate on 12/20/2018 23:21
 * <a href="mailto:<EMAIL>">Contact me</a>
 * <a href="https://github.com/JessYanCoding">Follow me</a>
 * <a href="https://github.com/JessYanCoding/MVPArms">Star me</a>
 * <a href="https://github.com/JessYanCoding/MVPArms/wiki">See me</a>
 * <a href="https://github.com/JessYanCoding/MVPArmsTemplate">Keep the template up to date</a>
 * ================================================
 */
public interface SearchGoodsContract {

    // Frequently used UI methods (e.g. showing/hiding a progress bar or a
    // text message) can be declared on IView; this View adds the
    // search-specific callbacks.
    interface View extends IView {

        // Called when the optional search data has been fetched.
        void onGetSearchDataSuccess(SearchOptionalBean bean);

        // Called after a search term is persisted, with the updated history.
        void onSaveSearchRecordSuccess(List<SearchRecordBean> recordList);
    }

    // The Model layer defines this interface; callers only care about the
    // data the Model returns, not internal details such as whether a cache
    // is used.
    interface Model extends IModel {

        Observable<SearchOptionalBean> getSearchData();

        Observable<List<SearchRecordBean>> saveSearchRecord(String value);
    }
}
snosov1/opencv
apps/interactive-calibration/frameProcessor.hpp
<filename>apps/interactive-calibration/frameProcessor.hpp<gh_stars>1-10
#ifndef FRAME_PROCESSOR_HPP
#define FRAME_PROCESSOR_HPP

#include <opencv2/core.hpp>
#include <opencv2/aruco/charuco.hpp>
#include <opencv2/calib3d.hpp>

#include "calibCommon.hpp"
#include "calibController.hpp"

namespace calib
{
// Abstract per-frame stage of the interactive calibration pipeline:
// each frame from the camera is pushed through processFrame().
class FrameProcessor
{
protected:

public:
    virtual ~FrameProcessor();
    // Process one camera frame and return the (possibly annotated) frame.
    virtual cv::Mat processFrame(const cv::Mat& frame) = 0;
    // True once this stage has gathered everything it needs.
    virtual bool isProcessed() const = 0;
    // Reset internal state so processing can start over.
    virtual void resetState() = 0;
};

// Detects the calibration template (chessboard / ChAruco / circle grids)
// in incoming frames and accumulates the point correspondences needed
// for calibration.
class CalibProcessor : public FrameProcessor
{
protected:
    cv::Ptr<calibrationData> mCalibData;
    TemplateType mBoardType;
    cv::Size mBoardSize;
    std::vector<cv::Point2f> mTemplateLocations;
    std::vector<cv::Point2f> mCurrentImagePoints;
    cv::Mat mCurrentCharucoCorners;
    cv::Mat mCurrentCharucoIds;
    cv::Ptr<cv::SimpleBlobDetector> mBlobDetectorPtr;
    cv::Ptr<cv::aruco::Dictionary> mArucoDictionary;
    cv::Ptr<cv::aruco::CharucoBoard> mCharucoBoard;
    int mNeededFramesNum;
    unsigned mDelayBetweenCaptures;
    // NOTE(review): "Capured" is a typo for "Captured"; kept as-is because
    // renaming a member is a code change, not a documentation change.
    int mCapuredFrames;
    double mMaxTemplateOffset;
    float mSquareSize;
    float mTemplDist;

    // One detector per supported template type; each returns true when the
    // pattern was found and parsed in the given frame.
    bool detectAndParseChessboard(const cv::Mat& frame);
    bool detectAndParseChAruco(const cv::Mat& frame);
    bool detectAndParseACircles(const cv::Mat& frame);
    bool detectAndParseDualACircles(const cv::Mat& frame);
    // Commit the current frame's detections into mCalibData.
    void saveFrameData();
    // Overlay a capture notification message on the frame.
    void showCaptureMessage(const cv::Mat &frame, const std::string& message);
    // Validate the most recently captured frame before accepting it.
    bool checkLastFrame();

public:
    CalibProcessor(cv::Ptr<calibrationData> data, captureParameters& capParams);
    virtual cv::Mat processFrame(const cv::Mat& frame);
    virtual bool isProcessed() const;
    virtual void resetState();
    ~CalibProcessor();
};

// How accumulated calibration results are visualised.
enum visualisationMode {Grid, Window};

// Renders calibration state on top of the live stream: detected boards,
// grid of captured views, optional undistorted preview.
class ShowProcessor : public FrameProcessor
{
protected:
    cv::Ptr<calibrationData> mCalibdata;
    cv::Ptr<calibController> mController;
    TemplateType mBoardType;
    visualisationMode mVisMode;
    bool mNeedUndistort;
    double mGridViewScale;
    double mTextSize;

    // Draw the outline of a detected board over the image.
    void drawBoard(cv::Mat& img, cv::InputArray points);
    // Draw previously captured template points on the frame.
    void drawGridPoints(const cv::Mat& frame);
public:
    ShowProcessor(cv::Ptr<calibrationData> data, cv::Ptr<calibController> controller, TemplateType board);
    virtual cv::Mat processFrame(const cv::Mat& frame);
    virtual bool isProcessed() const;
    virtual void resetState();

    void setVisualizationMode(visualisationMode mode);
    void switchVisualizationMode();
    void clearBoardsView();
    void updateBoardsView();

    void switchUndistort();
    void setUndistort(bool isEnabled);
    ~ShowProcessor();
};

}

#endif
jspm/babel-plugin-transform-cjs-dew
test/fixtures/dew-interop/expected.js
<reponame>jspm/babel-plugin-transform-cjs-dew import * as _x from "./x"; var exports = {}, _dewExec = false; export function dew() { if (_dewExec) return exports; _dewExec = true; exports.x = 5; exports.y = _x.__dew ? _x.__dew() : _x.default; return exports; }
Andreas237/AndroidPolicyAutomation
ExtractedJars/Ibotta_com.ibotta.android/javafiles/com/bumptech/glide/load/data/AssetFileDescriptorLocalUriFetcher.java
// Decompiled by Jad v1.5.8g. Copyright 2001 <NAME>. // Jad home page: http://www.kpdus.com/jad.html // Decompiler options: packimports(3) annotate safe package com.bumptech.glide.load.data; import android.content.ContentResolver; import android.content.res.AssetFileDescriptor; import android.net.Uri; import java.io.FileNotFoundException; import java.io.IOException; // Referenced classes of package com.bumptech.glide.load.data: // LocalUriFetcher public final class AssetFileDescriptorLocalUriFetcher extends LocalUriFetcher { public AssetFileDescriptorLocalUriFetcher(ContentResolver contentresolver, Uri uri) { super(contentresolver, uri); // 0 0:aload_0 // 1 1:aload_1 // 2 2:aload_2 // 3 3:invokespecial #9 <Method void LocalUriFetcher(ContentResolver, Uri)> // 4 6:return } protected void close(AssetFileDescriptor assetfiledescriptor) throws IOException { assetfiledescriptor.close(); // 0 0:aload_1 // 1 1:invokevirtual #19 <Method void AssetFileDescriptor.close()> // 2 4:return } protected volatile void close(Object obj) throws IOException { close((AssetFileDescriptor)obj); // 0 0:aload_0 // 1 1:aload_1 // 2 2:checkcast #16 <Class AssetFileDescriptor> // 3 5:invokevirtual #23 <Method void close(AssetFileDescriptor)> // 4 8:return } public Class getDataClass() { return android/content/res/AssetFileDescriptor; // 0 0:ldc1 #16 <Class AssetFileDescriptor> // 1 2:areturn } protected AssetFileDescriptor loadResource(Uri uri, ContentResolver contentresolver) throws FileNotFoundException { contentresolver = ((ContentResolver) (contentresolver.openAssetFileDescriptor(uri, "r"))); // 0 0:aload_2 // 1 1:aload_1 // 2 2:ldc1 #35 <String "r"> // 3 4:invokevirtual #41 <Method AssetFileDescriptor ContentResolver.openAssetFileDescriptor(Uri, String)> // 4 7:astore_2 if(contentresolver != null) //* 5 8:aload_2 //* 6 9:ifnull 14 { return ((AssetFileDescriptor) (contentresolver)); // 7 12:aload_2 // 8 13:areturn } else { contentresolver = ((ContentResolver) (new StringBuilder())); // 9 
14:new #43 <Class StringBuilder> // 10 17:dup // 11 18:invokespecial #45 <Method void StringBuilder()> // 12 21:astore_2 ((StringBuilder) (contentresolver)).append("FileDescriptor is null for: "); // 13 22:aload_2 // 14 23:ldc1 #47 <String "FileDescriptor is null for: "> // 15 25:invokevirtual #51 <Method StringBuilder StringBuilder.append(String)> // 16 28:pop ((StringBuilder) (contentresolver)).append(((Object) (uri))); // 17 29:aload_2 // 18 30:aload_1 // 19 31:invokevirtual #54 <Method StringBuilder StringBuilder.append(Object)> // 20 34:pop throw new FileNotFoundException(((StringBuilder) (contentresolver)).toString()); // 21 35:new #33 <Class FileNotFoundException> // 22 38:dup // 23 39:aload_2 // 24 40:invokevirtual #58 <Method String StringBuilder.toString()> // 25 43:invokespecial #61 <Method void FileNotFoundException(String)> // 26 46:athrow } } protected volatile Object loadResource(Uri uri, ContentResolver contentresolver) throws FileNotFoundException { return ((Object) (loadResource(uri, contentresolver))); // 0 0:aload_0 // 1 1:aload_1 // 2 2:aload_2 // 3 3:invokevirtual #64 <Method AssetFileDescriptor loadResource(Uri, ContentResolver)> // 4 6:areturn } }
bejphil/FoxFire-regression
docs/dir_d5bac1d80f65f7075574489e24503a17.js
<reponame>bejphil/FoxFire-regression var dir_d5bac1d80f65f7075574489e24503a17 = [ [ "debug.hpp", "debug_8hpp.html", "debug_8hpp" ], [ "generics.hpp", "generics_8hpp.html", "generics_8hpp" ], [ "ocl_debug.cpp", "ocl__debug_8cpp_source.html", null ], [ "ocl_debug.hpp", "ocl__debug_8hpp.html", "ocl__debug_8hpp" ] ];
AhmedLeithy/Recognizers-Text
Java/libraries/recognizers-text-number/src/main/java/com/microsoft/recognizers/text/number/NumberOptions.java
<reponame>AhmedLeithy/Recognizers-Text
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.

package com.microsoft.recognizers.text.number;

/**
 * Option flags for the number recognizers. Values are powers of two, so
 * presumably they are meant to be OR-combined as a bit mask — TODO confirm
 * against call sites.
 */
public enum NumberOptions {
    None(0),
    PercentageMode(1),
    ExperimentalMode(4194304); // 2 ^22

    // Backing bit value for this option.
    private final int value;

    NumberOptions(int value) {
        this.value = value;
    }

    /** Returns the numeric flag value of this option. */
    public int getValue() {
        return value;
    }
}
DroidSoul/InterviewPrep
LinkedLists/MergeTwoSortedLists.java
/**
 * Definition for singly-linked list.
 * class ListNode {
 *     public int val;
 *     public ListNode next;
 *     ListNode(int x) { val = x; next = null; }
 * }
 */
public class Solution {
    /**
     * Merges two sorted singly linked lists into one sorted list, reusing
     * the existing nodes. Ties prefer nodes from the first list, so the
     * merge is stable.
     */
    public ListNode mergeTwoLists(ListNode a, ListNode b) {
        ListNode dummy = new ListNode(-1);
        ListNode tail = dummy;

        // Advance through both lists while each still has nodes, always
        // appending the smaller head to the result.
        while (a != null && b != null) {
            if (a.val <= b.val) {
                tail.next = a;
                a = a.next;
            } else {
                tail.next = b;
                b = b.next;
            }
            tail = tail.next;
        }

        // At most one list is non-empty now; splice its remainder on.
        tail.next = (a != null) ? a : b;

        return dummy.next;
    }
}
huangsongyao/HSYUIArchitectureToolsKit
HSYUIArchitectureToolsKit/Classes/Model/HSYBaseCustomSegmentedPageModel.h
<filename>HSYUIArchitectureToolsKit/Classes/Model/HSYBaseCustomSegmentedPageModel.h // // HSYBaseCustomSegmentedPageModel.h // HSYUIToolsKit // // Created by anmin on 2019/12/25. // #import "HSYBaseModel.h" #import "HSYBaseCustomSegmentedPageControlItem.h" #import <HSYMethodsToolsKit/UIViewController+Load.h> NS_ASSUME_NONNULL_BEGIN @interface HSYBaseCustomSegmentedPageControlModel : HSYBaseModel //未选中时的image,url或者图标名称 @property (nonatomic, copy) NSString *image; //选中时的image,url或者图标名称 @property (nonatomic, copy) NSString *highImage; //默认占位图,如果image和highImage为url时,这个属性才有效 @property (nonatomic, copy) NSString *placeholderImage; //按钮的title @property (nonatomic, copy) NSString *title; //未选中时的title的字体字号 @property (nonatomic, strong) UIFont *normalFont; //选中时的title的字体字号 @property (nonatomic, strong) UIFont *selectedFont; //未选中时的title的字体颜色 @property (nonatomic, strong) UIColor *normalTextColor; //选中时的title的字体颜色 @property (nonatomic, strong) UIColor *selectedTextColor; //按钮的选中状态 @property (nonatomic, strong) NSNumber *selectedStatus; //按钮的width宽度 @property (nonatomic, strong) NSNumber *itemWidths; //按钮的tag,即按钮位于UIScrollView的hsy_subviews或者subviews中的位置 @property (nonatomic, strong) NSNumber *itemId; /** 返回根据选中状态区分的当前图标或者远端地址 @return 当前图标或者远端地址 */ - (NSString *)hsy_controlItemImage; /** 返回根据选中状态区分的当前按钮title字体字号 @return 当前按钮title字体字号 */ - (UIFont *)hsy_controlItemFont; /** 返回根据选中状态区分的当前按钮title字体颜色 @return 当前按钮title字体颜色 */ - (UIColor *)hsy_controlTextColor; @end //******************************************************************************************************************************************************************************************************************************************************** @class RACSignal; @interface HSYBaseCustomSegmentedPageModel : HSYBaseModel //按钮item项的参数模型对应的集合 @property (nonatomic, strong) NSMutableArray<HSYBaseCustomSegmentedPageControlModel *> *controlModels; //HSYBaseCustomSegmentedPageControl的背景图片或者背景图片的远端url @property (nonatomic, 
copy) NSString *controlBackgroundImage; //当self.controlBackgroundImage为远端url时有效,表示默认的占位背景图 @property (nonatomic, copy) NSString *controlBackgroundPlaceholderImage; //是否显示选中状态的下划线 @property (nonatomic, strong) NSNumber *showControlLine; //选中状态的下划线的颜色 @property (nonatomic, strong) UIColor *controlLineColor; //选中状态的下划线的粗细=>NSValue=>CGSize @property (nonatomic, strong) NSValue *controlLineThickness; //选中状态的下划线的圆角角度 @property (nonatomic, strong) NSNumber *controlLineCirculars; //选中状态的下划线距离底部的便宜量 @property (nonatomic, strong) NSNumber *controlLineOffsetBottoms; //HSYBaseCustomSegmentedPageControl的底部横线的颜色 @property (nonatomic, strong) UIColor *controlBottomLineColor; //HSYBaseCustomSegmentedPageControl的底部横线的粗细 @property (nonatomic, strong) NSNumber *controlBottomLineThickness; //是否显示HSYBaseCustomSegmentedPageControl的底部横线 @property (nonatomic, strong) NSNumber *showControlBottomLine; //HSYBaseCustomSegmentedPageControl的UIScrollView是否可滚动 @property (nonatomic, strong) NSNumber *scrollEnabledStatus; //是否使用自适应格式,使用自适应格式,HSYBaseCustomSegmentedPageControl的长度会根据UIScrollView的contentSize.width进行适应,最大为屏幕宽度 @property (nonatomic, strong) NSNumber *adaptiveFormat; //外部设置HSYBaseCustomSegmentedPageControl的显示宽度,如果self.adaptiveFormat = YES,则这个属性不生效 @property (nonatomic, strong) NSNumber *controlWidths; //是否将HSYBaseCustomSegmentedPageControl添加在titleView头部 @property (nonatomic, strong) NSNumber *titleViewFormat; /** 根据self.controlModels中的参数模型的selectedStatus属性的选中状态,返回HSYBaseCustomSegmentedPageControl当前选中的位置,如果self.controlModels中的参数模型的selectedStatus属性都为NO,则默认返回0 @return 选中的位置 */ - (NSInteger)hsy_itemSelectedIndex; /** 是否显示选中状态的下划线 @return 是否显示选中状态的下划线 */ - (BOOL)hsy_showSelecteLine; /** 是否显示HSYBaseCustomSegmentedPageControl的下底部横线 @return 是否显示HSYBaseCustomSegmentedPageControl的下底部横线 */ - (BOOL)hsy_showControlBottomLine; /** 将self.controlModels的所有参数模型的selectedStatus属性全部重置为NO @param index 选中位置的itemiId @return 返回一个包含了选中项的HSYBaseCustomSegmentedPageControlModel的参数模型的RACSignal结果信号 */ - (RACSignal 
*)hsy_resetControlModelsUnselectedStatus:(NSInteger)index; /** 选中状态的下划线的size @param contentWidths HSYBaseCustomSegmentedPageControl的内部UIScrollView滚动条的contentSize.width @return 选中状态的下划线的size */ - (CGSize)hsy_selectedLineCGSize:(CGFloat)contentWidths; /** 返回HSYBaseCustomSegmentedPageControl的item视图集合 @return HSYBaseCustomSegmentedPageControl的item视图集合 */ - (NSArray<HSYBaseCustomSegmentedPageControlItem *> *)hsy_toSegmentedPageControlItems:(HSYBaseCustomSegmentedPageControlItemActionBlock)action; /** 返回HSYBaseCustomSegmentedPageControl的底部横线的高度 @return HSYBaseCustomSegmentedPageControl的底部横线的高度 */ - (CGFloat)hsy_toBottomLineHeights; /** HSYBaseCustomSegmentedPageControl的UIScrollView是否可滚动,如果self.scrollEnabledStatus != nil,则以self.scrollEnabledStatus为准,否则返回默认的YES @return HSYBaseCustomSegmentedPageControl的UIScrollView是否可滚动 */ - (BOOL)hsy_scrollEnabled; /** 设置HSYBaseCustomSegmentedPageControl的背景图 @param backgroundImageView HSYBaseCustomSegmentedPageControl的背景图的UIImageView对象 */ - (void)hsy_setControlItemBackgroundImage:(UIImageView *)backgroundImageView; /** HSYBaseCustomSegmentedPageControl的选中的下划线颜色,如果self.controlLineColor == nil,则默认返回(51, 51, 51) @return UIColor */ - (UIColor *)hsy_toControlSelectedLineColor; /** HSYBaseCustomSegmentedPageControl的底部横线颜色,如果self.controlBottomLineColor == nil,则返回默认的(0x999999) @return UIColor */ - (UIColor *)hsy_toControlBottomLineColor; /** 外部设置HSYBaseCustomSegmentedPageControl的显示宽度,如果self.adaptiveFormat = YES,则这个属性不生效,否则如果self.adaptiveFormat = NO & self.controlWidths = nil,默认返回设备屏幕宽度 @return CGFloat */ - (CGFloat)hsy_toControlWidths; /** 返回选中状态下划线距离底部的偏移量 @return 选中状态下划线距离底部的偏移量 */ - (CGFloat)hsy_toControlLineOffsetBottoms; /** HSYBaseCustomSegmentedPageControl的存在形式,返回self.titleViewFormat.boolValue,如果为NO,则HSYBaseCustomSegmentedPageControl在(0, 0)位置,如果为YES,则HSYBaseCustomSegmentedPageControl在titleView位置,默认返回YES @return HSYBaseCustomSegmentedPageControl的存在形式 */ - (BOOL)hsy_segmentedPageControlTitleViewFormat; @end 
//********************************************************************************************************************************************************************************************************************************************************

@interface HSYBaseCustomSegmentedPageControllerModel : HSYBaseModel

// Parameter collection for the child view controllers. Format:
// @[@{@"class name of UIViewController-A" : @{@"UIViewController-A.property-a" : @"UIViewController-A.property-a=>value",
//                                              @"UIViewController-A.property-b" : @"UIViewController-A.property-b=>value'", ... }},
//   @{@"class name of UIViewController-B" : @{@"UIViewController-B.property-a" : @"UIViewController-B.property-a=>value",
//                                              @"UIViewController-B.property-b" : @"UIViewController-B.property-b=>value'", ... }}, ... ]
@property (nonatomic, copy) NSArray<NSDictionary<NSString *, NSDictionary<NSString *, id> *> *> *viewControllers;

// Parameter model for the HSYBaseCustomSegmentedPageControl.
@property (nonatomic, strong) HSYBaseCustomSegmentedPageModel *segmentedPageControlModel;

/**
 Returns the collection of child view controller objects for the paged controller.

 @param delegate the UIViewControllerRuntimeDelegate delegate
 @return NSArray<UIViewController *> * => collection of child view controller objects
 */
- (NSArray<UIViewController *> *)hsy_toViewControllers:(id<UIViewControllerRuntimeDelegate>)delegate;

/**
 Returns the width of one HSYBaseCustomSegmentedPageControlItem after dividing
 self.segmentedPageControlModel.hsy_toControlWidths equally among the items.

 @return the equally divided HSYBaseCustomSegmentedPageControlItem width
 */
- (CGFloat)hsy_toAreEqualControlItemWidths;

@end

NS_ASSUME_NONNULL_END
inqwell/inq
src/main/java/com/inqwell/any/SimpleEvent.java
<filename>src/main/java/com/inqwell/any/SimpleEvent.java /** * Copyright (C) 2011 Inqwell Ltd * * You may distribute under the terms of the Artistic License, as specified in * the README file. */ /* * $Archive: /src/com/inqwell/any/SimpleEvent.java $ * $Author: sanderst $ * $Revision: 1.2 $ * $Date: 2011-04-07 22:18:20 $ */ package com.inqwell.any; /** * A general purpose concrete event. The use of the * event parameter is not specified by this class itself, it is up * to clients and the event's listeners to agree on what it means. */ public class SimpleEvent extends AbstractEvent implements Cloneable { protected Any param_; public SimpleEvent(Any eventType) { this(eventType, null, null); } public SimpleEvent(Any eventType, Any context) { this(eventType, context, null); } public SimpleEvent(Any eventType, Any context, Any param) { super(eventType); setContext(context); param_ = param; } public void setParameter (Any a) { param_ = a; } public Any getParameter () { return param_; } }
williewheeler/zkybase
ui/src/test/java/org/zkybase/web/controller/application/ApplicationCrudControllerTests.java
/*
 * ApplicationCrudControllerTests.java
 *
 * Copyright 2011-2012 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.zkybase.web.controller.application;

import static org.mockito.Matchers.anyLong;
import static org.mockito.Mockito.when;

import org.mockito.Mock;

import org.zkybase.model.Application;
import org.zkybase.service.ApplicationService;
import org.zkybase.service.FarmService;
import org.zkybase.web.controller.AbstractCrudControllerTests;

/**
 * Unit tests for the CRUD controller handling {@link Application} entities.
 * The actual test methods live in {@link AbstractCrudControllerTests}; this
 * subclass only supplies Application-specific mocks and stubbing.
 *
 * @author <NAME> (<EMAIL>)
 */
public class ApplicationCrudControllerTests extends AbstractCrudControllerTests<Application> {

	// Dependencies: mocked collaborators for the controller under test.
	@Mock private ApplicationService applicationService;
	@Mock private FarmService farmService;

	// Test objects: the mocked entity returned by the stubbed service calls.
	@Mock private Application application;

	/*
	 * Stubs the application service so that any findOne(id) lookup returns the
	 * mock application — presumably so the base-class tests can exercise the
	 * controller without a real persistence layer (TODO confirm against
	 * AbstractCrudControllerTests, which is outside this file).
	 *
	 * (non-Javadoc)
	 * @see org.zkybase.web.controller.AbstractCrudControllerTests#doSetUp()
	 */
	@Override
	protected void doSetUp() throws Exception {
		when(applicationService.findOne(anyLong())).thenReturn(application);
	}
}
kadds/NaOS
naos/includes/kernel/fs/vfs/pseudo.hpp
<filename>naos/includes/kernel/fs/vfs/pseudo.hpp #pragma once #include "../../util/circular_buffer.hpp" #include "../../wait.hpp" #include "common.hpp" namespace fs::vfs { /// pseudo device interface class pseudo_t { public: virtual i64 write(const byte *data, u64 size, flag_t flags) = 0; virtual i64 read(byte *data, u64 max_size, flag_t flags) = 0; virtual void close() = 0; virtual ~pseudo_t() {} }; class pseudo_pipe_t : public pseudo_t { util::circular_buffer<byte> buffer; task::wait_queue_t wait_queue; std::atomic_bool is_close; friend bool pipe_write_func(u64 data); friend bool pipe_read_func(u64 data); public: i64 write(const byte *data, u64 size, flag_t flags) override; i64 read(byte *data, u64 max_size, flag_t flags) override; void close() override; pseudo_pipe_t(u64 size = 512) : buffer(memory::MemoryAllocatorV, size) , is_close(false) { } }; } // namespace fs::vfs
Akhandalmanimalik/MortgageApp
app/com/syml/hibernate/dao/IPostGresDaoService.java
package com.syml.hibernate.dao;

import java.util.ArrayList;
import java.util.List;

import controllers.Applicant;
import controllers.ApplicantOpportunityRelationShip;
import controllers.Contact;
import controllers.Lead;
import controllers.Opportunity;
import controllers.Referral_Source;

/**
 * DAO service contract for persisting the mortgage-application workflow
 * (opportunities, applicants, contacts, leads, referral sources) to the
 * PostgreSQL-backed tables named in the per-method comments.
 *
 * NOTE(review): several method names contain typos ("updateOpportuinityPage3",
 * "getLeadByConatctId", "deleteApplicantPropertys") — renaming would break
 * every implementor and caller, so they are left as-is here.
 */
public interface IPostGresDaoService {

	// Creates new applicant(s) and updates the opportunity details.
	public Opportunity createApplicant(Opportunity opprotunity) throws PostGressDaoServiceException;

	// Updates page-2 opportunity details in the crm_lead table.
	public void updateOpportunityPage2(Opportunity opprotunity) throws PostGressDaoServiceException;

	// Updates page-3 opportunity details in the crm_lead table.
	public void updateOpportuinityPage3(Opportunity opportunity) throws PostGressDaoServiceException;

	// Updates page-4 opportunity details in the crm_lead table.
	public void updateOpportunityPage4(Opportunity opportunity) throws PostGressDaoServiceException;

	// Updates page-5a opportunity details in the crm_lead table.
	public void updateOpportunityPage5a(Opportunity opportunity) throws PostGressDaoServiceException;

	// Updates page-5b opportunity details in the crm_lead table.
	public void updateOpportunityPage5b(Opportunity opportunity) throws PostGressDaoServiceException;

	// Updates page-6 applicant details in the applicant_record table.
	public void updateApplicantPage6(Opportunity opportunity) throws PostGressDaoServiceException;

	// Updates page-7 applicant details in the applicant_address table.
	public void updateApplicantPage7(Opportunity opportunity) throws PostGressDaoServiceException;

	// Updates page-8/9 applicant income details in the income_employer table.
	public void updateApplicantIncomePage8Or9(Opportunity opportunity) throws PostGressDaoServiceException;

	// Updates applicant asset details in the applicant_property table.
	// NOTE(review): unlike its Opportunity overload below, this overload does
	// not declare PostGressDaoServiceException — confirm whether that is
	// intentional before relying on it.
	public void updateApplicantAssetsPage10(Applicant applicant);

	// Updates page-10 applicant asset details (Opportunity-level overload).
	public void updateApplicantAssetsPage10(Opportunity opportunity) throws PostGressDaoServiceException;

	// Updates page-11 applicant property details.
	public Opportunity updateApplicantPropertyPage11(Opportunity opportunity) throws PostGressDaoServiceException;

	// Updates page-11 applicant mortgage details.
	public Opportunity updateApplicantMortgagePage11(Opportunity opportunity) throws PostGressDaoServiceException;

	// Deletes the relationship between an applicant and an opportunity.
	public void deleteApplicantRelationshipWithOpportunity(ApplicantOpportunityRelationShip apShip) throws PostGressDaoServiceException;

	// Inserts the page-1 details of a new applicant and returns the persisted applicant.
	public Applicant insertApplicantPageOneDetails(Applicant applicant);

	// Updates the signing details of an opportunity.
	public void updateOpportunitySignDetails(Opportunity opportunity) throws PostGressDaoServiceException;

	// Deletes the assets attached to an opportunity's applicants.
	public void deleteApplicantAssets(Opportunity opportunity) throws PostGressDaoServiceException;

	// Deletes the properties attached to an opportunity's applicants.
	public void deleteApplicantPropertys(Opportunity opportunity) throws PostGressDaoServiceException;

	// Retained, currently unused declarations from an earlier revision:
	//public void getOpportunityDetails(int id);
	//public void insertLead(Lead crm);
	//public Referral_Source getReferralSourceById(int id);
	//public void insertApplicantRecord(Applicant opportunity);

	// Contact CRUD operations.
	public Contact insertContact(Contact contact) throws PostGressDaoServiceException;
	public List<Contact> getContact(Contact contact) throws PostGressDaoServiceException;
	public Contact getContactById(int id) throws PostGressDaoServiceException;
	public List<Contact> getContactByEmailAndLastName(Contact contact) throws PostGressDaoServiceException;

	// Lead CRUD operations.
	public Lead insertLead(Lead crm) throws PostGressDaoServiceException;
	public Lead getLeadByConatctId(Contact contact) throws PostGressDaoServiceException;
	public List<Lead> getLead(Lead lead) throws PostGressDaoServiceException;

	// Referral-source CRUD operations.
	public Referral_Source getReferralSourceById(int id) throws PostGressDaoServiceException;
	public ArrayList<Referral_Source> getReferral_SourceByPartnerID(int partner_id) throws PostGressDaoServiceException;
	public ArrayList<Referral_Source> getReferral_SourceByEmailAndName(String email, String name) throws PostGressDaoServiceException;
	public ArrayList<Referral_Source> getReferral_SourceByEmail(String email) throws PostGressDaoServiceException;
	public Referral_Source insertReferral(Referral_Source Referral_Source) throws PostGressDaoServiceException;
	public Referral_Source updateReferral(Referral_Source Referral_Source) throws PostGressDaoServiceException;

	// Resolves a province/state name to its numeric state id.
	public int getStateID(String province) throws PostGressDaoServiceException;
}