repo_name
stringlengths 6
101
| path
stringlengths 4
300
| text
stringlengths 7
1.31M
|
|---|---|---|
KavithaSiva/jenkins-library
|
pkg/validation/validation.go
|
package validation
import (
"errors"
"fmt"
"reflect"
"strconv"
"strings"
"github.com/go-playground/locales/en"
ut "github.com/go-playground/universal-translator"
valid "github.com/go-playground/validator/v10"
en_translations "github.com/go-playground/validator/v10/translations/en"
)
// Translation bundles a validator tag with the functions that register its
// message template and render the final translated error message.
type Translation struct {
	Tag           string                         // validator tag this translation applies to (e.g. "possible-values")
	RegisterFn    valid.RegisterTranslationsFunc // registers the message template with the translator
	TranslationFn valid.TranslationFunc          // renders the message for a failed field
}

// validation pairs a configured validator with the translator used to turn
// field errors into human-readable messages.
type validation struct {
	Validator  *valid.Validate
	Translator ut.Translator
}

// validationOption mutates a validation instance during New and may fail.
type validationOption func(*validation) error
// New builds a validation instance with the custom "possible-values" check
// registered and an English translator attached. Options are applied in
// order; the first failing option aborts construction.
func New(opts ...validationOption) (*validation, error) {
	validator := valid.New()
	// RegisterValidation returns an error (e.g. for an empty tag); the
	// original code dropped it silently.
	if err := validator.RegisterValidation("possible-values", isPossibleValues); err != nil {
		return nil, fmt.Errorf("registering possible-values validation: %w", err)
	}
	enTranslator := en.New()
	universalTranslator := ut.New(enTranslator, enTranslator)
	translator, found := universalTranslator.GetTranslator("en")
	if !found {
		return nil, errors.New("translator for en locale is not found")
	}
	validation := &validation{
		Validator:  validator,
		Translator: translator,
	}
	for _, opt := range opts {
		if err := opt(validation); err != nil {
			return nil, err
		}
	}
	return validation, nil
}
// WithJSONNamesForStructFields makes validation errors report fields by their
// JSON tag name rather than the Go struct field name.
func WithJSONNamesForStructFields() validationOption {
	return func(v *validation) error {
		v.Validator.RegisterTagNameFunc(func(fld reflect.StructField) string {
			name := strings.SplitN(fld.Tag.Get("json"), ",", 2)[0]
			// A field tagged `json:"-"` is excluded from JSON; returning ""
			// tells the validator to fall back to the Go field name instead
			// of reporting the literal name "-".
			if name == "-" {
				return ""
			}
			return name
		})
		return nil
	}
}
// WithPredefinedErrorMessages installs the built-in message templates for the
// "possible-values" and "required_if" validator tags.
func WithPredefinedErrorMessages() validationOption {
	translations := []Translation{
		{
			Tag: "possible-values",
			RegisterFn: func(ut ut.Translator) error {
				// {0} = field name, {1} = the space-separated allowed values.
				return ut.Add("possible-values", "The {0} must use the following values: {1}", true)
			},
			TranslationFn: func(ut ut.Translator, fe valid.FieldError) string {
				t, _ := ut.T("possible-values", fe.Field(), fe.Param())
				return t
			},
		}, {
			Tag: "required_if",
			RegisterFn: func(ut ut.Translator) error {
				// TODO: Improve the message for condition required_if for several fields
				return ut.Add("required_if", "The {0} is required since the {1} is {2}", true)
			},
			TranslationFn: func(ut ut.Translator, fe valid.FieldError) string {
				// fe.Param() holds the "otherField value" pair; splitting on
				// spaces feeds the pieces into the {1}/{2} template slots.
				params := []string{fe.Field()}
				params = append(params, strings.Split(fe.Param(), " ")...)
				t, _ := ut.T("required_if", params...)
				return t
			},
		},
	}
	return func(v *validation) error {
		if err := registerTranslations(translations, v.Validator, v.Translator); err != nil {
			return err
		}
		return nil
	}
}
// WithCustomErrorMessages registers caller-supplied message translations on
// the validation instance.
func WithCustomErrorMessages(translations []Translation) validationOption {
	return func(v *validation) error {
		return registerTranslations(translations, v.Validator, v.Translator)
	}
}
// ValidateStruct validates s against its struct tags and returns a single
// error whose message concatenates every translated field error, each
// terminated by ". ". A nil return means s passed validation.
func (v *validation) ValidateStruct(s interface{}) error {
	err := v.Validator.Struct(s)
	if err == nil {
		return nil
	}
	// An InvalidValidationError means s itself was unusable (e.g. not a
	// struct); pass it through untranslated.
	if invalid, ok := err.(*valid.InvalidValidationError); ok {
		return invalid
	}
	var sb strings.Builder
	for _, fieldErr := range err.(valid.ValidationErrors) {
		sb.WriteString(fieldErr.Translate(v.Translator))
		sb.WriteString(". ")
	}
	return errors.New(sb.String())
}
// registerTranslations installs the English default messages first, then each
// custom translation, on the given validator. The first failure aborts and is
// returned as-is.
func registerTranslations(translations []Translation, validator *valid.Validate, translator ut.Translator) error {
	if err := en_translations.RegisterDefaultTranslations(validator, translator); err != nil {
		return err
	}
	for _, t := range translations {
		if err := validator.RegisterTranslation(t.Tag, translator, t.RegisterFn, t.TranslationFn); err != nil {
			return err
		}
	}
	return nil
}
// isPossibleValues implements the "possible-values" tag: the field value must
// be one of the space-separated values in the tag parameter. Supported field
// kinds: string (the empty string is always accepted), int, and []string
// (every element must be allowed). Any other kind panics — this is a
// programmer error in the struct tags, not a validation failure.
func isPossibleValues(fl valid.FieldLevel) bool {
	vals := strings.Split(strings.TrimSpace(fl.Param()), " ")
	field := fl.Field()
	switch field.Kind() {
	case reflect.String:
		val := field.String()
		// Empty value can be used
		vals = append(vals, "")
		return contains(vals, val)
	case reflect.Int:
		// Compare the int's decimal rendering against the string parameters.
		val := strconv.FormatInt(field.Int(), 10)
		return contains(vals, val)
	case reflect.Slice:
		slice, ok := field.Interface().([]string)
		if !ok {
			panic("Only []string can be used as slice type")
		}
		for _, val := range slice {
			if !contains(vals, val) {
				return false
			}
		}
		return true
	default:
		panic(fmt.Sprintf("Bad field type %T", field.Interface()))
	}
}
// contains reports whether str occurs in slice.
func contains(slice []string, str string) bool {
	for i := range slice {
		if slice[i] == str {
			return true
		}
	}
	return false
}
|
nate-eisner/braintree_android
|
SamsungPay/src/main/java/com/braintreepayments/api/SamsungPayInternalClient.java
|
package com.braintreepayments.api;
import android.content.Context;
import android.os.Bundle;
import androidx.annotation.VisibleForTesting;
import com.samsung.android.sdk.samsungpay.v2.PartnerInfo;
import com.samsung.android.sdk.samsungpay.v2.SamsungPay;
import com.samsung.android.sdk.samsungpay.v2.SpaySdk;
import com.samsung.android.sdk.samsungpay.v2.StatusListener;
import com.samsung.android.sdk.samsungpay.v2.payment.CardInfo;
import com.samsung.android.sdk.samsungpay.v2.payment.CustomSheetPaymentInfo;
import com.samsung.android.sdk.samsungpay.v2.payment.PaymentManager;
import com.samsung.android.sdk.samsungpay.v2.payment.sheet.CustomSheet;
import org.json.JSONException;
import org.json.JSONObject;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import static com.braintreepayments.api.SamsungPayMapAcceptedCardBrands.mapToSamsungPayCardBrands;
import static com.samsung.android.sdk.samsungpay.v2.SpaySdk.ERROR_SPAY_APP_NEED_TO_UPDATE;
import static com.samsung.android.sdk.samsungpay.v2.SpaySdk.ERROR_SPAY_SETUP_NOT_COMPLETED;
import static com.samsung.android.sdk.samsungpay.v2.SpaySdk.SPAY_NOT_READY;
import static com.samsung.android.sdk.samsungpay.v2.SpaySdk.SPAY_NOT_SUPPORTED;
import static com.samsung.android.sdk.samsungpay.v2.SpaySdk.SPAY_READY;
/**
 * Internal bridge between the Braintree SDK and the Samsung Pay SDK: wraps
 * {@link SamsungPay} / {@link PaymentManager} calls and maps their callback
 * results onto Braintree callback and listener types.
 */
class SamsungPayInternalClient {

    private final SamsungPay samsungPay;
    private final PaymentManager paymentManager;
    // Card brands enabled in the Braintree configuration, pre-mapped to the
    // Samsung Pay SDK brand enum so they can be intersected with the brands
    // reported by the device in getAcceptedCardBrands().
    private final Set<SpaySdk.Brand> braintreeAcceptedCardBrands;

    SamsungPayInternalClient(BraintreeClient braintreeClient, Configuration configuration) {
        this(braintreeClient.getApplicationContext(), configuration, new SamsungPayPartnerInfoBuilder()
                .setConfiguration(configuration)
                .setSessionId(braintreeClient.getSessionId())
                .setIntegrationType(braintreeClient.getIntegrationType())
                .build());
    }

    private SamsungPayInternalClient(Context context, Configuration configuration, PartnerInfo partnerInfo) {
        this(configuration, new SamsungPay(context, partnerInfo), new PaymentManager(context, partnerInfo));
    }

    // Test seam: allows injecting mock SamsungPay / PaymentManager instances.
    @VisibleForTesting
    SamsungPayInternalClient(Configuration configuration, SamsungPay samsungPay, PaymentManager paymentManager) {
        this.braintreeAcceptedCardBrands = mapToSamsungPayCardBrands(configuration.getSupportedCardTypes());
        this.samsungPay = samsungPay;
        this.paymentManager = paymentManager;
    }

    /** Opens the Samsung Pay app's update page (thin pass-through). */
    void goToSamsungPayUpdatePage() {
        samsungPay.goToUpdatePage();
    }

    /** Launches Samsung Pay activation (thin pass-through). */
    void activateSamsungPay() {
        samsungPay.activateSamsungPay();
    }

    /** Pushes an updated custom sheet to the in-progress payment UI. */
    void updateCustomSheet(CustomSheet customSheet) {
        paymentManager.updateSheet(customSheet);
    }

    /**
     * Queries Samsung Pay readiness. On a non-ready status the callback also
     * receives a SamsungPayException whose error code is derived from the
     * bundle's EXTRA_ERROR_REASON when present, otherwise from the status
     * code itself, defaulting to SAMSUNG_PAY_ERROR_UNKNOWN.
     */
    void getSamsungPayStatus(final GetSamsungPayStatusCallback callback) {
        samsungPay.getSamsungPayStatus(new StatusListener() {
            @Override
            public void onSuccess(int statusCode, Bundle bundle) {
                Exception samsungPayError = null;
                @SamsungPayError int error = SamsungPayError.SAMSUNG_PAY_ERROR_UNKNOWN;
                if (statusCode != SPAY_READY) {
                    if (bundle != null && bundle.containsKey(SamsungPay.EXTRA_ERROR_REASON)) {
                        // Prefer the more specific reason from the extras.
                        int reason = bundle.getInt(SamsungPay.EXTRA_ERROR_REASON);
                        switch (reason) {
                            case ERROR_SPAY_APP_NEED_TO_UPDATE:
                                error = SamsungPayError.SAMSUNG_PAY_APP_NEEDS_UPDATE;
                                break;
                            case ERROR_SPAY_SETUP_NOT_COMPLETED:
                                error = SamsungPayError.SAMSUNG_PAY_SETUP_NOT_COMPLETED;
                                break;
                        }
                    } else {
                        if (statusCode == SPAY_NOT_READY) {
                            error = SamsungPayError.SAMSUNG_PAY_NOT_READY;
                        } else if (statusCode == SPAY_NOT_SUPPORTED) {
                            error = SamsungPayError.SAMSUNG_PAY_NOT_SUPPORTED;
                        }
                    }
                    samsungPayError = new SamsungPayException(error);
                }
                callback.onResult(statusCode, samsungPayError);
            }

            @Override
            public void onFail(int errorCode, Bundle bundle) {
                // No status available on failure; surface only the exception.
                SamsungPayException exception = new SamsungPayException(errorCode);
                callback.onResult(null, exception);
            }
        });
    }

    /**
     * Fetches the card brands the device supports and intersects them with
     * the brands the Braintree configuration accepts.
     */
    void getAcceptedCardBrands(final GetAcceptedCardBrandsCallback callback) {
        paymentManager.requestCardInfo(new Bundle(), new PaymentManager.CardInfoListener() {
            @Override
            public void onResult(final List<CardInfo> cardInfos) {
                Set<SpaySdk.Brand> spayAcceptedCardBrands = new HashSet<>();
                if (cardInfos != null) {
                    for (CardInfo cardInfo : cardInfos) {
                        spayAcceptedCardBrands.add(cardInfo.getBrand());
                    }
                }
                // equivalent to getting the intersection of both sets
                spayAcceptedCardBrands.retainAll(braintreeAcceptedCardBrands);
                callback.onResult(new ArrayList<>(spayAcceptedCardBrands), null);
            }

            @Override
            public void onFailure(int errorCode, Bundle bundle) {
                Exception error = new SamsungPayException(errorCode);
                callback.onResult(null, error);
            }
        });
    }

    /**
     * Starts an in-app Samsung Pay custom-sheet transaction. On success the
     * SDK's JSON payload is parsed into a SamsungPayNonce; parse failures and
     * SDK failures are reported through listener.onSamsungPayStartError, with
     * user cancellation mapped to UserCanceledException.
     */
    void startSamsungPay(CustomSheetPaymentInfo customSheetPaymentInfo, final SamsungPayListener listener) {
        paymentManager.startInAppPayWithCustomSheet(customSheetPaymentInfo, new PaymentManager.CustomSheetTransactionInfoListener() {
            @Override
            public void onCardInfoUpdated(CardInfo cardInfo, CustomSheet customSheet) {
                // The SDK requires the sheet to be re-submitted on card change.
                paymentManager.updateSheet(customSheet);
                listener.onSamsungPayCardInfoUpdated(cardInfo, customSheet);
            }

            @Override
            public void onSuccess(CustomSheetPaymentInfo customSheetPaymentInfo, String s, Bundle bundle) {
                try {
                    JSONObject json = new JSONObject(s);
                    SamsungPayNonce samsungPayNonce = SamsungPayNonce.fromJSON(json);
                    listener.onSamsungPayStartSuccess(samsungPayNonce, customSheetPaymentInfo);
                } catch (JSONException e) {
                    listener.onSamsungPayStartError(e);
                }
            }

            @Override
            public void onFailure(int errorCode, Bundle bundle) {
                if (errorCode == SpaySdk.ERROR_USER_CANCELED) {
                    UserCanceledException userCanceledError = new UserCanceledException("User canceled Samsung Pay.");
                    listener.onSamsungPayStartError(userCanceledError);
                } else {
                    SamsungPayException samsungPayError = new SamsungPayException(errorCode);
                    listener.onSamsungPayStartError(samsungPayError);
                }
            }
        });
    }
}
|
vinz243/CompactdPlayer
|
app/src/main/java/io/compactd/player/adapter/AlbumsAdapter.java
|
package io.compactd.player.adapter;
import android.app.Activity;
import android.content.Context;
import android.content.SharedPreferences;
import android.support.v4.util.Pair;
import android.support.v7.widget.PopupMenu;
import android.view.MenuItem;
import android.view.View;
import com.couchbase.lite.CouchbaseLiteException;
import java.util.Collections;
import java.util.List;
import io.compactd.client.CompactdClient;
import io.compactd.client.models.CompactdAlbum;
import io.compactd.client.models.CompactdModel;
import io.compactd.client.models.CompactdTrack;
import io.compactd.client.models.SyncOptions;
import io.compactd.player.R;
import io.compactd.player.glide.MediaCover;
import io.compactd.player.helper.MusicPlayerRemote;
import io.compactd.player.ui.views.ItemViewHolder;
import io.compactd.player.util.NavigationUtil;
import io.compactd.player.util.PreferenceUtil;
/**
 * Adapter that renders {@link CompactdAlbum} items and handles their popup
 * menu actions (go to artist, shuffle play, play next, offline-sync toggle).
 *
 * Created by vinz243 on 12/12/2017.
 */
public class AlbumsAdapter extends ModelAdapter<CompactdAlbum> implements SharedPreferences.OnSharedPreferenceChangeListener {

    // Mirrors the "local playback" preference; kept current by the
    // shared-preference listener registered in the constructor.
    private boolean mLocalPlayback;

    public AlbumsAdapter(Context context, LayoutType layoutType) {
        super(context, layoutType);
        mLocalPlayback = PreferenceUtil.getInstance(context).isLocalPlayback();
        PreferenceUtil.getInstance(context).registerOnSharedPreferenceChangeListener(this);
    }

    @Override
    protected MediaCover getMediaCover(CompactdAlbum item) {
        return new MediaCover(item);
    }

    @Override
    protected String getText(CompactdAlbum item) {
        return item.getArtist().getName();
    }

    @Override
    protected String getTitle(CompactdAlbum item) {
        return item.getName();
    }

    @Override
    protected PopupMenu inflateMenu(View view, CompactdAlbum album) {
        PopupMenu popupMenu = new PopupMenu(context, view);
        popupMenu.inflate(R.menu.menu_album);
        // The menu item shows "sync" checked, which is the inverse of the
        // model's "excluded from sync" flag.
        popupMenu.getMenu().findItem(R.id.menu_sync_offline).setChecked(!album.isExcludedFromSync());
        return popupMenu;
    }

    /**
     * Dispatches a selected popup-menu item. Returns true when the action was
     * handled; false when it failed or the id is unknown.
     */
    @Override
    protected boolean onMenuOptionSelected(MenuItem item, CompactdAlbum album, ItemViewHolder holder) {
        switch (item.getItemId()) {
            case R.id.menu_goto_artist:
                NavigationUtil.goToArtist((Activity) context, album.getArtist());
                return true;
            case R.id.menu_play_shuffle:
                try {
                    List<CompactdTrack> queue = album.getTracks(CompactdModel.FindMode.OnlyIds);
                    Collections.shuffle(queue);
                    MusicPlayerRemote.getInstance(context).openQueue(queue, 0, true);
                    MusicPlayerRemote.getInstance(context).setShuffling(true);
                    return true;
                } catch (CouchbaseLiteException e) {
                    e.printStackTrace();
                }
                // BUGFIX: without this break a failed shuffle fell through into
                // the "play next" case and queued the album anyway.
                break;
            case R.id.menu_play_next:
                try {
                    List<CompactdTrack> queue = album.getTracks(CompactdModel.FindMode.OnlyIds);
                    MusicPlayerRemote.getInstance(context).insert(queue);
                    return true;
                } catch (CouchbaseLiteException e) {
                    e.printStackTrace();
                }
                // BUGFIX: without this break a failed "play next" fell through
                // into the sync toggle below and flipped the album's sync flag.
                break;
            case R.id.menu_sync_offline:
                item.setChecked(!item.isChecked());
                album.setExcludedFromSync(!item.isChecked());
                album.update();
                updateStatus(holder, album);
                return true;
        }
        return false;
    }

    @Override
    protected void onItemSelected(CompactdAlbum current, int position, ItemViewHolder holder) {
        super.onItemSelected(current, position, holder);
        NavigationUtil.goToAlbum((Activity) context, current, Pair.create(holder.image, context.getString(R.string.transition_album_cover)));
    }

    @Override
    protected int getStatusResource(CompactdAlbum item) {
        return item.isExcludedFromSync() ? R.drawable.ic_sync_disabled_white_24dp : 0;
    }

    @Override
    public void onBindViewHolder(ItemViewHolder holder, int position) {
        super.onBindViewHolder(holder, position);
        // Dim albums that cannot be played in the current (offline/local) mode.
        if (!isAlbumAvailable(position)) {
            holder.layout.setAlpha(0.5f);
        } else {
            holder.layout.setAlpha(1f);
        }
    }

    // An album is always available when online with remote playback; when
    // offline or in local-playback mode it must not be excluded from sync.
    private boolean isAlbumAvailable(int position) {
        if (!CompactdClient.getInstance().isOffline() && !mLocalPlayback) {
            return true;
        }
        return !items.get(position).isExcludedFromSync();
    }

    @Override
    public void onSharedPreferenceChanged(SharedPreferences sharedPreferences, String key) {
        if (PreferenceUtil.LOCAL_PLAYBACK.equals(key)) {
            mLocalPlayback = PreferenceUtil.getInstance(context).isLocalPlayback();
            // Availability (and therefore alpha) may change for every row.
            notifyDataSetChanged();
        }
    }
}
|
dmgerman/camel
|
components/camel-jgroups/src/main/java/org/apache/camel/component/jgroups/cluster/JGroupsLockClusterService.java
|
begin_unit|revision:0.9.5;language:Java;cregit-version:0.0.1
begin_comment
comment|/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */
end_comment
begin_package
DECL|package|org.apache.camel.component.jgroups.cluster
package|package
name|org
operator|.
name|apache
operator|.
name|camel
operator|.
name|component
operator|.
name|jgroups
operator|.
name|cluster
package|;
end_package
begin_import
import|import
name|org
operator|.
name|apache
operator|.
name|camel
operator|.
name|support
operator|.
name|cluster
operator|.
name|AbstractCamelClusterService
import|;
end_import
begin_class
DECL|class|JGroupsLockClusterService
specifier|public
class|class
name|JGroupsLockClusterService
extends|extends
name|AbstractCamelClusterService
argument_list|<
name|JGroupsLockClusterView
argument_list|>
block|{
DECL|field|DEFAULT_JGROUPS_CONFIG
specifier|private
specifier|static
specifier|final
name|String
name|DEFAULT_JGROUPS_CONFIG
init|=
literal|"locking.xml"
decl_stmt|;
DECL|field|DEFAULT_JGROUPS_CLUSTERNAME
specifier|private
specifier|static
specifier|final
name|String
name|DEFAULT_JGROUPS_CLUSTERNAME
init|=
literal|"jgroups-master"
decl_stmt|;
DECL|field|jgroupsConfig
specifier|private
name|String
name|jgroupsConfig
decl_stmt|;
DECL|field|jgroupsClusterName
specifier|private
name|String
name|jgroupsClusterName
decl_stmt|;
DECL|method|JGroupsLockClusterService ()
specifier|public
name|JGroupsLockClusterService
parameter_list|()
block|{
name|this
operator|.
name|jgroupsConfig
operator|=
name|DEFAULT_JGROUPS_CONFIG
expr_stmt|;
name|this
operator|.
name|jgroupsClusterName
operator|=
name|DEFAULT_JGROUPS_CLUSTERNAME
expr_stmt|;
block|}
DECL|method|JGroupsLockClusterService (String jgroupsConfig, String jgroupsClusterName)
specifier|public
name|JGroupsLockClusterService
parameter_list|(
name|String
name|jgroupsConfig
parameter_list|,
name|String
name|jgroupsClusterName
parameter_list|)
block|{
name|this
operator|.
name|jgroupsConfig
operator|=
name|jgroupsConfig
expr_stmt|;
name|this
operator|.
name|jgroupsClusterName
operator|=
name|jgroupsClusterName
expr_stmt|;
block|}
annotation|@
name|Override
DECL|method|createView (String namespace)
specifier|protected
name|JGroupsLockClusterView
name|createView
parameter_list|(
name|String
name|namespace
parameter_list|)
throws|throws
name|Exception
block|{
return|return
operator|new
name|JGroupsLockClusterView
argument_list|(
name|this
argument_list|,
name|namespace
argument_list|,
name|jgroupsConfig
argument_list|,
name|jgroupsClusterName
argument_list|)
return|;
block|}
DECL|method|getJgroupsConfig ()
specifier|public
name|String
name|getJgroupsConfig
parameter_list|()
block|{
return|return
name|jgroupsConfig
return|;
block|}
DECL|method|setJgroupsConfig (String jgroupsConfig)
specifier|public
name|void
name|setJgroupsConfig
parameter_list|(
name|String
name|jgroupsConfig
parameter_list|)
block|{
name|this
operator|.
name|jgroupsConfig
operator|=
name|jgroupsConfig
expr_stmt|;
block|}
DECL|method|getJgroupsClusterName ()
specifier|public
name|String
name|getJgroupsClusterName
parameter_list|()
block|{
return|return
name|jgroupsClusterName
return|;
block|}
DECL|method|setJgroupsClusterName (String jgroupsClusterName)
specifier|public
name|void
name|setJgroupsClusterName
parameter_list|(
name|String
name|jgroupsClusterName
parameter_list|)
block|{
name|this
operator|.
name|jgroupsClusterName
operator|=
name|jgroupsClusterName
expr_stmt|;
block|}
block|}
end_class
end_unit
|
huisunan/epic4j
|
src/main/java/com/hsn/epic4j/core/exception/TimeException.java
|
<filename>src/main/java/com/hsn/epic4j/core/exception/TimeException.java
package com.hsn.epic4j.core.exception;
/**
 * Unchecked exception carrying a caller-supplied message for time-related
 * failures in epic4j.
 */
public class TimeException extends RuntimeException {

    /**
     * Creates the exception with a human-readable description.
     *
     * @param message detail message explaining the failure
     */
    public TimeException(String message) {
        super(message);
    }

    /**
     * Creates the exception with a description and the underlying cause.
     * Backward-compatible addition so callers can preserve stack context.
     *
     * @param message detail message explaining the failure
     * @param cause   the exception that triggered this one
     */
    public TimeException(String message, Throwable cause) {
        super(message, cause);
    }
}
|
kyuukimon/dormant-core
|
src/main/java/cn/com/dormant/service/core/misc/CommonValidator.java
|
<gh_stars>0
package cn.com.dormant.service.core.misc;
import java.util.Collection;
import java.util.Map;
/**
 * <code>CommonValidator</code>
*
* @description: This class includes some common util validation methods and maybe called by lots of place in system
* @author: <NAME>(<EMAIL>)
* @creation: 2015/03/11
* @version: 1.0
*/
public class CommonValidator {

    /**
     * Checks whether a string is "null": null, empty, or whitespace-only.
     *
     * @param s the string to check (e.g. a name)
     * @return true if null/blank, false otherwise
     */
    public static boolean isNull(String s) {
        return s == null || s.trim().isEmpty();
    }

    /**
     * Checks whether a collection is "null": the reference is null or the
     * collection contains no elements.
     *
     * @param c the collection to check
     * @return true if null/empty, false otherwise
     */
    public static boolean isNull(Collection<?> c) {
        return c == null || c.isEmpty();
    }

    /**
     * Checks whether a map is "null": the reference is null or the map
     * contains no entries.
     *
     * @param map the map to check
     * @return true if null/empty, false otherwise
     */
    public static boolean isNull(Map<?, ?> map) {
        return map == null || map.isEmpty();
    }
}
|
VU-libtech/OLE-INST
|
ole-app/olefs/src/main/java/org/kuali/ole/gl/businessobject/PendingBalancesMove.java
|
/*
* Copyright 2006 The Kuali Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ecl2.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kuali.ole.gl.businessobject;
import java.util.LinkedHashMap;
import org.kuali.rice.core.api.util.type.KualiDecimal;
import org.kuali.rice.krad.bo.PersistableBusinessObjectBase;
/**
* This class represents a pending balances move
*
*/
/**
 * This class represents a pending balances move: a principal plus the
 * appropriation and pending amounts (budget, actual, encumbrance) being moved.
 */
public class PendingBalancesMove extends PersistableBusinessObjectBase {
    private String principalId;
    // Appropriation-side amounts.
    private KualiDecimal appropriationBudget;
    private KualiDecimal appropriationActual;
    private KualiDecimal appropriationEncumbrance;
    // Pending-side amounts.
    private KualiDecimal pendingBudget;
    private KualiDecimal pendingActual;
    private KualiDecimal pendingEncumbrance;

    /**
     * Default constructor.
     */
    public PendingBalancesMove() {
    }

    /**
     * Gets the principalId attribute.
     *
     * @return Returns the principalId
     */
    public String getPrincipalId() {
        return principalId;
    }

    /**
     * Sets the principalId attribute.
     *
     * @param principalId The principalId to set.
     */
    public void setPrincipalId(String principalId) {
        this.principalId = principalId;
    }

    /**
     * Gets the appropriationBudget attribute.
     *
     * @return Returns the appropriationBudget
     */
    public KualiDecimal getAppropriationBudget() {
        return appropriationBudget;
    }

    /**
     * Sets the appropriationBudget attribute.
     *
     * @param appropriationBudget The appropriationBudget to set.
     */
    public void setAppropriationBudget(KualiDecimal appropriationBudget) {
        this.appropriationBudget = appropriationBudget;
    }

    /**
     * Gets the appropriationActual attribute.
     *
     * @return Returns the appropriationActual
     */
    public KualiDecimal getAppropriationActual() {
        return appropriationActual;
    }

    /**
     * Sets the appropriationActual attribute.
     *
     * @param appropriationActual The appropriationActual to set.
     */
    public void setAppropriationActual(KualiDecimal appropriationActual) {
        this.appropriationActual = appropriationActual;
    }

    /**
     * Gets the appropriationEncumbrance attribute.
     *
     * @return Returns the appropriationEncumbrance
     */
    public KualiDecimal getAppropriationEncumbrance() {
        return appropriationEncumbrance;
    }

    /**
     * Sets the appropriationEncumbrance attribute.
     *
     * @param appropriationEncumbrance The appropriationEncumbrance to set.
     */
    public void setAppropriationEncumbrance(KualiDecimal appropriationEncumbrance) {
        this.appropriationEncumbrance = appropriationEncumbrance;
    }

    /**
     * Gets the pendingBudget attribute.
     *
     * @return Returns the pendingBudget
     */
    public KualiDecimal getPendingBudget() {
        return pendingBudget;
    }

    /**
     * Sets the pendingBudget attribute.
     *
     * @param pendingBudget The pendingBudget to set.
     */
    public void setPendingBudget(KualiDecimal pendingBudget) {
        this.pendingBudget = pendingBudget;
    }

    /**
     * Gets the pendingActual attribute.
     *
     * @return Returns the pendingActual
     */
    public KualiDecimal getPendingActual() {
        return pendingActual;
    }

    /**
     * Sets the pendingActual attribute.
     *
     * @param pendingActual The pendingActual to set.
     */
    public void setPendingActual(KualiDecimal pendingActual) {
        this.pendingActual = pendingActual;
    }

    /**
     * Gets the pendingEncumbrance attribute.
     *
     * @return Returns the pendingEncumbrance
     */
    public KualiDecimal getPendingEncumbrance() {
        return pendingEncumbrance;
    }

    /**
     * Sets the pendingEncumbrance attribute.
     *
     * @param pendingEncumbrance The pendingEncumbrance to set.
     */
    public void setPendingEncumbrance(KualiDecimal pendingEncumbrance) {
        this.pendingEncumbrance = pendingEncumbrance;
    }

    /**
     * Builds the fields used for this object's string representation.
     * Note: only principalId is included; the amount fields are omitted.
     *
     * @see org.kuali.rice.krad.bo.BusinessObjectBase#toStringMapper()
     */
    protected LinkedHashMap toStringMapper_RICE20_REFACTORME() {
        LinkedHashMap m = new LinkedHashMap();
        m.put("principalId", this.principalId);
        return m;
    }
}
|
cassproject/cass-npm
|
src/org/schema/CovidTestingFacility.js
|
// Local registry for the schema classes this module depends on.
const schema = {};
schema.MedicalClinic = require("./MedicalClinic.js");
/**
 * Schema.org/CovidTestingFacility
 * A CovidTestingFacility is a [[MedicalClinic]] where testing for the COVID-19 Coronavirus
 disease is available. If the facility is being made available from an established [[Pharmacy]], [[Hotel]], or other
 non-medical organization, multiple types can be listed. This makes it easier to re-use existing schema.org information
 about that place e.g. contact info, address, opening hours. Note that in an emergency, such information may not always be reliable.
 *
 * @author schema.org
 * @class CovidTestingFacility
 * @module org.schema
 * @extends MedicalClinic
 */
module.exports = class CovidTestingFacility extends schema.MedicalClinic {
	/**
	 * Constructor, automatically sets @context and @type.
	 *
	 * @constructor
	 */
	constructor() {
		super();
		// Pin the JSON-LD @context and @type for this node type.
		this.setContextAndType("http://schema.org/","CovidTestingFacility");
	}
}
|
feminabi/stayOnTrak
|
src/components/ContactBar.js
|
<filename>src/components/ContactBar.js
import React from 'react'
import Link from 'gatsby-link'
import '../stylesheets/components/contact-bar.scss'
const ContactBar = () => {
return (
<div className="contact-bar">
<h2>Reach out and get started today!</h2>
<h3>
Give us a call at <a href="tel:1-866-517-1417">1-866-517-1417</a>, email
us at <a href="mailto:<EMAIL>"><EMAIL></a>{' '}
or <Link to="/contact-us">book a call</Link>.
</h3>
</div>
)
}
export default ContactBar
|
zheyujie/Semantic-UI-React
|
docs/src/examples/elements/Input/Variations/InputExampleRightLabeledBasic.js
|
import React from 'react'
import { Input } from 'semantic-ui-react'
const InputExampleRightLabeledBasic = () => (
<Input
label={{ basic: true, content: 'kg' }}
labelPosition='right'
placeholder='Enter weight...'
/>
)
export default InputExampleRightLabeledBasic
|
llHoYall/Cpp_Playground
|
playground/function/higher-order_function.cpp
|
<gh_stars>0
/*******************************************************************************
* @brief Higher-order function in modern C++
* @author llHoYall <<EMAIL>>
* @version v1.0
* @history
* 2018.12.24 Created.
******************************************************************************/
/* Include Headers -----------------------------------------------------------*/
#include <iostream>
#include <vector>
#include <algorithm>
#include <iterator>
#include <numeric>
#include <functional>
/* Main Routine --------------------------------------------------------------*/
// Demonstrates the classic higher-order functions — map, filter, fold — using
// the C++ standard algorithms. Prints each intermediate result to stdout and
// always returns 0.
auto HigherOrderFunction() -> int {
  // --- Map: square each element of v1 into v2 via std::transform ---
  std::vector<int> v1;
  for (int i = 0; i < 5; ++i) {
    v1.push_back(i);
  }
  std::vector<int> v2;
  v2.resize(v1.size());
  std::transform(std::begin(v1), std::end(v1), std::begin(v2), [](int i) {
    return i * i;
  });
  std::cout << "v1 contains";
  for (auto v : v1) {
    std::cout << " " << v;
  }
  std::cout << std::endl;
  std::cout << "v2 contains";
  for (auto v : v2) {
    std::cout << " " << v;
  }
  std::cout << std::endl << std::endl;

  // --- Filter: split 0..19 into primes and non-primes ---
  // Shared trial-division primality test. BUGFIX: the original predicate
  // returned true for n == 1 (`n < 2 ? n != 0`); 0 AND 1 are non-prime.
  // It was also duplicated verbatim in both filter calls below.
  auto is_prime = [](int n) {
    if (n < 2) {
      return false;
    }
    for (int j = 2; j < n; ++j) {
      if (n % j == 0) {
        return false;
      }
    }
    return true;
  };
  std::vector<int> numbers;
  for (int i = 0; i < 20; ++i) {
    numbers.push_back(i);
  }
  std::cout << "The original numbers" << std::endl;
  copy(std::begin(numbers), std::end(numbers), std::ostream_iterator<int>(std::cout, " "));
  std::cout << std::endl;
  std::vector<int> primes;
  std::copy_if(std::begin(numbers), std::end(numbers), std::back_inserter(primes), is_prime);
  std::cout << "The primes numbers" << std::endl;
  copy(std::begin(primes), std::end(primes), std::ostream_iterator<int>(std::cout, " "));
  std::cout << std::endl;
  std::vector<int> non_primes;
  std::remove_copy_if(numbers.begin(), numbers.end(), std::back_inserter(non_primes), is_prime);
  std::cout << "The non-primes numbers" << std::endl;
  copy(std::begin(non_primes), std::end(non_primes), std::ostream_iterator<int>(std::cout, " "));
  std::cout << std::endl << std::endl;

  // --- Fold: left and right sums via std::accumulate (equal for +) ---
  numbers = {0, 1, 2, 3, 4};
  auto foldl = std::accumulate(std::begin(numbers), std::end(numbers), 0, std::plus<int>());
  auto foldr = std::accumulate(std::rbegin(numbers), std::rend(numbers), 0, std::plus<int>());
  std::cout << "fold left result = " << foldl << std::endl;
  std::cout << "fold right result = " << foldr << std::endl;
  std::cout << std::endl;
  return 0;
}
|
Dongshanxu/OnlinePK
|
OnlinePK-iOS/NLiteAVDemo/Utils/Category/UIControl+repeatclick.h
|
//
// UIControl+repeatclick.h
// NLiteAVDemo
//
// Created by Ease on 2020/11/10.
// Copyright (c) 2021 NetEase, Inc. All rights reserved.
// Use of this source code is governed by a MIT license that can be found in the LICENSE file
#import <UIKit/UIKit.h>
// Category adding debounce/ignore controls for repeated taps on any UIControl.
@interface UIControl (repeatclick)
// Minimum interval (seconds) between accepted tap events; taps arriving sooner than this are dropped.
@property (nonatomic, assign) NSTimeInterval ne_acceptEventInterval;
// When YES, tap events are ignored entirely (the control does not respond).
@property (nonatomic, assign) BOOL ne_ignoreEvent;
@end
|
lwhsu/swift
|
benchmark/utils/LibProc/LibProcIncludeSystemHeader.h
|
<filename>benchmark/utils/LibProc/LibProcIncludeSystemHeader.h
//===--- LibProcIncludeSystemHeader.h -------------------------------------===//
//
// This source file is part of the Swift.org open source project
//
// Copyright (c) 2014 - 2017 Apple Inc. and the Swift project authors
// Licensed under Apache License v2.0 with Runtime Library Exception
//
// See https://swift.org/LICENSE.txt for license information
// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
//
//===----------------------------------------------------------------------===//
// This file exists to include the not-yet-modularized libproc.h system header.
#if __has_include(<libproc.h>)
#include <libproc.h>
#else
// Fallback path: pull in the types the prototype needs, then declare the
// symbol directly so callers can still compile and link.
#include <Availability.h>
#include <sys/resource.h>
// Some SDKs are missing the libproc.h header, despite this symbol being present.
int proc_pid_rusage(int pid, int flavor, rusage_info_t *buffer) __OSX_AVAILABLE_STARTING(__MAC_10_9, __IPHONE_7_0);
#endif
|
giko/teavm
|
teavm-samples/teavm-samples-storage/src/main/java/org/teavm/samples/storage/Application.java
|
<filename>teavm-samples/teavm-samples-storage/src/main/java/org/teavm/samples/storage/Application.java
/*
* Copyright 2015 <NAME>.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.teavm.samples.storage;
import org.teavm.dom.browser.Storage;
import org.teavm.dom.browser.Window;
import org.teavm.dom.events.Event;
import org.teavm.dom.events.EventListener;
import org.teavm.dom.html.HTMLButtonElement;
import org.teavm.dom.html.HTMLDocument;
import org.teavm.dom.html.HTMLElement;
import org.teavm.dom.html.HTMLInputElement;
import org.teavm.jso.JS;
/**
 * Browser demo that stores key/value pairs in HTML5 session storage and
 * renders the current contents into a table with save/delete/delete-all
 * buttons.
 *
 * @author <NAME>
 */
public final class Application {
    private static Window window = (Window)JS.getGlobal();
    private static HTMLDocument document = window.getDocument();
    private static Storage storage = window.getSessionStorage();

    private Application() {
    }

    public static void main(String[] args) {
        if (storage == null) {
            window.alert("storage is not supported.");
            // BUGFIX: bail out — every handler below (and draw()) dereferences
            // storage and would throw a NullPointerException otherwise.
            return;
        }
        HTMLButtonElement saveButton = (HTMLButtonElement)document.getElementById("save-button");
        saveButton.addEventListener("click", new EventListener() {
            @Override
            public void handleEvent(Event evt) {
                String key = ((HTMLInputElement)document.getElementById("key")).getValue();
                String value = ((HTMLInputElement)document.getElementById("value")).getValue();
                // Store only when both key and value are non-empty.
                if (key != null && key.length() > 0 && value != null && value.length() > 0) {
                    storage.setItem(key, value);
                    draw();
                }
            }
        });
        HTMLButtonElement deleteButton = (HTMLButtonElement)document.getElementById("delete-button");
        deleteButton.addEventListener("click", new EventListener() {
            @Override
            public void handleEvent(Event evt) {
                String key = ((HTMLInputElement)document.getElementById("key")).getValue();
                if (key != null && key.length() > 0) {
                    storage.removeItem(key);
                    draw();
                }
            }
        });
        HTMLButtonElement deleteAllButton = (HTMLButtonElement)document.getElementById("delete-all-button");
        deleteAllButton.addEventListener("click", new EventListener() {
            @Override
            public void handleEvent(Event evt) {
                storage.clear();
                draw();
            }
        });
        draw();
    }

    /** Rebuilds the #list table body from the current storage contents. */
    private static void draw() {
        HTMLElement tbody = document.getElementById("list");
        // Clear any previously rendered rows.
        while (tbody.getFirstChild() != null) {
            tbody.removeChild(tbody.getFirstChild());
        }
        for (int i = 0; i < storage.getLength(); i++) {
            String key = storage.key(i);
            String value = storage.getItem(key);
            HTMLElement tdKey = document.createElement("td");
            tdKey.appendChild(document.createTextNode(key));
            HTMLElement tdValue = document.createElement("td");
            tdValue.appendChild(document.createTextNode(value));
            HTMLElement tr = document.createElement("tr");
            tr.appendChild(tdKey);
            tr.appendChild(tdValue);
            tbody.appendChild(tr);
        }
    }
}
|
slliac/Ljus4Food
|
application/html/search/functions_3.js
|
// Doxygen-generated search index fragment (functions starting with 'f').
// Each entry maps an escaped, lowercased symbol id to its display name and
// the documentation anchor it links to. Auto-generated: do not edit by hand.
var searchData=
[
  ['file_5fget_5fcontents_5fcurl_5feadam',['file_get_contents_curl_eadam',['../class_a_p_irequest.html#aa3376c3f771a5cb3ad5356d22674a5ad',1,'APIrequest']]],
  ['file_5fget_5fcontents_5fcurl_5fz',['file_get_contents_curl_z',['../class_a_p_irequest.html#a65f97cf2471736a6999cefebea218a03',1,'APIrequest']]],
  ['form',['form',['../class_welcome.html#a4d5123cf815fd723d6dfdcb2c16fcc42',1,'Welcome']]]
];
|
igorscosta/rest4mex
|
log4mex/src/main/java/org/aksw/mex/log4mex/Execution.java
|
/**
* Copyright (C) 2014 - 2016, <NAME>
*
* This file is part of LOG4MEX.
*
* LOG4MEX is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* LOG4MEX is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.aksw.mex.log4mex;
import com.google.common.collect.Collections2;
import com.google.common.collect.Iterables;
import org.aksw.mex.log4mex.algo.AlgorithmVO;
import org.aksw.mex.log4mex.core.ExampleVO;
import org.aksw.mex.log4mex.core.PhaseVO;
import org.aksw.mex.log4mex.perf.IMeasure;
import org.aksw.mex.log4mex.perf.example.ExamplePerformanceMeasureVO;
import org.aksw.mex.log4mex.perf.overall.*;
import org.aksw.mex.util.MEXEnum;
import org.apache.commons.lang3.EnumUtils;
import java.util.*;
/**
* Created by esteves on 26.06.15.
*/
public abstract class Execution extends InstanceObjects {

    protected String _id;
    protected String _targetClass;
    protected Date _startedAtTime;
    protected Date _endedAtTime;
    protected String _errorMessage;
    protected ExperimentConfigurationVO _expConf;
    protected PhaseVO _phase;
    protected AlgorithmVO _algo;
    protected List<ExampleVO> _examples;
    protected List<Measure> _performances;

    public Execution(){
        this._performances = new ArrayList<>();
        // FIX: was a raw ArrayList; use the diamond operator for type safety.
        this._examples = new ArrayList<>();
    }

    /**********************************************************************************************************************************************
     * getters
     **********************************************************************************************************************************************/

    /**
     * gets the id of an execution
     * @return the execution id
     */
    protected String getId() {
        return _id;
    }

    /**
     * gets the target class for given execution
     * @return the target class name
     */
    protected String getTargetClass() {
        return _targetClass;
    }

    /**
     * gets the start time for given execution
     * @return the start timestamp
     */
    protected Date getStartedAtTime() {
        return _startedAtTime;
    }

    /**
     * gets end time for given execution
     * @return the end timestamp
     */
    protected Date getEndedAtTime() {
        return _endedAtTime;
    }

    /**
     * gets the Phase associated to an Execution
     * @return the phase
     */
    protected PhaseVO getPhase() {
        return _phase;
    }

    /**
     * gets the Algorithm associated to an Execution
     * @return the algorithm
     */
    protected AlgorithmVO getAlgorithm() {
        return _algo;
    }

    /**
     * gets the ExperimentConfiguration which groups an Execution
     * @return the owning experiment configuration
     */
    private ExperimentConfigurationVO getExpConf() {
        return _expConf;
    }

    /**
     * gets the Performance measures for an Execution
     * @return the (mutable) list of performance measures
     */
    protected List<Measure> getPerformances() {
        return _performances;
    }

    /**
     * gets the Example (in case of mexcore:SingleExecution) or Examples (in case of a mexcore:OverallExecution)
     * associated to an Execution. It is worth to note that we always have a list of Examples once each cell
     * in a table is an mexcore:Example class. mexcore:datasetColumn and mexcore:datasetRow indicate the position
     * and prov:value contains the desired value of a cell
     * @return the list of dataset examples
     */
    protected List<ExampleVO> getExamples(){
        return this._examples;
    }

    /**
     * gets the performance measures of a specific concrete type.
     * @param klass exact measure class to filter by (no subtype matching)
     * @return measures whose runtime class equals klass
     */
    protected List<Measure> getPerformances(Class<? extends IMeasure> klass){
        List<Measure> matching = new ArrayList<>();
        for (Measure item : this._performances) {
            if (item.getClass().equals(klass)) {
                matching.add(item);
            }
        }
        return matching;
    }

    protected String getErrorMessage(){ return this._errorMessage;}

    /**********************************************************************************************************************************************
     * setters
     **********************************************************************************************************************************************/

    public abstract void setStartsAtPosition(String value);

    public abstract void setEndsAtPosition(String value);

    public void setTargetClass(String _targetClass) {
        this._targetClass = _targetClass;
    }

    public void setStartDate(Date value){
        this._startedAtTime = value;
    }

    public void setEndDate(Date value){
        this._endedAtTime = value;
    }

    public void setAlgorithm(AlgorithmVO value){
        this._algo = value;
    }

    /**
     * Resolves an algorithm by identifier among the algorithms registered in the
     * experiment configuration and associates it with this execution.
     *
     * @param algorithmidentifier (instanceName or algorithmID)
     * @return true when the algorithm was found and set
     * @throws Exception when the identifier does not match any registered algorithm
     */
    public boolean setAlgorithm(String algorithmidentifier) throws Exception{
        // Collections2.filter never returns null, so only emptiness needs
        // checking (the original t != null guard was dead code).
        Collection<AlgorithmVO> matches = Collections2.filter(this.getExpConf().getAlgorithms(),
                p -> (p instanceof AlgorithmVO && (p.getIndividualName().equals(algorithmidentifier) || p.getIdentifier().equals(algorithmidentifier))));
        if (matches.isEmpty()){
            throw new Exception("The algorithm " + algorithmidentifier + " does not belong to the experiment");
        }
        this._algo = Iterables.get(matches, 0);
        return true;
    }

    public void setErrorMessage(String value){
        this._errorMessage = value;
    }

    /**********************************************************************************************************************************************
     * functions
     ***********************************************************************************************************************************************/

    /***
     * add UserDefinedMeasure
     * @param id measure identifier
     * @param description human-readable description
     * @param formula the formula used to compute the value
     * @param value the measured value
     * @return true if the measure was added
     */
    public boolean addPerformance(String id, String description, String formula, Double value){
        Measure m = new UserDefinedMeasureVO(id, description, formula, value);
        return this._performances.add(m);
    }

    /**
     * add PerformanceMeasure
     * @param exampleIdentifier identifier of the example being scored
     * @param predictedValue value predicted by the model
     * @param realValue ground-truth value
     * @return true if the measure was added
     */
    public boolean addPerformance(String exampleIdentifier, String predictedValue, String realValue){
        ExamplePerformanceMeasureVO m = new ExamplePerformanceMeasureVO(exampleIdentifier, predictedValue, realValue);
        return this._performances.add(m);
    }

    /***
     * add PerformanceMeasure (related to OverallExecution).
     * The measure name is matched (case-insensitively, underscores stripped)
     * against the classification, regression, statistical and clustering
     * measure enums, in that order.
     * @param m the measure kind
     * @param v the measured value
     * @throws Exception when the measure is not found in any known enum
     */
    public void addPerformance(MEXEnum.EnumMeasures m, double v) throws Exception{
        String type = "";
        String p = m.toString().replace("_","").toUpperCase();
        String paux = m.toString().replace("_","");
        // FIX: removed the dead local "boolean ret = false;" from the original.
        type = "cla";
        if (EnumUtils.isValidEnum(MEXEnum.EnumClassificationMeasure.class, p) == false){
            type = "reg";
            if (EnumUtils.isValidEnum(MEXEnum.EnumRegressionMeasure.class, p) == false){
                type = "sta";
                if (EnumUtils.isValidEnum(MEXEnum.EnumStatisticalMeasure.class, p) == false){
                    type = "clu";
                    if (EnumUtils.isValidEnum(MEXEnum.EnumClusteringMeasure.class, p) == false){
                        throw new Exception("measure has not been found: " + m.toString());}
                }
            }
        }
        switch (type) {
            case "cla":
                addClassificationPerformance(paux,v);
                break;
            case "reg":
                addRegressionPerformance(paux,v);
                break;
            case "sta":
                addStatisticalPerformance(paux,v);
                break;
            case "clu":
                addClusteringPerformance(paux,v);
                break;
            default:
                throw new Exception("measure has not been found: " + p);
        }
    }

    // Helper: wrap a classification measure value and record it.
    private boolean addClassificationPerformance(String p, double value) {
        ClassificationMeasureVO m = new ClassificationMeasureVO();
        m.setValue(value);
        m.setName(p);
        return this._performances.add(m);
    }

    // Helper: wrap a regression measure value and record it.
    private boolean addRegressionPerformance(String p, double value) {
        RegressionMeasureVO m = new RegressionMeasureVO();
        m.setValue(value);
        m.setName(p);
        return this._performances.add(m);
    }

    // Helper: wrap a statistical measure value and record it.
    private boolean addStatisticalPerformance(String p, double value) {
        StatisticalMeasureVO m = new StatisticalMeasureVO();
        m.setValue(value);
        m.setName(p);
        return this._performances.add(m);
    }

    // Helper: wrap a clustering measure value and record it.
    private boolean addClusteringPerformance(String p, double value) {
        ClusteringMeasureVO m = new ClusteringMeasureVO();
        m.setValue(value);
        m.setName(p);
        return this._performances.add(m);
    }

    /**
     * Add a dataset example to the execution
     * @param id example identifier
     * @param value value of the example item
     * @param datasetRow indicates the dataset row
     * @param datasetColumn indicates the dataset column
     * @param type the kind of example (see MEXEnum.EnumExamplesType)
     * @return true when the example was added
     * @throws Exception declared for interface compatibility; the body does not throw checked exceptions itself
     */
    public boolean addDatasetExample(String id, String value, long datasetRow, long datasetColumn, MEXEnum.EnumExamplesType type) throws Exception{
        // FIX: removed the original no-op try { ... } catch (Exception e) { throw e; } wrapper.
        ExampleVO example = new ExampleVO();
        example.setId(id);
        example.setValue(value);
        example.setDatasetRow(datasetRow);
        example.setDatasetColumn(datasetColumn);
        example.setExampleType(type.toString());
        this._examples.add(example);
        return true;
    }
}
|
Nyarum/noterius
|
network/common/world_structs.go
|
package common
// EntityEvent describes an event raised by a world entity.
//
// NOTE(review): the Enity* field names look like typos of "Entity"; they are
// kept as-is because renaming exported fields would break callers and any
// serialized packet layout that depends on them — confirm before renaming.
type EntityEvent struct {
	EnityID   uint32 // ID of the entity the event refers to
	EnityType uint8  // entity category/type discriminator
	EventID   uint16 // numeric event code
	EventName string // human-readable event name
}
|
iamzken/aliyun-openapi-cpp-sdk
|
cas/src/model/DescribeOrderDetailResult.cc
|
<reponame>iamzken/aliyun-openapi-cpp-sdk<filename>cas/src/model/DescribeOrderDetailResult.cc<gh_stars>10-100
/*
* Copyright 2009-2017 Alibaba Cloud All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <alibabacloud/cas/model/DescribeOrderDetailResult.h>
#include <json/json.h>
using namespace AlibabaCloud::Cas;
using namespace AlibabaCloud::Cas::Model;
// Default-construct an empty result; members keep their default values
// until parse() is called.
DescribeOrderDetailResult::DescribeOrderDetailResult() :
	ServiceResult()
{}

// Construct and immediately populate the result from a raw JSON payload.
DescribeOrderDetailResult::DescribeOrderDetailResult(const std::string &payload) :
	ServiceResult()
{
	parse(payload);
}

DescribeOrderDetailResult::~DescribeOrderDetailResult()
{}
// Parse the JSON payload of a DescribeOrderDetail response and populate the
// result members. Absent (null) keys leave the corresponding member at its
// current value.
// NOTE(review): numeric fields arrive as JSON strings and are converted with
// std::stol/std::stoi, which throw std::invalid_argument on empty or
// non-numeric text — confirm the service never returns such values.
void DescribeOrderDetailResult::parse(const std::string &payload)
{
	Json::Reader reader;
	Json::Value value;
	reader.parse(payload, value);

	setRequestId(value["RequestId"].asString());
	if(!value["Id"].isNull())
		id_ = std::stol(value["Id"].asString());
	if(!value["SourceType"].isNull())
		sourceType_ = value["SourceType"].asString();
	if(!value["CertType"].isNull())
		certType_ = value["CertType"].asString();
	if(!value["InstanceId"].isNull())
		instanceId_ = value["InstanceId"].asString();
	if(!value["Year"].isNull())
		year_ = std::stol(value["Year"].asString());
	if(!value["OrderId"].isNull())
		orderId_ = std::stol(value["OrderId"].asString());
	if(!value["OrderType"].isNull())
		orderType_ = value["OrderType"].asString();
	if(!value["BrandName"].isNull())
		brandName_ = value["BrandName"].asString();
	if(!value["BuyDate"].isNull())
		buyDate_ = std::stol(value["BuyDate"].asString());
	if(!value["StatusCode"].isNull())
		statusCode_ = value["StatusCode"].asString();
	if(!value["Domain"].isNull())
		domain_ = value["Domain"].asString();
	if(!value["ProductAliasName"].isNull())
		productAliasName_ = value["ProductAliasName"].asString();
	if(!value["DomainCount"].isNull())
		domainCount_ = std::stoi(value["DomainCount"].asString());
	if(!value["WildDomainCount"].isNull())
		wildDomainCount_ = std::stoi(value["WildDomainCount"].asString());
	if(!value["VerifyStatus"].isNull())
		verifyStatus_ = std::stoi(value["VerifyStatus"].asString());
	if(!value["MaybeIssueDate"].isNull())
		maybeIssueDate_ = std::stol(value["MaybeIssueDate"].asString());
	if(!value["BeforeDate"].isNull())
		beforeDate_ = std::stol(value["BeforeDate"].asString());
	if(!value["AfterDate"].isNull())
		afterDate_ = std::stol(value["AfterDate"].asString());
	// Booleans are transported as the literal strings "true"/"false".
	if(!value["ShowCancel"].isNull())
		showCancel_ = value["ShowCancel"].asString() == "true";
	if(!value["ShowRefund"].isNull())
		showRefund_ = value["ShowRefund"].asString() == "true";
}
// Trivial by-value accessors for the fields populated by parse().
int DescribeOrderDetailResult::getVerifyStatus()const
{
	return verifyStatus_;
}

long DescribeOrderDetailResult::getAfterDate()const
{
	return afterDate_;
}

bool DescribeOrderDetailResult::getShowRefund()const
{
	return showRefund_;
}

std::string DescribeOrderDetailResult::getInstanceId()const
{
	return instanceId_;
}

std::string DescribeOrderDetailResult::getSourceType()const
{
	return sourceType_;
}

std::string DescribeOrderDetailResult::getCertType()const
{
	return certType_;
}

int DescribeOrderDetailResult::getWildDomainCount()const
{
	return wildDomainCount_;
}

long DescribeOrderDetailResult::getOrderId()const
{
	return orderId_;
}

std::string DescribeOrderDetailResult::getStatusCode()const
{
	return statusCode_;
}

std::string DescribeOrderDetailResult::getBrandName()const
{
	return brandName_;
}

std::string DescribeOrderDetailResult::getOrderType()const
{
	return orderType_;
}

long DescribeOrderDetailResult::getYear()const
{
	return year_;
}

bool DescribeOrderDetailResult::getShowCancel()const
{
	return showCancel_;
}

std::string DescribeOrderDetailResult::getProductAliasName()const
{
	return productAliasName_;
}

long DescribeOrderDetailResult::getMaybeIssueDate()const
{
	return maybeIssueDate_;
}

long DescribeOrderDetailResult::getId()const
{
	return id_;
}

std::string DescribeOrderDetailResult::getDomain()const
{
	return domain_;
}

long DescribeOrderDetailResult::getBuyDate()const
{
	return buyDate_;
}

int DescribeOrderDetailResult::getDomainCount()const
{
	return domainCount_;
}

long DescribeOrderDetailResult::getBeforeDate()const
{
	return beforeDate_;
}
|
Matt-Doyle/PythonError
|
src/pythonError/errorInterpreter/inputPhaser/InputParser.java
|
<reponame>Matt-Doyle/PythonError<gh_stars>0
package pythonError.errorInterpreter.inputPhaser;
import pythonError.errorInterpreter.errorSearch.BoyerMoore;
import pythonError.errorInterpreter.pythonInterpreter.Python;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.regex.Pattern;
/**
* Created by Christopher on 30/05/2016.
*/
public class InputParser {

    /**
     * Splits raw source code into {@link Line} objects, one per physical line.
     *
     * @param input the full source text
     * @return one Line per newline-separated line of input
     */
    public static ArrayList<Line> parseCode(String input) {
        ArrayList<Line> lines = new ArrayList<>();
        // BUG FIX: the original passed the whole input text to every Line
        // instead of the individual line produced by split("\n").
        Arrays.stream(input.split("\n")).forEach(line -> lines.add(new Line(line)));
        return lines;
    }

    /**
     * Matches a Python traceback against the known error patterns and extracts
     * the line number it refers to.
     *
     * @param traceback raw interpreter traceback text
     * @return the matched error (BaseException if nothing matched) together with its line number
     */
    public static LinedError parseError(String traceback) {
        Error error = Python.getError("BaseException");
        for (Error thisError : Python.getExplanations().values()) {
            ArrayList<Integer> characterPositions = BoyerMoore.precomputeSearch(thisError.getBmPattern().getPattern(), traceback, thisError.getBmPattern().getBCRTable(), thisError.getBmPattern().getGSRTable());
            if (!characterPositions.isEmpty()) {
                error = Python.getError(thisError.getBmPattern().getPattern());
                break;
            }
        }
        ArrayList<String> tracebackWords = new ArrayList<>(Arrays.asList(traceback.split(" ")));
        // NOTE(review): assumes the traceback contains "line <n>,"; if "line"
        // is absent, lastIndexOf returns -1 and word 0 is parsed instead —
        // confirm callers always pass a real CPython traceback.
        int lineNumber = Integer.parseInt(tracebackWords.get(tracebackWords.lastIndexOf("line") + 1).replace(",", ""));
        return new LinedError(error, lineNumber);
    }
}
/**
 * Work-in-progress tokenizer for Python source. Recognises keywords (and is
 * intended to recognise literals/operators/delimiters) at line starts.
 */
class CodeParser {
    static String[] KEYWORDS = {"False", "class", "finally", "is", "return", "None", "continue", "for", "lambda", "try", "True", "def", "from", "nonlocal",
            "while", "and", "del", "global", "not", "with", "as", "elif", "if", "or", "yield", "assert", "else", "import", "pass", "break", "except", "is", "raise"};
    static String[] OPERATORS = {"+", "-", "*", "/", "//", "%", "**", "==", "!=", "<", ">", "<=", ">=", "is", "in", "and", "or", "not", "&", "|", "~", "^", "<<",
            ">>"};
    static String[] DELIMITERS = {};
    static String[] LITERALS = {"int", "float", "imaginary", "string", "byte"};

    /**
     * Checks whether {@code subStr} occurs in {@code input} at {@code offset},
     * followed by a space, a newline, or the end of the input.
     *
     * @return the index of the last character of the match, or 0 if no match
     */
    private int getSubStrOffset(String input, String subStr, int offset) {
        // BUG FIX: the original guard was off by one and allowed a read one
        // past the end of input (StringIndexOutOfBoundsException).
        if (offset + subStr.length() > input.length()) {
            return 0;
        }
        for (int j = 0; j < subStr.length(); j++) { // Compare subStr to input at offset
            if (input.charAt(offset + j) != subStr.charAt(j)) {
                return 0;
            }
        }
        int end = offset + subStr.length();
        // A match counts only when it is delimited: end of input, space or newline.
        if (end == input.length() || input.charAt(end) == ' ' || input.charAt(end) == '\n') {
            return end - 1;
        }
        return 0;
    }

    /**
     * Searches for any of {@code searchTerms} starting at position {@code i}.
     *
     * @return the offset of the last matched character, or 0 when nothing matches
     */
    private int wordSearch(String input, int i, String[] searchTerms) {
        // BUG FIX: substring(i, i) is always the empty string, so the original
        // check never matched and no keyword/literal was ever detected.
        String currentChar = input.substring(i, i + 1);
        if (currentChar.matches("[A-Za-z_]")) {
            for (String keyWord : searchTerms) {
                int offset = getSubStrOffset(input, keyWord, i);
                if (offset > 0)
                    return offset;
            }
        }
        return 0;
    }

    // True (non-zero offset) when a Python keyword starts at position i.
    private int isKeyword(String input, int i) {
        return wordSearch(input, i, KEYWORDS);
    }

    // True (non-zero offset) when a literal type name starts at position i.
    private int isLiteral(String input, int i) {
        return wordSearch(input, i, LITERALS);
    }

    public ArrayList<Token> constructTokens(String input) {
        // BUG FIX: String.replace returns a new string; the original calls
        // discarded the result, so the normalisation never happened.
        input = input.replace(",\n", " ");
        input = input.replace("\\\n", " ");
        ArrayList<Token> tokenList = new ArrayList<>();
        boolean isNewLine = true;
        for (int i = 0; i < input.length(); i++) {
            if (isNewLine) {
                int keywordOffset = isKeyword(input, i);
                if (keywordOffset > 0) {
                    tokenList.add(new Token(tokens.keyWord));
                    i += keywordOffset;
                    // NOTE(review): stops after the first keyword — looks
                    // unfinished; confirm whether this should continue scanning.
                    break;
                } else {
                    int literalOffset = isLiteral(input, i); // TODO: literal tokens are not emitted yet.
                }
            }
        }
        // BUG FIX: the original returned a fresh empty list, discarding every
        // token that had been accumulated.
        return tokenList;
    }
}
|
hervewenjie/mysql
|
storage/innobase/log/log0online.cc
|
/*****************************************************************************
Copyright (c) 2011-2012 Percona Inc. All Rights Reserved.
This program is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free Software
Foundation; version 2 of the License.
This program is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License along with
this program; if not, write to the Free Software Foundation, Inc., 59 Temple
Place, Suite 330, Boston, MA 02111-1307 USA
*****************************************************************************/
/**************************************************//**
@file log/log0online.cc
Online database log parsing for changed page tracking
*******************************************************/
#include "log0online.h"
#include "my_dbug.h"
#include "log0recv.h"
#include "mach0data.h"
#include "mtr0log.h"
#include "srv0srv.h"
#include "srv0start.h"
#include "trx0sys.h"
#include "ut0rbt.h"
#ifdef __WIN__
/* error LNK2001: unresolved external symbol _debug_sync_C_callback_ptr */
# define DEBUG_SYNC_C(dummy) ((void) 0)
#else
# include "m_string.h" /* for my_sys.h */
# include "my_sys.h" /* DEBUG_SYNC_C */
#endif
/* Amount of log read per follow-up pass: four maximum-size pages. */
enum { FOLLOW_SCAN_SIZE = 4 * (UNIV_PAGE_SIZE_MAX) };

#ifdef UNIV_PFS_MUTEX
/* Key to register log_bmp_sys->mutex with PFS */
UNIV_INTERN mysql_pfs_key_t	log_bmp_sys_mutex_key;
#endif /* UNIV_PFS_MUTEX */

/** Log parsing and bitmap output data structure */
struct log_bitmap_struct {
	byte*		read_buf_ptr;	/*!< Unaligned log read buffer */
	byte*		read_buf;	/*!< log read buffer */
	byte		parse_buf[RECV_PARSING_BUF_SIZE];
					/*!< log parse buffer */
	byte*		parse_buf_end;	/*!< parse buffer position where the
					next read log data should be copied to.
					If the previous log records were fully
					parsed, it points to the start,
					otherwise points immediately past the
					end of the incomplete log record. */
	char		bmp_file_home[FN_REFLEN];
					/*!< directory for bitmap files */
	log_online_bitmap_file_t out;	/*!< The current bitmap file */
	ulint		out_seq_num;	/*!< the bitmap file sequence number */
	lsn_t		start_lsn;	/*!< the LSN of the next unparsed
					record and the start of the next LSN
					interval to be parsed. */
	lsn_t		end_lsn;	/*!< the end of the LSN interval to be
					parsed, equal to the next checkpoint
					LSN at the time of parse */
	lsn_t		next_parse_lsn;	/*!< the LSN of the next unparsed
					record in the current parse */
	ib_rbt_t*	modified_pages; /*!< the current modified page set,
					organized as the RB-tree with the keys
					of (space, 4KB-block-start-page-id)
					pairs */
	ib_rbt_node_t*	page_free_list; /*!< Singly-linked list of freed nodes
					of modified_pages tree for later
					reuse. Nodes are linked through
					ib_rbt_node_t.left as this field has
					both the correct type and the tree does
					not mind its overwrite during
					rbt_next() tree traversal. */
	ib_mutex_t	mutex;		/*!< mutex protecting all the fields.*/
};

/* The log parsing and bitmap output struct instance */
static struct log_bitmap_struct* log_bmp_sys;

/** File name stem for bitmap files. */
static const char* bmp_file_name_stem = "ib_modified_log_";

/** File name template for bitmap files. The 1st format tag is a directory
name, the 2nd tag is the stem, the 3rd tag is a file sequence number, the 4th
tag is the start LSN for the file. */
static const char* bmp_file_name_template = "%s%s%lu_%llu.xdb";

/* On server startup with empty database srv_start_lsn == 0, in
which case the first LSN of actual log records will be this. */
#define MIN_TRACKED_LSN ((LOG_START_LSN) + (LOG_BLOCK_HDR_SIZE))

/* Tests if num bit of bitmap is set */
#define IS_BIT_SET(bitmap, num) \
	(*((bitmap) + ((num) >> 3)) & (1UL << ((num) & 7UL)))

/** The bitmap file block size in bytes. All writes will be multiples of this.
*/
enum {
	MODIFIED_PAGE_BLOCK_SIZE = 4096
};

/** Offsets in a file bitmap block */
enum {
	MODIFIED_PAGE_IS_LAST_BLOCK = 0,/* 1 if last block in the current
					write, 0 otherwise. */
	MODIFIED_PAGE_START_LSN = 4,	/* The starting tracked LSN of this and
					other blocks in the same write */
	MODIFIED_PAGE_END_LSN = 12,	/* The ending tracked LSN of this and
					other blocks in the same write */
	MODIFIED_PAGE_SPACE_ID = 20,	/* The space ID of tracked pages in
					this block */
	MODIFIED_PAGE_1ST_PAGE_ID = 24,	/* The page ID of the first tracked
					page in this block */
	MODIFIED_PAGE_BLOCK_UNUSED_1 = 28,/* Unused in order to align the start
					of bitmap at 8 byte boundary */
	MODIFIED_PAGE_BLOCK_BITMAP = 32,/* Start of the bitmap itself */
	MODIFIED_PAGE_BLOCK_UNUSED_2 = MODIFIED_PAGE_BLOCK_SIZE - 8,
					/* Unused in order to align the end of
					bitmap at 8 byte boundary */
	MODIFIED_PAGE_BLOCK_CHECKSUM = MODIFIED_PAGE_BLOCK_SIZE - 4
					/* The checksum of the current block */
};

/** Length of the bitmap data in a block in bytes */
enum { MODIFIED_PAGE_BLOCK_BITMAP_LEN
	= MODIFIED_PAGE_BLOCK_UNUSED_2 - MODIFIED_PAGE_BLOCK_BITMAP };

/** Length of the bitmap data in a block in page ids */
enum { MODIFIED_PAGE_BLOCK_ID_COUNT = MODIFIED_PAGE_BLOCK_BITMAP_LEN * 8 };
/****************************************************************//**
Provide a comparisson function for the RB-tree tree (space,
block_start_page) pairs. Actual implementation does not matter as
long as the ordering is full.
@return -1 if p1 < p2, 0 if p1 == p2, 1 if p1 > p2
*/
static
int
log_online_compare_bmp_keys(
/*========================*/
const void* p1, /*!<in: 1st key to compare */
const void* p2) /*!<in: 2nd key to compare */
{
const byte *k1 = (const byte *)p1;
const byte *k2 = (const byte *)p2;
ulint k1_space = mach_read_from_4(k1 + MODIFIED_PAGE_SPACE_ID);
ulint k2_space = mach_read_from_4(k2 + MODIFIED_PAGE_SPACE_ID);
if (k1_space == k2_space) {
ulint k1_start_page
= mach_read_from_4(k1 + MODIFIED_PAGE_1ST_PAGE_ID);
ulint k2_start_page
= mach_read_from_4(k2 + MODIFIED_PAGE_1ST_PAGE_ID);
return k1_start_page < k2_start_page
? -1 : k1_start_page > k2_start_page ? 1 : 0;
}
return k1_space < k2_space ? -1 : 1;
}
/****************************************************************//**
Set a bit for tracked page in the bitmap. Expand the bitmap tree as
necessary. */
static
void
log_online_set_page_bit(
/*====================*/
	ulint	space,	/*!<in: log record space id */
	ulint	page_no)/*!<in: log record page id */
{
	ulint		block_start_page;
	ulint		block_pos;
	uint		bit_pos;
	ib_rbt_bound_t	tree_search_pos;
	byte		search_page[MODIFIED_PAGE_BLOCK_SIZE];
	byte		*page_ptr;

	ut_ad(mutex_own(&log_bmp_sys->mutex));

	ut_a(space != ULINT_UNDEFINED);
	ut_a(page_no != ULINT_UNDEFINED);

	/* First page id covered by the bitmap block containing page_no. */
	block_start_page = page_no / MODIFIED_PAGE_BLOCK_ID_COUNT
		* MODIFIED_PAGE_BLOCK_ID_COUNT;
	/* Byte offset of the page bit inside the block bitmap.  When
	block_start_page != 0, page_no % block_start_page equals
	page_no - block_start_page (page_no < 2 * block_start_page always
	holds), i.e. the offset within the block. */
	block_pos = block_start_page ? (page_no % block_start_page / 8)
		: (page_no / 8);
	bit_pos = page_no % 8;

	/* Build a search key holding just (space, block start page). */
	mach_write_to_4(search_page + MODIFIED_PAGE_SPACE_ID, space);
	mach_write_to_4(search_page + MODIFIED_PAGE_1ST_PAGE_ID,
			block_start_page);

	if (!rbt_search(log_bmp_sys->modified_pages, &tree_search_pos,
			search_page)) {
		/* Block already tracked: reuse the existing node. */
		page_ptr = rbt_value(byte, tree_search_pos.last);
	}
	else {
		ib_rbt_node_t *new_node;

		/* Prefer recycling a node from the free list over a fresh
		allocation. */
		if (log_bmp_sys->page_free_list) {
			new_node = log_bmp_sys->page_free_list;
			log_bmp_sys->page_free_list = new_node->left;
		}
		else {
			new_node = static_cast<ib_rbt_node_t *>
				(ut_malloc
				 (SIZEOF_NODE(log_bmp_sys->modified_pages)));
		}

		memset(new_node, 0, SIZEOF_NODE(log_bmp_sys->modified_pages));

		page_ptr = rbt_value(byte, new_node);
		mach_write_to_4(page_ptr + MODIFIED_PAGE_SPACE_ID, space);
		mach_write_to_4(page_ptr + MODIFIED_PAGE_1ST_PAGE_ID,
				block_start_page);

		rbt_add_preallocated_node(log_bmp_sys->modified_pages,
					  &tree_search_pos, new_node);
	}
	page_ptr[MODIFIED_PAGE_BLOCK_BITMAP + block_pos] |= (1U << bit_pos);
}
/****************************************************************//**
Calculate a bitmap block checksum over every byte up to (but not
including) the checksum field itself.  Algorithm borrowed from
log_block_calc_checksum.
@return checksum */
UNIV_INLINE
ulint
log_online_calc_checksum(
/*=====================*/
	const byte*	block)	/*!<in: bitmap block */
{
	ulint	sum = 1;
	ulint	sh = 0;

	for (ulint i = 0; i < MODIFIED_PAGE_BLOCK_CHECKSUM; i++) {
		const ulint	b = block[i];

		/* Keep the accumulator inside 31 bits, then fold the byte
		in twice: once plain and once shifted by a rotating amount. */
		sum &= 0x7FFFFFFFUL;
		sum += b + (b << sh);

		if (++sh > 24) {
			sh = 0;
		}
	}

	return sum;
}
/****************************************************************//**
Read one bitmap data page and check it for corruption.
@return TRUE if page read OK, FALSE if I/O error */
static
ibool
log_online_read_bitmap_page(
/*========================*/
	log_online_bitmap_file_t	*bitmap_file,	/*!<in/out: bitmap
							file */
	byte				*page,		/*!<out: read page.
							Must be at least
							MODIFIED_PAGE_BLOCK_SIZE
							bytes long */
	ibool				*checksum_ok)	/*!<out: TRUE if page
							checksum OK */
{
	ulint	checksum;
	ulint	actual_checksum;
	ibool	success;

	/* Reads must be whole, block-aligned blocks inside the file. */
	ut_a(bitmap_file->size >= MODIFIED_PAGE_BLOCK_SIZE);
	ut_a(bitmap_file->offset
	     <= bitmap_file->size - MODIFIED_PAGE_BLOCK_SIZE);
	ut_a(bitmap_file->offset % MODIFIED_PAGE_BLOCK_SIZE == 0);

	success = os_file_read(bitmap_file->file, page, bitmap_file->offset,
			       MODIFIED_PAGE_BLOCK_SIZE);

	if (UNIV_UNLIKELY(!success)) {

		/* The following call prints an error message */
		os_file_get_last_error(TRUE);
		ib_logf(IB_LOG_LEVEL_WARN,
			"failed reading changed page bitmap file \'%s\'\n",
			bitmap_file->name);
		return FALSE;
	}

	/* Advance the cursor past the block just read. */
	bitmap_file->offset += MODIFIED_PAGE_BLOCK_SIZE;
	ut_ad(bitmap_file->offset <= bitmap_file->size);

	/* Compare the stored checksum against a freshly computed one. */
	checksum = mach_read_from_4(page + MODIFIED_PAGE_BLOCK_CHECKSUM);
	actual_checksum = log_online_calc_checksum(page);
	*checksum_ok = (checksum == actual_checksum);

	return TRUE;
}
/****************************************************************//**
Get the last fully tracked LSN from the bitmap file by reading
backwards until a correct end page is found. Detects incomplete
writes and corrupted data. Sets the start output position for the
written bitmap data.

Multiple bitmap files are handled using the following assumptions:
1) Only the last file might be corrupted. In case where no good data was found
in the last file, assume that the next to last file is OK. This assumption
does not limit crash recovery capability in any way.
2) If the whole of the last file was corrupted, assume that the start LSN in
its name is correct and use it for (re-)tracking start.

@return the last fully tracked LSN */
static
lsn_t
log_online_read_last_tracked_lsn(void)
/*==================================*/
{
	byte		page[MODIFIED_PAGE_BLOCK_SIZE];
	ibool		is_last_page	= FALSE;
	ibool		checksum_ok	= FALSE;
	lsn_t		result;
	os_offset_t	read_offset	= log_bmp_sys->out.offset;

	/* Scan backwards, one block at a time, until a block that is both
	checksum-clean and flagged as the last block of a write is found. */
	while (!checksum_ok && read_offset > 0 && !is_last_page)
	{
		read_offset -= MODIFIED_PAGE_BLOCK_SIZE;
		log_bmp_sys->out.offset = read_offset;

		if (!log_online_read_bitmap_page(&log_bmp_sys->out, page,
						 &checksum_ok)) {
			/* I/O error: stop scanning.  result becomes 0 below
			because checksum_ok stays FALSE.  (The original also
			assigned result = 0 here; that store was dead, as the
			expression after the loop overwrites it.) */
			checksum_ok = FALSE;
			break;
		}

		if (checksum_ok) {
			is_last_page
				= mach_read_from_4
				(page + MODIFIED_PAGE_IS_LAST_BLOCK);
		} else {

			ib_logf(IB_LOG_LEVEL_WARN,
				"corruption detected in \'%s\' at offset "
				UINT64PF "\n",
				log_bmp_sys->out.name, read_offset);
		}
	}

	result = (checksum_ok && is_last_page)
		? mach_read_from_8(page + MODIFIED_PAGE_END_LSN) : 0;

	/* Truncate the output file to discard the corrupted bitmap data, if
	any */
	if (!os_file_set_eof_at(log_bmp_sys->out.file,
				log_bmp_sys->out.offset)) {
		ib_logf(IB_LOG_LEVEL_WARN,
			"failed truncating changed page bitmap file \'%s\' to "
			UINT64PF " bytes\n",
			log_bmp_sys->out.name, log_bmp_sys->out.offset);
		result = 0;
	}

	return result;
}
/****************************************************************//**
Safely write the log_sys->tracked_lsn value. Uses atomic operations
if available, otherwise this field is protected with the log system
mutex. The reader counterpart function is log_get_tracked_lsn() in
log0log.c. */
UNIV_INLINE
void
log_set_tracked_lsn(
/*================*/
	lsn_t	tracked_lsn)	/*!<in: new value */
{
#ifdef HAVE_ATOMIC_BUILTINS_64
	/* Single writer, no data race here.  There is no atomic 64-bit
	store primitive available, so emulate one: read the current value
	with an add of 0, then add the delta to reach the new value. */
	lsn_t old_value = os_atomic_increment_uint64(&log_sys->tracked_lsn, 0);
	(void) os_atomic_increment_uint64(&log_sys->tracked_lsn,
					  tracked_lsn - old_value);
#else
	/* No 64-bit atomics: fall back to the log system mutex. */
	mutex_enter(&log_sys->mutex);
	log_sys->tracked_lsn = tracked_lsn;
	mutex_exit(&log_sys->mutex);
#endif
}
/*********************************************************************//**
Check if missing, if any, LSN interval can be read and tracked using the
current LSN value, the LSN value where the tracking stopped, and the log group
capacity.
@return TRUE if the missing interval can be tracked or if there's no missing
data. */
static
ibool
log_online_can_track_missing(
/*=========================*/
	lsn_t	last_tracked_lsn,	/*!<in: last tracked LSN */
	lsn_t	tracking_start_lsn)	/*!<in: current LSN */
{
	/* last_tracked_lsn might be < MIN_TRACKED_LSN in the case of empty
	bitmap file, handle this too. */
	last_tracked_lsn = ut_max(last_tracked_lsn, MIN_TRACKED_LSN);

	if (last_tracked_lsn > tracking_start_lsn) {
		/* A tracked LSN ahead of the tracking start is an unrecoverable
		inconsistency (mismatched bitmap files): terminate the server. */
		ib_logf(IB_LOG_LEVEL_ERROR,
			"last tracked LSN " LSN_PF " is ahead of tracking "
			"start LSN " LSN_PF ". This can be caused by "
			"mismatched bitmap files.\n",
			last_tracked_lsn, tracking_start_lsn);
		exit(1);
	}

	/* The gap is trackable only if the redo log still holds it, i.e.
	it does not exceed the log group capacity. */
	return (last_tracked_lsn == tracking_start_lsn)
		|| (log_sys->lsn - last_tracked_lsn
		    <= log_sys->log_group_capacity);
}
/****************************************************************//**
Diagnose a gap in tracked LSN range on server startup due to crash or
very fast shutdown and try to close it by tracking the data
immediatelly, if possible.  If the gap cannot be recovered (the redo
data was already overwritten), tracking restarts from the current
checkpoint LSN instead and a warning is issued. */
static
void
log_online_track_missing_on_startup(
/*================================*/
	lsn_t	last_tracked_lsn,	/*!<in: last tracked LSN read from the
					bitmap file */
	lsn_t	tracking_start_lsn)	/*!<in: last checkpoint LSN of the
					current server startup */
{
	ut_ad(last_tracked_lsn != tracking_start_lsn);
	ib_logf(IB_LOG_LEVEL_WARN, "last tracked LSN in \'%s\' is " LSN_PF
		", but the last checkpoint LSN is " LSN_PF ". This might be "
		"due to a server crash or a very fast shutdown. ",
		log_bmp_sys->out.name, last_tracked_lsn, tracking_start_lsn);
	/* See if we can fully recover the missing interval */
	if (log_online_can_track_missing(last_tracked_lsn,
					 tracking_start_lsn)) {
		ib_logf(IB_LOG_LEVEL_INFO,
			"reading the log to advance the last tracked LSN.\n");
		/* Resume parsing from where tracking stopped, clamped to
		the minimum trackable LSN for an empty bitmap file. */
		log_bmp_sys->start_lsn = ut_max(last_tracked_lsn,
						MIN_TRACKED_LSN);
		log_set_tracked_lsn(log_bmp_sys->start_lsn);
		if (!log_online_follow_redo_log()) {
			exit(1);
		}
		ut_ad(log_bmp_sys->end_lsn >= tracking_start_lsn);
		ib_logf(IB_LOG_LEVEL_INFO,
			"continuing tracking changed pages from LSN " LSN_PF
			"\n", log_bmp_sys->end_lsn);
	}
	else {
		/* The missing interval is larger than the redo capacity:
		the data is gone.  Restart tracking from the checkpoint and
		warn that incremental backups lose the interval. */
		ib_logf(IB_LOG_LEVEL_WARN,
			"the age of last tracked LSN exceeds log capacity, "
			"tracking-based incremental backups will work only "
			"from the higher LSN!\n");
		log_bmp_sys->end_lsn = log_bmp_sys->start_lsn
			= tracking_start_lsn;
		log_set_tracked_lsn(log_bmp_sys->start_lsn);
		ib_logf(IB_LOG_LEVEL_INFO,
			"starting tracking changed pages from LSN " LSN_PF
			"\n", log_bmp_sys->end_lsn);
	}
}
/*********************************************************************//**
Format a bitmap output file name to log_bmp_sys->out.name.  The name is
built from the bitmap file home directory, the fixed file name stem, the
current output sequence number and the given start LSN (see
bmp_file_name_template for the exact layout). */
static
void
log_online_make_bitmap_name(
/*=========================*/
	lsn_t	start_lsn)	/*!< in: the start LSN name part */
{
	ut_snprintf(log_bmp_sys->out.name, FN_REFLEN, bmp_file_name_template,
		    log_bmp_sys->bmp_file_home, bmp_file_name_stem,
		    log_bmp_sys->out_seq_num, start_lsn);
}
/*********************************************************************//**
Check if an old file that has the name of a new bitmap file we are about to
create should be overwritten.  Only a stat-able, regular, zero-length file
qualifies. */
static
ibool
log_online_should_overwrite(
/*========================*/
	const char	*path)	/*!< in: path to file */
{
	os_file_stat_t	stat_info;
	dberr_t		stat_err;

	/* Currently, it's OK to overwrite 0-sized files only */
	stat_err = os_file_get_status(path, &stat_info, false);
	if (stat_err != DB_SUCCESS) {
		return FALSE;
	}
	return stat_info.type == OS_FILE_TYPE_FILE
		&& stat_info.size == 0LL;
}
/*********************************************************************//**
Create a new empty bitmap output file with the name already formatted in
log_bmp_sys->out.name, deleting a stale zero-length file of the same name
first if present.  On success the output offset is reset to zero.
@return TRUE if operation succeeded, FALSE if I/O error */
static
ibool
log_online_start_bitmap_file(void)
/*==============================*/
{
	ibool	success	= TRUE;
	/* Check for an old file that should be deleted first */
	if (log_online_should_overwrite(log_bmp_sys->out.name)) {
		success = static_cast<ibool>(
			os_file_delete_if_exists(innodb_file_bmp_key,
						 log_bmp_sys->out.name));
	}
	if (UNIV_LIKELY(success)) {
		log_bmp_sys->out.file
			= os_file_create_simple_no_error_handling(
				innodb_file_bmp_key,
				log_bmp_sys->out.name,
				OS_FILE_CREATE,
				OS_FILE_READ_WRITE,
				&success);
	}
	if (UNIV_UNLIKELY(!success)) {
		/* The following call prints an error message */
		os_file_get_last_error(TRUE);
		ib_logf(IB_LOG_LEVEL_ERROR,
			"cannot create \'%s\'\n", log_bmp_sys->out.name);
		return FALSE;
	}
	/* New file: writing starts at the beginning. */
	log_bmp_sys->out.offset = 0;
	return TRUE;
}
/*********************************************************************//**
Close the current bitmap output file and create the next one, bumping the
output sequence number and embedding the given LSN in the new file name.
@return TRUE if operation succeeded, FALSE if I/O error */
static
ibool
log_online_rotate_bitmap_file(
/*===========================*/
	lsn_t	next_file_start_lsn)	/*!<in: the start LSN name
					part */
{
	/* The current file may be already closed (or never opened), e.g.
	at startup; only close a valid handle. */
	if (log_bmp_sys->out.file != os_file_invalid) {
		os_file_close(log_bmp_sys->out.file);
		log_bmp_sys->out.file = os_file_invalid;
	}
	log_bmp_sys->out_seq_num++;
	log_online_make_bitmap_name(next_file_start_lsn);
	return log_online_start_bitmap_file();
}
/*********************************************************************//**
Check the name of a given file if it's a changed page bitmap file and
return file sequence and start LSN name components if it is. If is not,
the values of output parameters are undefined.
@return TRUE if a given file is a changed page bitmap file. */
static
ibool
log_online_is_bitmap_file(
/*======================*/
	const os_file_stat_t*	file_info,		/*!<in: file to
							check */
	ulong*			bitmap_file_seq_num,	/*!<out: bitmap file
							sequence number */
	lsn_t*			bitmap_file_start_lsn)	/*!<out: bitmap file
							start LSN */
{
	char	stem[FN_REFLEN];
	char	name_format[32];

	ut_ad (strlen(file_info->name) < OS_FILE_MAX_PATH);

	/* Bound the %[] scanset conversion to the stem buffer size: an
	unbounded "%[a-z_]" may overflow stem, as directory entry names are
	only guaranteed to be shorter than OS_FILE_MAX_PATH, which may
	exceed FN_REFLEN. */
	ut_snprintf(name_format, sizeof(name_format),
		    "%%%d[a-z_]%%lu_%%llu.xdb", FN_REFLEN - 1);

	return ((file_info->type == OS_FILE_TYPE_FILE
		 || file_info->type == OS_FILE_TYPE_LINK)
		&& (sscanf(file_info->name, name_format, stem,
			   bitmap_file_seq_num,
			   (unsigned long long *)bitmap_file_start_lsn) == 3)
		&& (!strcmp(stem, bmp_file_name_stem)));
}
/*********************************************************************//**
Initialize the online log following subsytem: allocate and set up
log_bmp_sys, enumerate any existing bitmap files to find the last tracked
LSN, open or create the output bitmap file, and choose the LSN from which
tracking (re)starts, attempting to close any gap left by a previous
crash or fast shutdown. */
UNIV_INTERN
void
log_online_read_init(void)
/*======================*/
{
	ibool	success;
	/* Never start tracking below MIN_TRACKED_LSN even if the last
	checkpoint is older. */
	lsn_t	tracking_start_lsn
		= ut_max(log_sys->last_checkpoint_lsn, MIN_TRACKED_LSN);
	os_file_dir_t	bitmap_dir;
	os_file_stat_t	bitmap_dir_file_info;
	lsn_t	last_file_start_lsn	= MIN_TRACKED_LSN;
	size_t	srv_data_home_len;
	/* Bitmap data start and end in a bitmap block must be 8-byte
	aligned. */
	compile_time_assert(MODIFIED_PAGE_BLOCK_BITMAP % 8 == 0);
	compile_time_assert(MODIFIED_PAGE_BLOCK_BITMAP_LEN % 8 == 0);
	log_bmp_sys = static_cast<log_bitmap_struct *>
		(ut_malloc(sizeof(*log_bmp_sys)));
	/* The read buffer must be aligned to the log block size; allocate
	extra space and keep the unaligned pointer for freeing later. */
	log_bmp_sys->read_buf_ptr = static_cast<byte *>
		(ut_malloc(FOLLOW_SCAN_SIZE + OS_FILE_LOG_BLOCK_SIZE));
	log_bmp_sys->read_buf = static_cast<byte *>
		(ut_align(log_bmp_sys->read_buf_ptr, OS_FILE_LOG_BLOCK_SIZE));
	mutex_create(log_bmp_sys_mutex_key, &log_bmp_sys->mutex,
		     SYNC_LOG_ONLINE);
	/* Initialize bitmap file directory from srv_data_home and add a path
	separator if needed. */
	srv_data_home_len = strlen(srv_data_home);
	ut_a (srv_data_home_len < FN_REFLEN);
	strcpy(log_bmp_sys->bmp_file_home, srv_data_home);
	if (srv_data_home_len
	    && log_bmp_sys->bmp_file_home[srv_data_home_len - 1]
	    != SRV_PATH_SEPARATOR) {
		ut_a (srv_data_home_len < FN_REFLEN - 1);
		log_bmp_sys->bmp_file_home[srv_data_home_len]
			= SRV_PATH_SEPARATOR;
		log_bmp_sys->bmp_file_home[srv_data_home_len + 1] = '\0';
	}
	/* Enumerate existing bitmap files to either open the last one to get
	the last tracked LSN either to find that there are none and start
	tracking from scratch. */
	log_bmp_sys->out.name[0] = '\0';
	log_bmp_sys->out_seq_num = 0;
	bitmap_dir = os_file_opendir(log_bmp_sys->bmp_file_home, TRUE);
	ut_a(bitmap_dir);
	while (!os_file_readdir_next_file(log_bmp_sys->bmp_file_home,
					  bitmap_dir, &bitmap_dir_file_info)) {
		ulong	file_seq_num;
		lsn_t	file_start_lsn;
		if (!log_online_is_bitmap_file(&bitmap_dir_file_info,
					       &file_seq_num,
					       &file_start_lsn)) {
			continue;
		}
		/* Remember the non-empty file with the highest sequence
		number: that is the last bitmap file written. */
		if (file_seq_num > log_bmp_sys->out_seq_num
		    && bitmap_dir_file_info.size > 0) {
			log_bmp_sys->out_seq_num = file_seq_num;
			last_file_start_lsn = file_start_lsn;
			/* No dir component (log_bmp_sys->bmp_file_home) here,
			because that's the cwd */
			strncpy(log_bmp_sys->out.name,
				bitmap_dir_file_info.name, FN_REFLEN - 1);
			log_bmp_sys->out.name[FN_REFLEN - 1] = '\0';
		}
	}
	if (os_file_closedir(bitmap_dir)) {
		os_file_get_last_error(TRUE);
		ib_logf(IB_LOG_LEVEL_ERROR, "cannot close \'%s\'\n",
			log_bmp_sys->bmp_file_home);
		exit(1);
	}
	if (!log_bmp_sys->out_seq_num) {
		/* No previous bitmap files: name the first file with
		sequence number 1 and a zero start LSN. */
		log_bmp_sys->out_seq_num = 1;
		log_online_make_bitmap_name(0);
	}
	log_bmp_sys->modified_pages = rbt_create(MODIFIED_PAGE_BLOCK_SIZE,
						 log_online_compare_bmp_keys);
	log_bmp_sys->page_free_list = NULL;
	log_bmp_sys->out.file
		= os_file_create_simple_no_error_handling
		(innodb_file_bmp_key, log_bmp_sys->out.name, OS_FILE_OPEN,
		 OS_FILE_READ_WRITE, &success);
	if (!success) {
		/* New file, tracking from scratch */
		if (!log_online_start_bitmap_file()) {
			exit(1);
		}
	}
	else {
		/* Read the last tracked LSN from the last file */
		lsn_t	last_tracked_lsn;
		lsn_t	file_start_lsn;
		log_bmp_sys->out.size
			= os_file_get_size(log_bmp_sys->out.file);
		log_bmp_sys->out.offset	= log_bmp_sys->out.size;
		/* Round a partially-written trailing block down to the
		last complete bitmap block boundary. */
		if (log_bmp_sys->out.offset % MODIFIED_PAGE_BLOCK_SIZE != 0) {
			ib_logf(IB_LOG_LEVEL_WARN,
				"truncated block detected in \'%s\' at offset "
				UINT64PF "\n",
				log_bmp_sys->out.name,
				log_bmp_sys->out.offset);
			log_bmp_sys->out.offset -=
				log_bmp_sys->out.offset
				% MODIFIED_PAGE_BLOCK_SIZE;
		}
		last_tracked_lsn = log_online_read_last_tracked_lsn();
		if (!last_tracked_lsn) {
			/* Fall back to the LSN encoded in the file name if
			no valid last page could be read. */
			last_tracked_lsn = last_file_start_lsn;
		}
		/* Start a new file.  Choose the LSN value in its name based on
		if we can retrack any missing data. */
		if (log_online_can_track_missing(last_tracked_lsn,
						 tracking_start_lsn)) {
			file_start_lsn = last_tracked_lsn;
		} else {
			file_start_lsn = tracking_start_lsn;
		}
		if (!log_online_rotate_bitmap_file(file_start_lsn)) {
			exit(1);
		}
		if (last_tracked_lsn < tracking_start_lsn) {
			/* There is a gap; this also sets start_lsn and the
			tracked LSN, so we are done. */
			log_online_track_missing_on_startup
				(last_tracked_lsn, tracking_start_lsn);
			return;
		}
		if (last_tracked_lsn > tracking_start_lsn) {
			ib_logf(IB_LOG_LEVEL_WARN,
				"last tracked LSN is " LSN_PF ", but the last "
				"checkpoint LSN is " LSN_PF ". The "
				"tracking-based incremental backups will work "
				"only from the latter LSN!\n",
				last_tracked_lsn, tracking_start_lsn);
		}
	}
	ib_logf(IB_LOG_LEVEL_INFO, "starting tracking changed pages from LSN "
		LSN_PF "\n", tracking_start_lsn);
	log_bmp_sys->start_lsn = tracking_start_lsn;
	log_set_tracked_lsn(tracking_start_lsn);
}
/*********************************************************************//**
Shut down the online log following subsystem: close the output file,
free the modified page tree, the recycled node free list, the read
buffer and log_bmp_sys itself. */
UNIV_INTERN
void
log_online_read_shutdown(void)
/*==========================*/
{
	ib_rbt_node_t	*free_list_node	= log_bmp_sys->page_free_list;
	if (log_bmp_sys->out.file != os_file_invalid) {
		os_file_close(log_bmp_sys->out.file);
		log_bmp_sys->out.file = os_file_invalid;
	}
	rbt_free(log_bmp_sys->modified_pages);
	/* The free list reuses the rbt node's left pointer as the "next"
	link (see log_online_write_bitmap() which builds the list), so walk
	it through ->left. */
	while (free_list_node) {
		ib_rbt_node_t	*next	= free_list_node->left;
		ut_free(free_list_node);
		free_list_node = next;
	}
	mutex_free(&log_bmp_sys->mutex);
	ut_free(log_bmp_sys->read_buf_ptr);
	ut_free(log_bmp_sys);
}
/*********************************************************************//**
For the given minilog record type determine if the record has (space; page)
associated with it.
@return TRUE if the record has (space; page) in it */
static
ibool
log_online_rec_has_page(
/*====================*/
	byte	type)	/*!<in: the minilog record type */
{
	/* Only the two marker record types carry no (space; page). */
	switch (type) {
	case MLOG_MULTI_REC_END:
	case MLOG_DUMMY_RECORD:
		return FALSE;
	default:
		return TRUE;
	}
}
/*********************************************************************//**
Check if a page field for a given log record type actually contains a page
id. It does not for file operations and MLOG_LSN.
@return TRUE if page field contains actual page id, FALSE otherwise */
static
ibool
log_online_rec_page_means_page(
/*===========================*/
	byte	type)	/*!<in: log record type */
{
	if (!log_online_rec_has_page(type)) {
		return FALSE;
	}
	/* File operation records (and MLOG_LSN, when compiled in) reuse
	the page field for other purposes. */
	switch (type) {
#ifdef UNIV_LOG_LSN_DEBUG
	case MLOG_LSN:
#endif
	case MLOG_FILE_CREATE:
	case MLOG_FILE_RENAME:
	case MLOG_FILE_DELETE:
	case MLOG_FILE_CREATE2:
		return FALSE;
	default:
		return TRUE;
	}
}
/*********************************************************************//**
Parse the log data in the parse buffer for the (space, page) pairs and add
them to the modified page set as necessary. Removes the fully-parsed records
from the buffer. If an incomplete record is found, moves it to the end of the
buffer. */
static
void
log_online_parse_redo_log(void)
/*===========================*/
{
	byte *ptr = log_bmp_sys->parse_buf;
	byte *end = log_bmp_sys->parse_buf_end;
	ulint len = 0;
	ut_ad(mutex_own(&log_bmp_sys->mutex));
	/* Parse record by record until either the buffer is exhausted or
	we have parsed up to the target end LSN. */
	while (ptr != end
	       && log_bmp_sys->next_parse_lsn < log_bmp_sys->end_lsn) {
		byte	type;
		ulint	space;
		ulint	page_no;
		byte*	body;
		/* recv_sys is not initialized, so on corrupt log we will
		SIGSEGV.  But the log of a live database should not be
		corrupt. */
		len = recv_parse_log_rec(ptr, end, &type, &space, &page_no,
					 &body);
		if (len > 0) {
			/* A complete record: mark its page as changed if
			the record's page field holds an actual page id. */
			if (log_online_rec_page_means_page(type)) {
				ut_a(len >= 3);
				log_online_set_page_bit(space, page_no);
			}
			ptr += len;
			ut_ad(ptr <= end);
			log_bmp_sys->next_parse_lsn
				= recv_calc_lsn_on_data_add
				(log_bmp_sys->next_parse_lsn, len);
		}
		else {
			/* Incomplete log record.  Shift it to the
			beginning of the parse buffer and leave it to be
			completed on the next read. */
			ut_memmove(log_bmp_sys->parse_buf, ptr, end - ptr);
			log_bmp_sys->parse_buf_end
				= log_bmp_sys->parse_buf + (end - ptr);
			ptr = end;
		}
	}
	/* If the last parsed record was complete, the parse buffer is
	emptied.  NOTE(review): if the loop stopped because next_parse_lsn
	reached end_lsn with data still in the buffer, that remainder is
	discarded here — presumably it is re-read on the next tracking
	pass; confirm against the callers. */
	if (len > 0) {
		log_bmp_sys->parse_buf_end = log_bmp_sys->parse_buf;
	}
}
/*********************************************************************//**
Check the log block checksum, logging an error with both the stored and
the recalculated checksum on mismatch.
@return TRUE if the log block checksum is OK, FALSE otherwise. */
static
ibool
log_online_is_valid_log_seg(
/*========================*/
	const byte* log_block)	/*!< in: read log data */
{
	ibool checksum_is_ok
		= log_block_checksum_is_ok_or_old_format(log_block);
	if (!checksum_is_ok) {
		ib_logf(IB_LOG_LEVEL_ERROR,
			"log block checksum mismatch: expected " ULINTPF ", "
			"calculated checksum " ULINTPF "\n",
			log_block_get_checksum(log_block),
			log_block_calc_checksum(log_block));
	}
	return checksum_is_ok;
}
/*********************************************************************//**
Copy new log data to the parse buffer while skipping log block header,
trailer and already parsed data. */
static
void
log_online_add_to_parse_buf(
/*========================*/
	const byte*	log_block,	/*!< in: read log data */
	ulint		data_len,	/*!< in: length of read log data */
	ulint		skip_len)	/*!< in: how much of log data to
					skip */
{
	/* A non-zero skip already covers the block header; otherwise start
	right after it. */
	ulint	start_offset	= skip_len ? skip_len : LOG_BLOCK_HDR_SIZE;
	/* A full block also carries a trailer that must not be copied. */
	ulint	end_offset
		= (data_len == OS_FILE_LOG_BLOCK_SIZE)
		? data_len - LOG_BLOCK_TRL_SIZE
		: data_len;
	/* skip_len may point past the block's payload; copy nothing then. */
	ulint	actual_data_len	= (end_offset >= start_offset)
		? end_offset - start_offset : 0;
	ut_ad(mutex_own(&log_bmp_sys->mutex));
	ut_memcpy(log_bmp_sys->parse_buf_end, log_block + start_offset,
		  actual_data_len);
	log_bmp_sys->parse_buf_end += actual_data_len;
	ut_a(log_bmp_sys->parse_buf_end - log_bmp_sys->parse_buf
	     <= RECV_PARSING_BUF_SIZE);
}
/*********************************************************************//**
Parse the log block: first copies the read log data to the parse buffer while
skipping log block header, trailer and already parsed data.  Then it actually
parses the log to add to the modified page bitmap. */
static
void
log_online_parse_redo_log_block(
/*============================*/
	const byte*	log_block,		  /*!< in: read log data */
	ulint		skip_already_parsed_len)  /*!< in: how many bytes of
						  log data should be skipped as
						  they were parsed before */
{
	ulint	block_data_len;
	ut_ad(mutex_own(&log_bmp_sys->mutex));
	block_data_len = log_block_get_data_len(log_block);
	/* The data length is either the full block size (full block) or
	strictly less (the last, partially-filled block). */
	ut_ad(block_data_len % OS_FILE_LOG_BLOCK_SIZE == 0
	      || block_data_len < OS_FILE_LOG_BLOCK_SIZE);
	log_online_add_to_parse_buf(log_block, block_data_len,
				    skip_already_parsed_len);
	log_online_parse_redo_log();
}
/*********************************************************************//**
Read and parse one redo log chunk and updates the modified page bitmap.
Reads [block_start_lsn, block_end_lsn) from the given group into the read
buffer and parses it block by block, skipping data that was already
parsed and stopping on a checksum failure. */
static
void
log_online_follow_log_seg(
/*======================*/
	log_group_t*	group,		 /*!< in: the log group to use */
	lsn_t		block_start_lsn, /*!< in: the LSN to read from */
	lsn_t		block_end_lsn)	 /*!< in: the LSN to read to */
{
	/* Pointer to the current OS_FILE_LOG_BLOCK-sized chunk of the read log
	data to parse */
	byte* log_block = log_bmp_sys->read_buf;
	byte* log_block_end = log_bmp_sys->read_buf
		+ (block_end_lsn - block_start_lsn);
	ut_ad(mutex_own(&log_bmp_sys->mutex));
	mutex_enter(&log_sys->mutex);
	log_group_read_log_seg(LOG_RECOVER, log_bmp_sys->read_buf,
			       group, block_start_lsn, block_end_lsn, TRUE);
	/* log_group_read_log_seg will release the log_sys->mutex for us */
	while (log_block < log_block_end
	       && log_bmp_sys->next_parse_lsn < log_bmp_sys->end_lsn) {
		/* How many bytes of log data should we skip in the current log
		block.  Skipping is necessary because we round down the next
		parse LSN thus it is possible to read the already-processed log
		data many times */
		ulint skip_already_parsed_len = 0;
		/* Stop at the first corrupt block; the rest of the chunk
		cannot be trusted. */
		if (!log_online_is_valid_log_seg(log_block)) {
			break;
		}
		if ((block_start_lsn <= log_bmp_sys->next_parse_lsn)
		    && (block_start_lsn + OS_FILE_LOG_BLOCK_SIZE
			> log_bmp_sys->next_parse_lsn)) {
			/* The next parse LSN is inside the current block, skip
			data preceding it. */
			skip_already_parsed_len
				= (ulint)(log_bmp_sys->next_parse_lsn
					  - block_start_lsn);
		}
		else {
			/* If the next parse LSN is not inside the current
			block, then the only option is that we have processed
			ahead already. */
			ut_a(block_start_lsn > log_bmp_sys->next_parse_lsn);
		}
		/* TODO: merge the copying to the parse buf code with
		skip_already_len calculations */
		log_online_parse_redo_log_block(log_block,
						skip_already_parsed_len);
		log_block += OS_FILE_LOG_BLOCK_SIZE;
		block_start_lsn += OS_FILE_LOG_BLOCK_SIZE;
	}
	return;
}
/*********************************************************************//**
Read and parse the redo log in a given group in FOLLOW_SCAN_SIZE-sized
chunks and updates the modified page bitmap.  Iterates from the block
containing the parse start LSN up to log_bmp_sys->end_lsn. */
static
void
log_online_follow_log_group(
/*========================*/
	log_group_t*	group,		/*!< in: the log group to use */
	lsn_t		contiguous_lsn)	/*!< in: the LSN of log block start
					containing the log_parse_start_lsn */
{
	lsn_t	block_start_lsn	= contiguous_lsn;
	lsn_t	block_end_lsn;
	ut_ad(mutex_own(&log_bmp_sys->mutex));
	log_bmp_sys->next_parse_lsn = log_bmp_sys->start_lsn;
	log_bmp_sys->parse_buf_end = log_bmp_sys->parse_buf;
	do {
		block_end_lsn = block_start_lsn + FOLLOW_SCAN_SIZE;
		log_online_follow_log_seg(group, block_start_lsn,
					  block_end_lsn);
		/* Next parse LSN can become higher than the last read LSN
		only in the case when the read LSN falls right on the block
		boundary, in which case next parse lsn is bumped to the actual
		data LSN on the next (not yet read) block.  This assert is
		slightly conservative. */
		ut_a(log_bmp_sys->next_parse_lsn
		     <= block_end_lsn + LOG_BLOCK_HDR_SIZE
		     + LOG_BLOCK_TRL_SIZE);
		block_start_lsn = block_end_lsn;
	} while (block_end_lsn < log_bmp_sys->end_lsn);
	/* Assert that the last read log record is a full one */
	ut_a(log_bmp_sys->parse_buf_end == log_bmp_sys->parse_buf);
}
/*********************************************************************//**
Write, flush one bitmap block to disk and advance the output position if
successful.
@return TRUE if page written OK, FALSE if I/O error */
static
ibool
log_online_write_bitmap_page(
/*=========================*/
	const byte *block)	/*!< in: block to write */
{
	ibool	success;
	ut_ad(mutex_own(&log_bmp_sys->mutex));
	/* Simulate a write error */
	DBUG_EXECUTE_IF("bitmap_page_write_error", return FALSE;);
	success = os_file_write(log_bmp_sys->out.name, log_bmp_sys->out.file,
				block, log_bmp_sys->out.offset,
				MODIFIED_PAGE_BLOCK_SIZE);
	if (UNIV_UNLIKELY(!success)) {
		/* The following call prints an error message */
		os_file_get_last_error(TRUE);
		ib_logf(IB_LOG_LEVEL_ERROR, "failed writing changed page "
			"bitmap file \'%s\'\n", log_bmp_sys->out.name);
		return FALSE;
	}
	success = os_file_flush(log_bmp_sys->out.file);
	if (UNIV_UNLIKELY(!success)) {
		/* The following call prints an error message */
		os_file_get_last_error(TRUE);
		ib_logf(IB_LOG_LEVEL_ERROR, "failed flushing changed page "
			"bitmap file \'%s\'\n", log_bmp_sys->out.name);
		return FALSE;
	}
#ifdef UNIV_LINUX
	/* The just-written block will not be re-read; advise the kernel to
	drop it from the page cache. */
	posix_fadvise(log_bmp_sys->out.file, log_bmp_sys->out.offset,
		      MODIFIED_PAGE_BLOCK_SIZE, POSIX_FADV_DONTNEED);
#endif
	/* Advance the output position only after a fully successful
	write + flush. */
	log_bmp_sys->out.offset += MODIFIED_PAGE_BLOCK_SIZE;
	return TRUE;
}
/*********************************************************************//**
Append the current changed page bitmap to the bitmap file.  Clears the
bitmap tree and recycles its nodes to the free list.
@return TRUE if bitmap written OK, FALSE if I/O error*/
static
ibool
log_online_write_bitmap(void)
/*=========================*/
{
	ib_rbt_node_t		*bmp_tree_node;
	const ib_rbt_node_t	*last_bmp_tree_node;
	ibool			success = TRUE;
	ut_ad(mutex_own(&log_bmp_sys->mutex));
	/* Rotate to a new bitmap file if the current one has reached the
	configured maximum size. */
	if (log_bmp_sys->out.offset >= srv_max_bitmap_file_size) {
		if (!log_online_rotate_bitmap_file(log_bmp_sys->start_lsn)) {
			return FALSE;
		}
	}
	bmp_tree_node = (ib_rbt_node_t *)
		rbt_first(log_bmp_sys->modified_pages);
	last_bmp_tree_node = rbt_last(log_bmp_sys->modified_pages);
	while (bmp_tree_node) {
		byte *page = rbt_value(byte, bmp_tree_node);
		/* In case of a bitmap page write error keep on looping over
		the tree to reclaim its memory through the free list instead of
		returning immediatelly. */
		if (UNIV_LIKELY(success)) {
			/* Flag the last block so readers can detect a
			complete run. */
			if (bmp_tree_node == last_bmp_tree_node) {
				mach_write_to_4(page
						+ MODIFIED_PAGE_IS_LAST_BLOCK,
						1);
			}
			/* Stamp the LSN interval and the checksum before
			writing the block out. */
			mach_write_to_8(page + MODIFIED_PAGE_START_LSN,
					log_bmp_sys->start_lsn);
			mach_write_to_8(page + MODIFIED_PAGE_END_LSN,
					log_bmp_sys->end_lsn);
			mach_write_to_4(page + MODIFIED_PAGE_BLOCK_CHECKSUM,
					log_online_calc_checksum(page));
			success = log_online_write_bitmap_page(page);
		}
		/* Recycle the node: the free list is threaded through the
		node's left pointer (see log_online_read_shutdown()). */
		bmp_tree_node->left = log_bmp_sys->page_free_list;
		log_bmp_sys->page_free_list = bmp_tree_node;
		bmp_tree_node = (ib_rbt_node_t*)
			rbt_next(log_bmp_sys->modified_pages, bmp_tree_node);
		DBUG_EXECUTE_IF("bitmap_page_2_write_error",
				DBUG_SET("+d,bitmap_page_write_error"););
	}
	rbt_reset(log_bmp_sys->modified_pages);
	return success;
}
/*********************************************************************//**
Read and parse the redo log up to last checkpoint LSN to build the changed
page bitmap which is then written to disk.
@return TRUE if log tracking succeeded, FALSE if bitmap write I/O error */
UNIV_INTERN
ibool
log_online_follow_redo_log(void)
/*============================*/
{
	lsn_t		contiguous_start_lsn;
	log_group_t*	group;
	ibool		result;
	mutex_enter(&log_bmp_sys->mutex);
	if (!srv_track_changed_pages) {
		mutex_exit(&log_bmp_sys->mutex);
		return FALSE;
	}
	ut_ad(!srv_read_only_mode);
	/* Grab the LSN of the last checkpoint, we will parse up to it */
	mutex_enter(&(log_sys->mutex));
	log_bmp_sys->end_lsn = log_sys->last_checkpoint_lsn;
	mutex_exit(&(log_sys->mutex));
	/* Nothing new since the last pass: trivially succeed. */
	if (log_bmp_sys->end_lsn == log_bmp_sys->start_lsn) {
		mutex_exit(&log_bmp_sys->mutex);
		return TRUE;
	}
	group = UT_LIST_GET_FIRST(log_sys->log_groups);
	ut_a(group);
	/* Reading starts at a log-block-aligned LSN at or below start_lsn;
	already-parsed data in the first block is skipped while parsing. */
	contiguous_start_lsn = ut_uint64_align_down(log_bmp_sys->start_lsn,
						    OS_FILE_LOG_BLOCK_SIZE);
	while (group) {
		log_online_follow_log_group(group, contiguous_start_lsn);
		group = UT_LIST_GET_NEXT(log_groups, group);
	}
	/* A crash injection site that ensures last checkpoint LSN > last
	tracked LSN, so that LSN tracking for this interval is tested. */
	DBUG_EXECUTE_IF("crash_before_bitmap_write", DBUG_SUICIDE(););
	result = log_online_write_bitmap();
	log_bmp_sys->start_lsn = log_bmp_sys->end_lsn;
	log_set_tracked_lsn(log_bmp_sys->start_lsn);
	mutex_exit(&log_bmp_sys->mutex);
	return result;
}
/*********************************************************************//**
Diagnose a bitmap file range setup failure and free the partially-initialized
bitmap file range. */
UNIV_COLD
static
void
log_online_diagnose_inconsistent_dir(
/*=================================*/
log_online_bitmap_file_range_t *bitmap_files) /*!<in/out: bitmap file
range */
{
ib_logf(IB_LOG_LEVEL_WARN,
"InnoDB: Warning: inconsistent bitmap file "
"directory for a "
"INFORMATION_SCHEMA.INNODB_CHANGED_PAGES query"
"\n");
free(bitmap_files->files);
}
/*********************************************************************//**
List the bitmap files in srv_data_home and setup their range that contains the
specified LSN interval.  This range, if non-empty, will start with a file that
has the greatest LSN equal to or less than the start LSN and will include all
the files up to the one with the greatest LSN less than the end LSN.  Caller
must free bitmap_files->files (with ut_free()) when done if bitmap_files set
to non-NULL and this function returned TRUE.  Field bitmap_files->count might
be set to a larger value than the actual count of the files, and space for the
unused array slots will be allocated but cleared to zeroes.
@return TRUE if succeeded
*/
static
ibool
log_online_setup_bitmap_file_range(
/*===============================*/
	log_online_bitmap_file_range_t	*bitmap_files,	/*!<in/out: bitmap file
							range */
	lsn_t				range_start,	/*!<in: start LSN */
	lsn_t				range_end)	/*!<in: end LSN */
{
	os_file_dir_t	bitmap_dir;
	os_file_stat_t	bitmap_dir_file_info;
	ulong		first_file_seq_num	= ULONG_MAX;
	ulong		last_file_seq_num	= 0;
	lsn_t		first_file_start_lsn	= LSN_MAX;
	ut_ad(range_end >= range_start);
	bitmap_files->count = 0;
	bitmap_files->files = NULL;
	/* 1st pass: size the info array */
	bitmap_dir = os_file_opendir(srv_data_home, FALSE);
	if (UNIV_UNLIKELY(!bitmap_dir)) {
		ib_logf(IB_LOG_LEVEL_ERROR,
			"failed to open bitmap directory \'%s\'\n",
			srv_data_home);
		return FALSE;
	}
	while (!os_file_readdir_next_file(srv_data_home, bitmap_dir,
					  &bitmap_dir_file_info)) {
		ulong	file_seq_num;
		lsn_t	file_start_lsn;
		/* Skip non-bitmap files and files entirely past the range
		end. */
		if (!log_online_is_bitmap_file(&bitmap_dir_file_info,
					       &file_seq_num,
					       &file_start_lsn)
		    || file_start_lsn >= range_end) {
			continue;
		}
		if (file_seq_num > last_file_seq_num) {
			last_file_seq_num = file_seq_num;
		}
		/* Track the file whose start LSN is the greatest one that
		is still <= range_start — the range must begin there. */
		if (file_start_lsn >= range_start
		    || file_start_lsn == first_file_start_lsn
		    || first_file_start_lsn > range_start) {
			/* A file that falls into the range */
			if (file_start_lsn < first_file_start_lsn) {
				first_file_start_lsn = file_start_lsn;
			}
			if (file_seq_num < first_file_seq_num) {
				first_file_seq_num = file_seq_num;
			}
		} else if (file_start_lsn > first_file_start_lsn) {
			/* A file that has LSN closer to the range start
			but smaller than it, replacing another such file */
			first_file_start_lsn = file_start_lsn;
			first_file_seq_num = file_seq_num;
		}
	}
	if (UNIV_UNLIKELY(os_file_closedir(bitmap_dir))) {
		os_file_get_last_error(TRUE);
		ib_logf(IB_LOG_LEVEL_ERROR, "cannot close \'%s\'\n",
			srv_data_home);
		return FALSE;
	}
	/* No matching files at all: an empty, but valid, range. */
	if (first_file_seq_num == ULONG_MAX && last_file_seq_num == 0) {
		bitmap_files->count = 0;
		return TRUE;
	}
	bitmap_files->count = last_file_seq_num - first_file_seq_num + 1;
	DEBUG_SYNC_C("setup_bitmap_range_middle");
	/* 2nd pass: get the file names in the file_seq_num order */
	bitmap_dir = os_file_opendir(srv_data_home, FALSE);
	if (UNIV_UNLIKELY(!bitmap_dir)) {
		ib_logf(IB_LOG_LEVEL_ERROR,
			"failed to open bitmap directory \'%s\'\n",
			srv_data_home);
		return FALSE;
	}
	bitmap_files->files
		= static_cast<log_online_bitmap_file_range_struct::files_t *>
		(ut_malloc(bitmap_files->count
			   * sizeof(bitmap_files->files[0])));
	memset(bitmap_files->files, 0,
	       bitmap_files->count * sizeof(bitmap_files->files[0]));
	while (!os_file_readdir_next_file(srv_data_home, bitmap_dir,
					  &bitmap_dir_file_info)) {
		ulong	file_seq_num;
		lsn_t	file_start_lsn;
		size_t	array_pos;
		if (!log_online_is_bitmap_file(&bitmap_dir_file_info,
					       &file_seq_num,
					       &file_start_lsn)
		    || file_start_lsn >= range_end
		    || file_start_lsn < first_file_start_lsn) {
			continue;
		}
		array_pos = file_seq_num - first_file_seq_num;
		/* The directory may have changed between the two passes;
		bail out on any inconsistency. */
		if (UNIV_UNLIKELY(array_pos >= bitmap_files->count)) {
			log_online_diagnose_inconsistent_dir(bitmap_files);
			return FALSE;
		}
		if (file_seq_num > bitmap_files->files[array_pos].seq_num) {
			bitmap_files->files[array_pos].seq_num = file_seq_num;
			strncpy(bitmap_files->files[array_pos].name,
				bitmap_dir_file_info.name, FN_REFLEN);
			bitmap_files->files[array_pos].name[FN_REFLEN - 1]
				= '\0';
			bitmap_files->files[array_pos].start_lsn
				= file_start_lsn;
		}
	}
	if (UNIV_UNLIKELY(os_file_closedir(bitmap_dir))) {
		os_file_get_last_error(TRUE);
		ib_logf(IB_LOG_LEVEL_ERROR, "cannot close \'%s\'\n",
			srv_data_home);
		/* Allocated with ut_malloc() above — release with ut_free()
		(matches log_online_bitmap_iterator_release()). */
		ut_free(bitmap_files->files);
		return FALSE;
	}
	if (!bitmap_files->files[0].seq_num
	    || bitmap_files->files[0].seq_num != first_file_seq_num) {
		log_online_diagnose_inconsistent_dir(bitmap_files);
		return FALSE;
	}
	/* Sanity-check that the collected files are strictly ordered by
	both sequence number and start LSN. */
	{
		size_t i;
		for (i = 1; i < bitmap_files->count; i++) {
			if (!bitmap_files->files[i].seq_num) {
				break;
			}
			if ((bitmap_files->files[i].seq_num
			     <= bitmap_files->files[i - 1].seq_num)
			    || (bitmap_files->files[i].start_lsn
				< bitmap_files->files[i - 1].start_lsn)) {
				log_online_diagnose_inconsistent_dir(
					bitmap_files);
				return FALSE;
			}
		}
	}
	return TRUE;
}
/****************************************************************//**
Open a bitmap file for reading.  Fills in the file's full path, size and
resets the read offset to zero.
@return TRUE if opened successfully */
static
ibool
log_online_open_bitmap_file_read_only(
/*==================================*/
	const char*			name,		/*!<in: bitmap file
							name without directory,
							which is assumed to be
							srv_data_home */
	log_online_bitmap_file_t*	bitmap_file)	/*!<out: opened bitmap
							file */
{
	ibool	success	= FALSE;
	ut_ad(name[0] != '\0');
	ut_snprintf(bitmap_file->name, FN_REFLEN, "%s%s", srv_data_home, name);
	bitmap_file->file
		= os_file_create_simple_no_error_handling(innodb_file_bmp_key,
							  bitmap_file->name,
							  OS_FILE_OPEN,
							  OS_FILE_READ_ONLY,
							  &success);
	if (UNIV_UNLIKELY(!success)) {
		/* Here and below assume that bitmap file names do not
		contain apostrophes, thus no need for ut_print_filename(). */
		ib_logf(IB_LOG_LEVEL_WARN,
			"error opening the changed page bitmap \'%s\'\n",
			bitmap_file->name);
		return FALSE;
	}
	bitmap_file->size = os_file_get_size(bitmap_file->file);
	bitmap_file->offset = 0;
#ifdef UNIV_LINUX
	/* The file will be read sequentially exactly once; hint the kernel
	accordingly. */
	posix_fadvise(bitmap_file->file, 0, 0, POSIX_FADV_SEQUENTIAL);
	posix_fadvise(bitmap_file->file, 0, 0, POSIX_FADV_NOREUSE);
#endif
	return TRUE;
}
/****************************************************************//**
Diagnose one or both of the following situations if we read close to
the end of bitmap file:
1) Warn if the remainder of the file is less than one page.
2) Error if we cannot read any more full pages but the last read page
did not have the last-in-run flag set.
@return FALSE for the error */
static
ibool
log_online_diagnose_bitmap_eof(
/*===========================*/
	const log_online_bitmap_file_t*	bitmap_file,	/*!< in: bitmap file */
	ibool				last_page_in_run)/*!< in: "last page in
							run" flag value in the
							last read page */
{
	/* Check if we are too close to EOF to read a full page */
	if ((bitmap_file->size < MODIFIED_PAGE_BLOCK_SIZE)
	    || (bitmap_file->offset
		> bitmap_file->size - MODIFIED_PAGE_BLOCK_SIZE)) {
		if (UNIV_UNLIKELY(bitmap_file->offset != bitmap_file->size)) {
			/* If we are not at EOF and we have less than one page
			to read, it's junk.  This error is not fatal in
			itself. */
			ib_logf(IB_LOG_LEVEL_WARN,
				"junk at the end of changed page bitmap file "
				"\'%s\'.\n", bitmap_file->name);
		}
		if (UNIV_UNLIKELY(!last_page_in_run)) {
			/* We are at EOF but the last read page did not finish
			a run */
			/* It's a "Warning" here because it's not a fatal error
			for the whole server */
			ib_logf(IB_LOG_LEVEL_WARN,
				"changed page bitmap file \'%s\' does not "
				"contain a complete run at the end.\n",
				bitmap_file->name);
			return FALSE;
		}
	}
	return TRUE;
}
/*********************************************************************//**
Initialize the log bitmap iterator for a given range. The records are
processed at a bitmap block granularity, i.e. all the records in the same block
share the same start and end LSN values, the exact LSN of each record is
unavailable (nor is it defined for blocks that are touched more than once in
the LSN interval contained in the block). Thus min_lsn and max_lsn should be
set at block boundaries or bigger, otherwise the records at the 1st and the
last blocks will not be returned. Also note that there might be returned
records with LSN < min_lsn, as min_lsn is used to select the correct starting
file but not block.
@return TRUE if the iterator is initialized OK, FALSE otherwise. */
UNIV_INTERN
ibool
log_online_bitmap_iterator_init(
/*============================*/
	log_bitmap_iterator_t	*i,	/*!<in/out: iterator */
	lsn_t			min_lsn,/*!< in: start LSN */
	lsn_t			max_lsn)/*!< in: end LSN */
{
	ut_a(i);

	if (UNIV_UNLIKELY(min_lsn > max_lsn)) {

		/* Empty range: initialize the iterator so that the first
		_next() call immediately returns FALSE, and _release() is
		safe to call. */
		i->in_files.count = 0;
		i->in_files.files = NULL;
		i->in.file = os_file_invalid;
		i->page = NULL;
		i->failed = FALSE;
		return TRUE;
	}

	/* Collect the bitmap files covering [min_lsn, max_lsn]. */
	if (!log_online_setup_bitmap_file_range(&i->in_files, min_lsn,
		max_lsn)) {

		i->failed = TRUE;
		return FALSE;
	}

	i->in_i = 0;

	if (i->in_files.count == 0) {

		/* Empty range */
		i->in.file = os_file_invalid;
		i->page = NULL;
		i->failed = FALSE;
		return TRUE;
	}

	/* Open the 1st bitmap file */
	if (UNIV_UNLIKELY(!log_online_open_bitmap_file_read_only(
				  i->in_files.files[i->in_i].name,
				  &i->in))) {

		i->in_i = i->in_files.count;
		/* NOTE(review): the file array is released with free() here
		but with ut_free() in log_online_bitmap_iterator_release() --
		confirm which allocator log_online_setup_bitmap_file_range()
		uses. */
		free(i->in_files.files);
		i->failed = TRUE;
		return FALSE;
	}

	/* Allocate the block buffer.  bit_offset is set past the end of the
	block bitmap so that the first _next() call reads in the first
	block instead of scanning stale buffer contents. */
	i->page = static_cast<byte *>(ut_malloc(MODIFIED_PAGE_BLOCK_SIZE));
	i->bit_offset = MODIFIED_PAGE_BLOCK_BITMAP_LEN;
	i->start_lsn = i->end_lsn = 0;
	i->space_id = 0;
	i->first_page_id = 0;
	i->last_page_in_run = TRUE;
	i->changed = FALSE;
	i->failed = FALSE;

	return TRUE;
}
/*********************************************************************//**
Releases log bitmap iterator. */
UNIV_INTERN
void
log_online_bitmap_iterator_release(
/*===============================*/
	log_bitmap_iterator_t	*it)	/*!<in/out: iterator */
{
	ut_a(it);

	/* Close the bitmap file, if one is still open. */
	if (it->in.file != os_file_invalid) {

		os_file_close(it->in.file);
		it->in.file = os_file_invalid;
	}

	/* Release the bitmap file array and the block buffer, if any. */
	if (it->in_files.files) {

		ut_free(it->in_files.files);
	}

	if (it->page) {

		ut_free(it->page);
	}

	/* A released iterator must not be used for further iteration. */
	it->failed = TRUE;
}
/*********************************************************************//**
Iterates through bits of saved bitmap blocks.
Sequentially reads blocks from bitmap file(s) and interates through
their bits. Ignores blocks with wrong checksum.
@return TRUE if iteration is successful, FALSE if all bits are iterated. */
UNIV_INTERN
ibool
log_online_bitmap_iterator_next(
/*============================*/
	log_bitmap_iterator_t *i)	/*!<in/out: iterator */
{
	ibool	checksum_ok = FALSE;
	ibool	success;

	ut_a(i);

	/* Nothing to iterate over an empty file range. */
	if (UNIV_UNLIKELY(i->in_files.count == 0)) {

		return FALSE;
	}

	/* Fast path: bits remain in the currently loaded block. */
	if (UNIV_LIKELY(i->bit_offset < MODIFIED_PAGE_BLOCK_BITMAP_LEN))
	{
		++i->bit_offset;
		i->changed =
			IS_BIT_SET(i->page + MODIFIED_PAGE_BLOCK_BITMAP,
				   i->bit_offset);
		return TRUE;
	}

	/* The current block is exhausted: read the next block that has a
	valid checksum, advancing to the next bitmap file whenever fewer
	than one full block remains in the current one.  Blocks with a
	wrong checksum are skipped. */
	while (!checksum_ok)
	{
		while (i->in.size < MODIFIED_PAGE_BLOCK_SIZE
		       || (i->in.offset
			   > i->in.size - MODIFIED_PAGE_BLOCK_SIZE)) {

			/* Advance file */
			i->in_i++;
			success = os_file_close_no_error_handling(i->in.file);
			i->in.file = os_file_invalid;
			if (UNIV_UNLIKELY(!success)) {

				os_file_get_last_error(TRUE);
				i->failed = TRUE;
				return FALSE;
			}

			/* Warn about junk or an unterminated run at the end
			of the file just closed; the latter is an error. */
			success = log_online_diagnose_bitmap_eof(
				&i->in, i->last_page_in_run);
			if (UNIV_UNLIKELY(!success)) {

				i->failed = TRUE;
				return FALSE;
			}

			/* All files consumed: iteration is complete. */
			if (i->in_i == i->in_files.count) {

				return FALSE;
			}

			/* A zero sequence number marks a gap in the bitmap
			file sequence, so the data would be incomplete. */
			if (UNIV_UNLIKELY(i->in_files.files[i->in_i].seq_num
					  == 0)) {

				i->failed = TRUE;
				return FALSE;
			}

			success = log_online_open_bitmap_file_read_only(
				i->in_files.files[i->in_i].name,
				&i->in);
			if (UNIV_UNLIKELY(!success)) {

				i->failed = TRUE;
				return FALSE;
			}
		}

		success = log_online_read_bitmap_page(&i->in, i->page,
						      &checksum_ok);
		if (UNIV_UNLIKELY(!success)) {

			os_file_get_last_error(TRUE);
			ib_logf(IB_LOG_LEVEL_WARN,
				"failed reading changed page bitmap file "
				"\'%s\'\n", i->in_files.files[i->in_i].name);
			i->failed = TRUE;
			return FALSE;
		}
	}

	/* Decode the freshly read block header and position the iterator on
	its first bit. */
	i->start_lsn = mach_read_from_8(i->page + MODIFIED_PAGE_START_LSN);
	i->end_lsn = mach_read_from_8(i->page + MODIFIED_PAGE_END_LSN);
	i->space_id = mach_read_from_4(i->page + MODIFIED_PAGE_SPACE_ID);
	i->first_page_id = mach_read_from_4(i->page
					    + MODIFIED_PAGE_1ST_PAGE_ID);
	i->last_page_in_run = mach_read_from_4(i->page
					       + MODIFIED_PAGE_IS_LAST_BLOCK);
	i->bit_offset = 0;
	i->changed = IS_BIT_SET(i->page + MODIFIED_PAGE_BLOCK_BITMAP,
				i->bit_offset);
	return TRUE;
}
/************************************************************//**
Delete all the bitmap files for data less than the specified LSN.
If called with lsn == 0 (i.e. set by RESET request) or LSN_MAX,
restart the bitmap file sequence, otherwise continue it.
@return FALSE to indicate success, TRUE for failure. */
UNIV_INTERN
ibool
log_online_purge_changed_page_bitmaps(
/*==================================*/
	lsn_t	lsn)	/*!< in: LSN to purge files up to */
{
	log_online_bitmap_file_range_t	bitmap_files;
	size_t				i;
	ibool				result = FALSE;

	/* LSN 0 (set by a RESET request) means the same as LSN_MAX: purge
	everything and restart the file sequence. */
	if (lsn == 0) {
		lsn = LSN_MAX;
	}

	if (srv_track_changed_pages) {
		/* User requests might happen with both enabled and disabled
		tracking */
		mutex_enter(&log_bmp_sys->mutex);
	}

	/* Enumerate all bitmap files; TRUE return value means failure here. */
	if (!log_online_setup_bitmap_file_range(&bitmap_files, 0, LSN_MAX)) {
		if (srv_track_changed_pages) {
			mutex_exit(&log_bmp_sys->mutex);
		}
		return TRUE;
	}

	if (srv_track_changed_pages && lsn > log_bmp_sys->end_lsn) {
		/* If we have to delete the current output file, close it
		first. */
		os_file_close(log_bmp_sys->out.file);
		log_bmp_sys->out.file = os_file_invalid;
	}

	for (i = 0; i < bitmap_files.count; i++) {

		/* We consider the end LSN of the current bitmap, derived from
		the start LSN of the subsequent bitmap file, to determine
		whether to remove the current bitmap. Note that bitmap_files
		does not contain an entry for the bitmap past the given LSN so
		we must check the boundary conditions as well. For example,
		consider 1_0.xdb and 2_10.xdb and querying LSN 5. bitmap_files
		will only contain 1_0.xdb and we must not delete it since it
		represents LSNs 0-9. */
		if ((i + 1 == bitmap_files.count
		     || bitmap_files.files[i + 1].seq_num == 0
		     || bitmap_files.files[i + 1].start_lsn > lsn)
		    && (lsn != LSN_MAX)) {

			break;
		}

		if (!os_file_delete_if_exists(innodb_file_bmp_key,
					      bitmap_files.files[i].name)) {

			os_file_get_last_error(TRUE);
			result = TRUE;
			break;
		}
	}

	if (srv_track_changed_pages) {
		if (lsn > log_bmp_sys->end_lsn) {
			/* The current output file was deleted above, so a new
			one must be started. */
			lsn_t	new_file_lsn;
			if (lsn == LSN_MAX) {
				/* RESET restarts the sequence */
				log_bmp_sys->out_seq_num = 0;
				new_file_lsn = 0;
			} else {
				new_file_lsn = log_bmp_sys->end_lsn;
			}
			if (!log_online_rotate_bitmap_file(new_file_lsn)) {
				/* If file create failed, signal the log
				tracking thread to quit next time it wakes
				up. */
				srv_track_changed_pages = FALSE;
			}
		}

		mutex_exit(&log_bmp_sys->mutex);
	}

	/* NOTE(review): released with free() here but with ut_free() in
	log_online_bitmap_iterator_release() -- confirm which allocator
	log_online_setup_bitmap_file_range() uses. */
	free(bitmap_files.files);

	return result;
}
|
joelmahoney/discoverbps
|
app/helpers/admin/demand_data_helper.rb
|
# View helpers for the admin demand-data pages.
# Intentionally empty for now; shared presentation logic for these views
# belongs here.
module Admin::DemandDataHelper
end
|
lilsweetcaligula/Online-Judges
|
hackerrank/algorithms/bit_manipulation/easy/flipping_bits/py/solution.py
|
<reponame>lilsweetcaligula/Online-Judges<filename>hackerrank/algorithms/bit_manipulation/easy/flipping_bits/py/solution.py
#!/bin/python
# HackerRank "Flipping Bits": for each query, print the value obtained by
# flipping all 32 bits of an unsigned 32-bit integer.
# NOTE(review): the ctypes import appears unused -- confirm before removing.
import ctypes
# Number of test cases to process (Python 2 input functions).
testCount = int(raw_input())
for testId in range(testCount):
    value = int(raw_input())
    # Python's ~ yields a negative arbitrary-precision integer, so mask to
    # 32 bits to obtain the unsigned one's complement.
    flipped = ~value & 0xFFFFFFFF
    print flipped
|
shiniwat/sdl_ios
|
SmartDeviceLink/SDLSystemCapabilityType.h
|
<reponame>shiniwat/sdl_ios
//
// SDLSystemCapabilityType.h
// SmartDeviceLink-iOS
//
// Created by <NAME> on 7/10/17.
// Copyright © 2017 smartdevicelink. All rights reserved.
//
#import <Foundation/Foundation.h>
#import "SDLEnum.h"
/**
The type of system capability to get more information on
*/
@interface SDLSystemCapabilityType : SDLEnum

/**
 *  Convert a String to an SDLSystemCapabilityType.
 *
 *  @param value String to convert
 *  @return The matching SDLSystemCapabilityType
 */
+ (SDLSystemCapabilityType *)valueOf:(NSString *)value;

/**
 *  Returns an array of all possible SDLSystemCapabilityType values.
 *
 *  @return An array containing every SDLSystemCapabilityType
 */
+ (NSArray *)values;

/**
 *  @abstract NAVIGATION
 */
+ (SDLSystemCapabilityType *)NAVIGATION;

/**
 *  @abstract PHONE_CALL
 */
+ (SDLSystemCapabilityType *)PHONE_CALL;

/**
 *  @abstract VIDEO_STREAMING
 */
+ (SDLSystemCapabilityType *)VIDEO_STREAMING;

@end
|
JACKCHEN96/Iceberg
|
src/main/java/com/iceberg/controller/UserInfoController.java
|
<filename>src/main/java/com/iceberg/controller/UserInfoController.java
package com.iceberg.controller;
import com.iceberg.entity.Privilege;
import com.iceberg.entity.Role;
import com.iceberg.entity.UserInfo;
import com.iceberg.service.PrivilegeService;
import com.iceberg.service.UserInfoService;
import com.iceberg.utils.Config;
import com.iceberg.utils.PageModel;
import com.iceberg.utils.Result;
import com.iceberg.utils.ResultUtil;
import com.iceberg.utils.Utils;
import java.io.IOException;
import java.util.List;
import javax.annotation.Resource;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.ResponseBody;
/**
* description: this class is mainly focus on user's account information.
*/
@Controller
public class UserInfoController {
@Resource
private UserInfoService userInfoService;
@Resource
private PrivilegeService privilegeService;
/**
* login handler.
* @param request http request
* @param response http response
* @return redirect path.
*/
@RequestMapping(value = {"/", "login.html"})
public String toLogin(HttpServletRequest request, HttpServletResponse response) {
HttpSession session = request.getSession();
if (session.getAttribute(Config.CURRENT_USERNAME) == null) {
System.out.println("session attribute is null");
return "login";
} else {
try {
response.sendRedirect("/pages/index");
return null;
} catch (IOException e) {
return "login";
}
}
}
/**
* get user info for login.
* @param userInfo user info
* @param request http request
* @param response http response
* @return result message.
*/
@RequestMapping(value = "/login.do")
@ResponseBody
public Result getUserInfo(UserInfo userInfo, HttpServletRequest request,
HttpServletResponse response) {
boolean userIsExisted = userInfoService.userIsExisted(userInfo);
System.out.println(userIsExisted + " - " + request.getHeader("token"));
userInfo = getUserInfo(userInfo);
if (userIsExisted && userInfo == null) {
return ResultUtil.unSuccess("wrong username or password!");
} else {
// save user info in session
userInfo = setSessionUserInfo(userInfo, request.getSession());
// save user info in cookie
// setCookieUser(request,response);
return ResultUtil.success("login successful", userInfo);
}
}
/**
* get user info from service.
* @param userInfo user info.
* @return user info in the database.
*/
public UserInfo getUserInfo(UserInfo userInfo) {
return userInfoService.getUserInfo(userInfo);
}
/**
* get user privilege info through user info and save it in session.
*
* @param userInfo user info.
* @param session http session.
* @return user info.
*/
public UserInfo setSessionUserInfo(UserInfo userInfo, HttpSession session) {
List<Privilege> privileges = privilegeService.getPrivilegeByRoleid(userInfo.getRoleid());
userInfo.setPrivileges(privileges);
session.setAttribute(Config.CURRENT_USERNAME, userInfo);
return userInfo;
}
/**
* get user info by where.
* @param userInfo query user info.
* @param pageNo page no
* @param pageSize page size.
* @param session http session.
* @return result message
*/
@RequestMapping("/users/getUsersByWhere/{pageNo}/{pageSize}")
public @ResponseBody
Result getUsersByWhere(UserInfo userInfo, @PathVariable int pageNo, @PathVariable int pageSize,
HttpSession session) {
if ("".equals(userInfo.getGroupid()) || userInfo.getGroupid() == null) {
userInfo.setGroupid(null);
}
// if (userInfo.getRoleid() == -1) {
// //System.out.println("*****" + Config.getSessionUser(session));
// userInfo.setRoleid(Config.getSessionUser(session).getRoleid());
// }
//group manager cannot search administrator's userinfo.
if (userInfo.getGroupid() != null && userInfo.getRoleid() == 1) {
//cannot search user with role 1
userInfo.setRoleid(2);
}
Utils.log(userInfo.toString());
PageModel model = new PageModel<>(pageNo, userInfo);
model.setPageSize(pageSize);
return userInfoService.getUsersByWhere(model);
}
/**
* add user.
* @param userInfo user info
* @return result message in response body.
*/
@RequestMapping("/user/add")
public @ResponseBody
Result addUser(UserInfo userInfo) {
System.out.println(userInfo);
try {
int num = userInfoService.add(userInfo);
if (num > 0) {
return ResultUtil.success();
} else {
return ResultUtil.unSuccess();
}
} catch (Exception e) {
return ResultUtil.error(e);
}
}
/**
* update user info.
* @param userInfo user info.
* @return result message in response body.
*/
@RequestMapping("/user/update")
public @ResponseBody
Result updateUser(UserInfo userInfo, HttpSession session) {
try {
int num = userInfoService.update(userInfo);
//if he updates his own info, then update the session user.
if (num > 0) {
System.out.println();
UserInfo sessionUser = Config.getSessionUser(session);
System.out.println("sessionUser is null?" + sessionUser);
System.out.println(sessionUser.getId());
System.out.println(userInfo.getId());
if (sessionUser.getId().equals(userInfo.getId())) {
System.out.println("same user update");
sessionUser.setUsername(userInfo.getUsername());
sessionUser.setRealname(userInfo.getRealname());
sessionUser.setEmail(userInfo.getEmail());
sessionUser.setPassword(<PASSWORD>());
}
System.out.println("SessionUser now is :" + sessionUser);
return ResultUtil.success();
} else {
return ResultUtil.unSuccess();
}
} catch (Exception e) {
return ResultUtil.error(e);
}
}
/**
* delete user.
* @param id user id.
* @return result message in response body.
*/
@RequestMapping("/user/del/{id}")
public @ResponseBody
Result deleteUser(@PathVariable String id) {
try {
int num = userInfoService.delete(id);
if (num > 0) {
return ResultUtil.success();
} else {
return ResultUtil.unSuccess();
}
} catch (Exception e) {
return ResultUtil.error(e);
}
}
/**
* get session user.
* @param session http session.
* @return result message.
*/
@RequestMapping("/getSessionUser")
@ResponseBody
public UserInfo getSessionUser(HttpSession session) {
UserInfo sessionUser = (UserInfo) session.getAttribute(Config.CURRENT_USERNAME);
sessionUser.setPassword(null);
return sessionUser;
}
/**
* logout.
* @param request http request.
* @param response http response
* @return redirect url.
*/
@RequestMapping("/logout")
public String logout(HttpServletRequest request, HttpServletResponse response) {
// delCookieUser(request, response);
request.getSession().removeAttribute(Config.CURRENT_USERNAME);
return "login";
}
/**
* redirect pages.
* @param page request page.
* @return redirect url.
*/
@RequestMapping("/pages/{page}")
public String toPage(@PathVariable String page) {
return page.replace("_", "/");
}
/**
* get all roles.
* @return all roles.
*/
@RequestMapping("/getAllRoles")
public @ResponseBody
Result<Role> getAllRoles() {
try {
List<Role> roles = userInfoService.getAllRoles();
return ResultUtil.success(roles);
} catch (Exception e) {
return ResultUtil.error(e);
}
}
/**
* add role.
* @param role role
* @return result message
*/
@RequestMapping("/role/add")
public @ResponseBody
Result addRole(Role role) {
try {
int num = userInfoService.addRole(role);
if (num > 0) {
privilegeService.addDefaultPrivilegesWhenAddRole(role.getRoleid().toString());
return ResultUtil.success();
} else {
return ResultUtil.unSuccess();
}
} catch (Exception e) {
return ResultUtil.error(e);
}
}
/**
* update role.
* @param role role
* @return result message.
*/
@RequestMapping("/role/update")
public @ResponseBody
Result updateRole(Role role) {
try {
int num = userInfoService.updateRole(role);
if (num > 0) {
return ResultUtil.success();
} else {
return ResultUtil.unSuccess();
}
} catch (Exception e) {
return ResultUtil.error(e);
}
}
/**
* delete role.
* @param roleid role id.
* @return result message
*/
@RequestMapping("/role/del/{roleid}")
public @ResponseBody
Result deleteRole(@PathVariable String roleid) {
try {
privilegeService.delPrivilegesWenDelRole(roleid);
int num = userInfoService.deleteRole(roleid);
if (num > 0) {
return ResultUtil.success();
} else {
privilegeService.addDefaultPrivilegesWhenAddRole(roleid);
return ResultUtil.unSuccess();
}
} catch (Exception e) {
return ResultUtil.error(e);
}
}
/**
* get role by id.
* @param id role id
* @return result message
*/
@RequestMapping("/getRole/{id}")
public @ResponseBody
Result getRoleById(@PathVariable String id) {
try {
Role role = userInfoService.getRoleById(id);
if (role != null) {
return ResultUtil.success(role);
} else {
return ResultUtil.unSuccess();
}
} catch (Exception e) {
return ResultUtil.error(e);
}
}
}
|
bschade18/moviecity
|
client/src/components/movie/MovieVideos.js
|
import React from 'react';
import { youtubeUrl } from '../../config';
const MovieVideos = ({ videos }) => (
<div className="movie-videos">
<h1>Trailers & Clips</h1>
<div className="movie-videos-container">
{videos.map((video) => (
<div className="m-3" key={video.id}>
<p className="movie-videos-title">{video.name}</p>
<iframe
className="movie-video-dimensions"
title={video.name}
src={`${youtubeUrl}${video.key}`}
frameBorder="0"
allow="accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture"
allowFullScreen
></iframe>
</div>
))}
</div>
</div>
);
export default MovieVideos;
|
extnet/docs5.ext.net
|
d3/d77/class_ext_1_1_net_1_1_bool_array_json_converter.js
|
// Doxygen-generated navigation index for Ext.Net.BoolArrayJsonConverter.
// Each entry is [ member name, documentation anchor URL, extra field (null here) ].
var class_ext_1_1_net_1_1_bool_array_json_converter =
[
    [ "CanConvert", "d3/d77/class_ext_1_1_net_1_1_bool_array_json_converter.html#a7190c8e8c79ab7f5ceba7e2566f33c18", null ],
    [ "ReadJson", "d3/d77/class_ext_1_1_net_1_1_bool_array_json_converter.html#abd4e57f49dd2ce5760255354b2ffb684", null ],
    [ "WriteJson", "d3/d77/class_ext_1_1_net_1_1_bool_array_json_converter.html#a4bf76d68ee069551b8ec5b1645b060bb", null ]
];
|
nandor/genm-opt
|
passes/sccp/lattice.h
|
// This file if part of the llir-opt project.
// Licensing information can be found in the LICENSE file.
// (C) 2018 <NAME>. All rights reserved.
#pragma once
#include <optional>
#include <llvm/ADT/APFloat.h>
#include <llvm/ADT/APInt.h>
#include <llvm/Support/raw_ostream.h>
using APInt = llvm::APInt;
using APFloat = llvm::APFloat;
class Global;
/**
 * Lattice for SCCP values.
 *
 * A value is a tagged union: kind_ selects which member of the anonymous
 * union below is active.  The copy constructor, destructor and assignment
 * operator are user-declared because the union holds non-trivial members
 * (APInt, APFloat).
 */
class Lattice {
public:
  /// Enumeration of lattice value kinds.
  enum class Kind {
    /// Top - value not encountered yet.
    UNKNOWN,
    /// Bot - value is not constant.
    OVERDEFINED,
    /// Constant integer.
    INT,
    /// Integral bit mask.
    MASK,
    /// Constant floating-point.
    FLOAT,
    /// Positive or negative float zero.
    FLOAT_ZERO,
    /// Offset into the frame.
    FRAME,
    /// Constant symbol with a potential offset.
    GLOBAL,
    /// Pointer which is not null.
    POINTER,
    /// Any offset into a pointer.
    RANGE,
    /// Constant, undefined.
    UNDEFINED,
  };

public:
  /// Copies a lattice value, including the active union member.
  Lattice(const Lattice &that);
  /// Destroys the active union member.
  ~Lattice();

  /// Returns the kind tag of this value.
  Kind GetKind() const { return kind_; }

  /// Kind predicates, one per Kind member.
  bool IsUnknown() const { return GetKind() == Kind::UNKNOWN; }
  bool IsOverdefined() const { return GetKind() == Kind::OVERDEFINED; }
  bool IsUndefined() const { return GetKind() == Kind::UNDEFINED; }
  bool IsInt() const { return GetKind() == Kind::INT; }
  bool IsMask() const { return GetKind() == Kind::MASK; }
  bool IsFloat() const { return GetKind() == Kind::FLOAT; }
  bool IsFloatZero() const { return GetKind() == Kind::FLOAT_ZERO; }
  bool IsGlobal() const { return GetKind() == Kind::GLOBAL; }
  bool IsFrame() const { return GetKind() == Kind::FRAME; }
  bool IsPointer() const { return GetKind() == Kind::POINTER; }
  bool IsRange() const { return GetKind() == Kind::RANGE; }
  /// True for any of the pointer-carrying kinds (POINTER, FRAME, GLOBAL).
  bool IsPointerLike() const { return IsPointer() || IsFrame() || IsGlobal(); }

  /// Accessors for the active union member; each asserts the matching kind.
  APInt GetInt() const { assert(IsInt()); return intVal_; }
  APInt GetKnown() const { assert(IsMask()); return maskVal_.Known; }
  APInt GetValue() const { assert(IsMask()); return maskVal_.Value; }
  APFloat GetFloat() const { assert(IsFloat()); return floatVal_; }
  unsigned GetFrameObject() const { assert(IsFrame()); return frameVal_.Obj; }
  int64_t GetFrameOffset() const { assert(IsFrame()); return frameVal_.Off; }
  Global *GetGlobalSymbol() const { assert(IsGlobal()); return globalVal_.Sym; }
  int64_t GetGlobalOffset() const { assert(IsGlobal()); return globalVal_.Off; }
  Global *GetRange() const { assert(IsRange()); return globalVal_.Sym; }

  /// Truthiness queries; defined out-of-line.
  bool IsTrue() const;
  bool IsFalse() const;

  /// Returns some integer, if the value is one.
  std::optional<APInt> AsInt() const
  {
    return IsInt() ? std::optional<APInt>(intVal_) : std::nullopt;
  }

  /// Returns some float, if the value is one.
  std::optional<APFloat> AsFloat() const
  {
    return IsFloat() ? std::optional<APFloat>(floatVal_) : std::nullopt;
  }

  /// Checks if two values are not identical.
  bool operator != (const Lattice &that) const { return !(*this == that); }

  /// Checks if two values are identical.
  bool operator == (const Lattice &that) const;

  /// Assigns a value to a lattice.
  Lattice &operator = (const Lattice &that);

  /// Least upper bound operator.
  Lattice LUB(const Lattice &that) const;

public:
  /// Creates an unknown value.
  static Lattice Unknown();
  /// Creates an overdefined value.
  static Lattice Overdefined();
  /// Creates an undefined value.
  static Lattice Undefined();
  /// Creates a unknown pointer value.
  static Lattice Pointer();
  /// Creates a frame value.
  static Lattice CreateFrame(unsigned obj, int64_t off);
  /// Creates a global value.
  static Lattice CreateGlobal(Global *g, int64_t Off = 0);
  /// Creates a range value over a global.
  static Lattice CreateRange(Global *g);
  /// Creates an integral value from an integer.
  static Lattice CreateInteger(int64_t i);
  /// Creates an integral value.
  static Lattice CreateInteger(const APInt &i);
  /// Creates a mask value.
  static Lattice CreateMask(const APInt &known, const APInt &values);
  /// Creates a floating value from a double.
  static Lattice CreateFloat(double f);
  /// Creates a floating value.
  static Lattice CreateFloat(const APFloat &f);
  /// Creates a float-point zero.
  static Lattice CreateFloatZero();

private:
  /// Creates a value of a certain kind; the union member is left for the
  /// caller (the static factories) to construct.
  Lattice(Kind kind) : kind_(kind) {}

  /// Kind of the lattice value.
  Kind kind_;

  /// Union of possible values; kind_ selects the active member.
  union {
    /// Integer value.
    APInt intVal_;
    /// Double value.
    APFloat floatVal_;
    /// Bit mask value.
    struct {
      /// Mask indicating the bits which have known values.
      APInt Known;
      /// Mask indicating the values of those bits.
      APInt Value;
    } maskVal_;
    /// Frame value.
    struct {
      /// Object identifier.
      unsigned Obj;
      /// Relative offset.
      int64_t Off;
    } frameVal_;
    /// Global value.
    struct {
      /// Base pointer.
      Global *Sym;
      /// Relative offset.
      int64_t Off;
    } globalVal_;
  };
};
llvm::raw_ostream &operator<<(llvm::raw_ostream &OS, const Lattice &l);
|
Hoommus/iscaribot
|
src/main/java/features/commands/events/CommandEventBuilder.java
|
package features.commands.events;
import model.entities.EnhancedGuild;
import model.entities.EnhancedMember;
import model.entities.EnhancedUser;
import net.dv8tion.jda.core.JDA;
import net.dv8tion.jda.core.entities.Member;
import net.dv8tion.jda.core.entities.Message;
import providers.EnhancedGuildsProvider;
import providers.EnhancedMembersProvider;
import providers.EnhancedUsersProvider;
import java.util.LinkedList;
import java.util.Map;
/**
 * Fluent builder for {@link CommandEvent} instances.
 *
 * <p>All setters return {@code this} so calls can be chained; {@link #build()}
 * assembles the event from whatever has been set so far.
 */
public class CommandEventBuilder {
    private EnhancedGuild enhancedGuild;
    private EnhancedUser enhancedUser;
    private Member member;
    private Message message;
    private String commandName;
    private LinkedList<String> args;
    private JDA jda;
    private Map<String, String> messageKeys;
    private long responseNumber;
    // Removed dead field: the original declared an EnhancedMembersProvider
    // that was never assigned, never read, had no setter and was not passed
    // to build().
    private EnhancedUsersProvider enhancedUsersProvider;
    private EnhancedGuildsProvider enhancedGuildsProvider;

    public CommandEventBuilder() {
    }

    public CommandEventBuilder(JDA jda, long responseNumber) {
        this.jda = jda;
        this.responseNumber = responseNumber;
    }

    /**
     * Builds the event from the currently configured fields.
     *
     * @return a new {@link CommandEvent}
     */
    public CommandEvent build() {
        return new CommandEvent(jda, responseNumber, enhancedGuild, enhancedUser, member, message,
                commandName, messageKeys, enhancedUsersProvider, enhancedGuildsProvider, args);
    }

    public CommandEventBuilder setEnhancedGuild(EnhancedGuild enhancedGuild) {
        this.enhancedGuild = enhancedGuild;
        return this;
    }

    public CommandEventBuilder setCommandName(String commandName) {
        this.commandName = commandName;
        return this;
    }

    public CommandEventBuilder setArgs(LinkedList<String> args) {
        this.args = args;
        return this;
    }

    public CommandEventBuilder setJda(JDA jda) {
        this.jda = jda;
        return this;
    }

    public CommandEventBuilder setResponseNumber(long responseNumber) {
        this.responseNumber = responseNumber;
        return this;
    }

    public CommandEventBuilder setEnhancedUser(EnhancedUser enhancedUser) {
        this.enhancedUser = enhancedUser;
        return this;
    }

    public CommandEventBuilder setMessage(Message message) {
        this.message = message;
        return this;
    }

    public CommandEventBuilder setMember(Member member) {
        this.member = member;
        return this;
    }

    public CommandEventBuilder setMessageKeys(Map<String, String> messageKeys) {
        this.messageKeys = messageKeys;
        return this;
    }

    public CommandEventBuilder setEnhancedUsersProvider(EnhancedUsersProvider enhancedUsersProvider) {
        this.enhancedUsersProvider = enhancedUsersProvider;
        return this;
    }

    public CommandEventBuilder setEnhancedGuildsProvider(EnhancedGuildsProvider enhancedGuildsProvider) {
        this.enhancedGuildsProvider = enhancedGuildsProvider;
        return this;
    }
}
|
magnuswstrom/solidbeans-core
|
src/main/java/com/solidbeans/core/security/jwt/spring/SecurityJwtRepository.java
|
<filename>src/main/java/com/solidbeans/core/security/jwt/spring/SecurityJwtRepository.java
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package com.solidbeans.core.security.jwt.spring;
import com.solidbeans.core.security.jwt.algorithm.Algorithm;
import com.solidbeans.core.security.jwt.claims.Claims;
import javax.servlet.http.HttpServletRequest;
import java.util.List;
/**
 * Repository callbacks used by the JWT security integration: supplies the
 * per-request secret used for signature verification and maps verified
 * claims to authorization roles.
 *
 * @author <EMAIL>
 * @param <T> type of the custom claims payload
 */
public interface SecurityJwtRepository<T> {

    /**
     * Should return secret depending on algorithm to match {@link com.solidbeans.core.security.jwt.algorithm.AlgorithmProvider#verify(byte[], byte[], Object)}.
     * Called before the claims are verified, so the claims must not yet be
     * trusted here.
     *
     * @param algorithm Algorithm to get secret for
     * @param claims Claims that shall be verified
     * @param httpServletRequest The request
     * @return Algorithm secret
     */
    Object algorithmSecret(Algorithm algorithm, Claims<T> claims, HttpServletRequest httpServletRequest);

    /**
     * Returns roles for claims. Claims are verified at this point and can be trusted.
     *
     * @param claims Claims to get roles for
     * @param httpServletRequest The request
     * @return All roles that claims has access to
     */
    List<String> roles(Claims<T> claims, HttpServletRequest httpServletRequest);
}
|
vandsonlima/SSP
|
src/test/java/org/jasig/ssp/web/api/PersonProgramStatusControllerIntegrationTest.java
|
/**
* Licensed to Apereo under one or more contributor license
* agreements. See the NOTICE file distributed with this work
* for additional information regarding copyright ownership.
* Apereo licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a
* copy of the License at the following location:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.jasig.ssp.web.api; // NOPMD
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.util.Collection;
import java.util.Date;
import java.util.UUID;
import org.hibernate.SessionFactory;
import org.jasig.ssp.model.ObjectStatus;
import org.jasig.ssp.model.Person;
import org.jasig.ssp.model.PersonProgramStatus;
import org.jasig.ssp.service.ObjectNotFoundException;
import org.jasig.ssp.service.PersonProgramStatusService;
import org.jasig.ssp.service.PersonService;
import org.jasig.ssp.service.impl.SecurityServiceInTestEnvironment;
import org.jasig.ssp.service.reference.ProgramStatusChangeReasonService;
import org.jasig.ssp.transferobject.PersonProgramStatusTO;
import org.jasig.ssp.transferobject.ServiceResponse;
import org.jasig.ssp.web.api.validation.ValidationException;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.slf4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import org.springframework.test.context.transaction.TransactionConfiguration;
import org.springframework.transaction.annotation.Transactional;
/**
* {@link PersonProgramStatusController} tests
*
* @author jon.adams
*/
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration("../ControllerIntegrationTests-context.xml")
@TransactionConfiguration()
@Transactional
public class PersonProgramStatusControllerIntegrationTest { // NOPMD by jon
@Autowired
private transient PersonProgramStatusController controller;
@Autowired
protected transient SessionFactory sessionFactory;
@Autowired
protected transient PersonProgramStatusService personProgramStatusService;
@Autowired
protected transient ProgramStatusChangeReasonService programStatusChangeReasonService;
@Autowired
protected transient PersonService personService;
private static final UUID PERSON_ID = UUID
.fromString("1010e4a0-1001-0110-1011-4ffc02fe81ff");
private static final UUID PROGRAM_STATUS_ID = UUID
.fromString("b2d12527-5056-a51a-8054-113116baab88");
private static final UUID PROGRAM_STATUS_CHANGE_REASON_ID = UUID
.fromString("b2d127d7-5056-a51a-802d-3717c4316d29");
@Autowired
private transient SecurityServiceInTestEnvironment securityService;
    /**
     * Set up the security service with an administrator user that is granted
     * the person-program-status read, write and delete permissions needed by
     * the tests below.
     */
    @Before
    public void setUp() {
        securityService.setCurrent(new Person(Person.SYSTEM_ADMINISTRATOR_ID),
                "ROLE_PERSON_PROGRAM_STATUS_READ",
                "ROLE_PERSON_PROGRAM_STATUS_WRITE",
                "ROLE_PERSON_PROGRAM_STATUS_DELETE");
    }
    /**
     * Test that the {@link PersonProgramStatusController#get(UUID, UUID)}
     * action throws {@link ObjectNotFoundException} when a random,
     * non-existent identifier is sent.
     *
     * @throws ValidationException
     *             If validation error occurred.
     * @throws ObjectNotFoundException
     *             If object could not be found (expected outcome).
     */
    @Test(expected = ObjectNotFoundException.class)
    public void testControllerGetOfInvalidId() throws ObjectNotFoundException,
            ValidationException {
        assertNotNull(
                "Controller under test was not initialized by the container correctly.",
                controller);

        final PersonProgramStatusTO obj = controller.get(PERSON_ID,
                UUID.randomUUID());

        // Not expected to be reached: the get() above must throw.
        assertNull(
                "Returned PersonProgramStatusTO from the controller should have been null.",
                obj);
    }
    /**
     * Test the
     * {@link PersonProgramStatusController#getAll(UUID, ObjectStatus, Integer, Integer, String, String)}
     * action: listing active statuses for a known person must yield a
     * non-null collection.
     *
     * @throws ObjectNotFoundException
     *             If object could not be found.
     */
    @Test
    public void testControllerAll() throws ObjectNotFoundException {
        final Collection<PersonProgramStatusTO> list = controller.getAll(
                PERSON_ID, ObjectStatus.ACTIVE, null, null, null, null)
                .getRows();

        assertNotNull("List should not have been null.", list);
    }
    /**
     * Test the
     * {@link PersonProgramStatusController#create(UUID, PersonProgramStatusTO)}
     * and {@link PersonProgramStatusController#delete(UUID, UUID)} actions:
     * create a status, verify the saved values, delete it, then confirm a
     * subsequent get() throws {@link ObjectNotFoundException}.
     *
     * @throws ValidationException
     *             If validation error occurred.
     * @throws ObjectNotFoundException
     *             If object could not be found (expected after deletion).
     */
    @Test(expected = ObjectNotFoundException.class)
    public void testControllerDelete() throws ValidationException,
            ObjectNotFoundException {
        final Date now = new Date();

        final PersonProgramStatusTO obj = createProgramStatus();
        obj.setEffectiveDate(now);

        final PersonProgramStatusTO saved = controller.create(PERSON_ID,
                obj);
        assertNotNull("Saved instance should not have been null.", saved);

        final UUID savedId = saved.getId();
        assertNotNull(
                "Saved instance identifier should not have been null.",
                savedId);

        assertEquals("Saved instance values did not match.", now,
                saved.getEffectiveDate());
        assertEquals("Saved instance sets did not match.",
                PROGRAM_STATUS_CHANGE_REASON_ID,
                saved.getProgramStatusChangeReasonId());

        // NOTE(review): delete()/get() are called here as (savedId, PERSON_ID)
        // while testControllerGetOfInvalidId calls get(PERSON_ID, id) --
        // confirm the controller's parameter order.
        final ServiceResponse response = controller.delete(savedId,
                PERSON_ID);

        assertNotNull("Deletion response should not have been null.",
                response);
        assertTrue("Deletion response did not return success.",
                response.isSuccess());

        final PersonProgramStatusTO afterDeletion = controller.get(savedId,
                PERSON_ID);
        // ObjectNotFoundException expected at this point
        assertNull(
                "Instance should not be able to get loaded after it has been deleted.",
                afterDeletion);
    }
/**
 * Test that the
 * {@link PersonProgramStatusController#create(UUID, PersonProgramStatusTO)}
 * action auto-expires old instances correctly.
 *
 * @throws ValidationException
 *             If validation error occurred.
 * @throws ObjectNotFoundException
 *             If object could not be found.
 */
@Test
public void testControllerCreateWithAutoExpiration()
throws ValidationException, ObjectNotFoundException {
final Date now = new Date();
final PersonProgramStatusTO obj = createProgramStatus();
obj.setEffectiveDate(now);
// First status for the person.
final PersonProgramStatusTO saved = controller.create(PERSON_ID,
obj);
final UUID savedId = saved.getId();
assertEquals("Saved effective date does not match.", now,
saved.getEffectiveDate());
assertEquals("Saved instance sets does not match.",
PROGRAM_STATUS_CHANGE_REASON_ID,
saved.getProgramStatusChangeReasonId());
// Creating a second status should implicitly expire the first one.
final PersonProgramStatusTO obj2 = createProgramStatus();
obj2.setEffectiveDate(now);
final PersonProgramStatusTO saved2 = controller.create(PERSON_ID,
obj2);
assertNotNull("Saved instance should not have been null.", saved2);
// Reload the first status and check it was stamped with an expiration.
final PersonProgramStatusTO autoExpired = controller.get(savedId,
PERSON_ID);
assertNotNull(
"Saved instance identifier should not have been null.",
autoExpired);
assertNotNull(
"Original instance should have been auto-expired when new one added.",
autoExpired.getExpirationDate());
assertNotNull(
"Original instance should have had an effective date.",
autoExpired.getEffectiveDate());
}
/**
 * Test that the
 * {@link PersonProgramStatusController#create(UUID, PersonProgramStatusTO)}
 * action prohibits duplicates correctly.
 *
 * @throws ValidationException
 *             If validation error occurred.
 * @throws ObjectNotFoundException
 *             If object could not be found.
 */
@Test(expected = ValidationException.class)
public void testControllerCreateDuplicateProhibition()
throws ValidationException, ObjectNotFoundException {
final Date now = new Date();
final PersonProgramStatusTO obj = createProgramStatus();
obj.setEffectiveDate(now);
final PersonProgramStatusTO saved = controller.create(PERSON_ID,
obj);
final UUID savedId = saved.getId();
assertEquals("Saved instance values did not match.", now,
saved.getEffectiveDate());
assertEquals("Saved instance sets did not match.",
PROGRAM_STATUS_CHANGE_REASON_ID,
saved.getProgramStatusChangeReasonId());
// The second create auto-expires the first (same behavior as the
// auto-expiration test above).
final PersonProgramStatusTO obj2 = createProgramStatus();
final PersonProgramStatusTO saved2 = controller.create(PERSON_ID,
obj2);
assertNotNull("Saved instance should not have been null.", saved2);
final PersonProgramStatusTO autoExpired = controller.get(savedId,
PERSON_ID);
assertNotNull(
"Saved instance identifier should not have been null.",
autoExpired);
assertNotNull(
"Original instance should have been auto-expired when new one added.",
autoExpired.getExpirationDate());
// Un-expiring the second status would yield two concurrent statuses for
// the same person — the save must be rejected with ValidationException.
saved2.setExpirationDate(null);
controller.save(saved2.getId(), PERSON_ID, saved2);
}
/**
 * Test that create() rejects a transfer object carrying a client-set ID
 * and a missing effective date, for a random (non-existent) person.
 */
@Test(expected = ValidationException.class)
public void testControllerCreateWithInvalidDataGetId()
throws ValidationException, ObjectNotFoundException {
final PersonProgramStatusTO obj = new PersonProgramStatusTO();
// An ID assigned by the client must be rejected on create.
obj.setId(UUID.randomUUID());
obj.setEffectiveDate(null);
controller.create(UUID.randomUUID(), obj);
// Only reached if no exception was thrown above.
fail("Create with invalid Person UUID should have thrown exception.");
}
/**
 * Factory for a minimal, valid PersonProgramStatusTO bound to the seeded
 * person, program status, and change-reason fixtures.
 */
public static PersonProgramStatusTO createProgramStatus() {
final PersonProgramStatusTO obj = new PersonProgramStatusTO();
obj.setPersonId(PERSON_ID);
obj.setObjectStatus(ObjectStatus.ACTIVE);
obj.setProgramStatusId(PROGRAM_STATUS_ID);
obj.setProgramStatusChangeReasonId(PROGRAM_STATUS_CHANGE_REASON_ID);
return obj;
}
/**
 * Test that getCurrent() returns null once the person's current program
 * status has been expired.
 *
 * NOTE(review): the expiration is set on the service-loaded entity without
 * an explicit save — presumably the open session persists it before the
 * controller call; confirm.
 */
@Test
public void testGetCurrent() throws ObjectNotFoundException,
ValidationException {
final PersonProgramStatus status = personProgramStatusService
.getCurrent(PERSON_ID);
final Date yesterday = new Date(new Date().getTime()
- (24 * 60 * 60 * 1000));
// Expire the current status as of yesterday.
status.setExpirationDate(yesterday);
// arrange, act
final PersonProgramStatusTO programStatus = controller
.getCurrent(PERSON_ID);
// assert
assertNull(
"No PersonProgramStatus should have been returned as they are all currently expired.",
programStatus);
}
/**
 * Test that getLogger() returns the matching log class name for the current
 * class under test.
 */
@Test
public void testLogger() {
final Logger logger = controller.getLogger();
assertEquals("Log class name did not match.", controller.getClass()
.getName(), logger.getName());
}
}
|
zycgit/rsf
|
framework/src/main/java/net/hasor/rsf/container/ServiceDefine.java
|
/*
* Copyright 2008-2009 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.hasor.rsf.container;
import net.hasor.core.Hasor;
import net.hasor.core.Provider;
import net.hasor.core.info.CustomerProvider;
import net.hasor.rsf.InterAddress;
import net.hasor.rsf.RsfBindInfo;
import net.hasor.rsf.address.RouteTypeEnum;
import net.hasor.rsf.domain.ServiceDomain;
import net.hasor.rsf.domain.warp.RsfBindInfoWrap;
import net.hasor.rsf.utils.StringUtils;
import java.util.*;
/**
* 服务对象,封装了服务元信息、RsfFilter、服务提供者(如果有)。
* @version : 2014年11月12日
* @author 赵永春(<EMAIL>)
*/
class ServiceDefine<T> extends RsfBindInfoWrap<T> implements CustomerProvider<T>, RsfBindInfo<T>, RsfDomainProvider<T> {
    private final List<FilterDefine> filterList;              // filters bound to this service, in registration order
    private Provider<? extends T> customerProvider;           // service provider, if this process exports the service
    private String oriFlowControl;                            // raw flow-control strategy config
    private final Map<RouteTypeEnum, String> oriRouteScript;  // routing scripts, keyed by route type
    private final Set<InterAddress> oriAddressSet;            // statically registered provider addresses
    //
    public ServiceDefine(Class<T> bindType) {
        this(new ServiceDomain<T>(Hasor.assertIsNotNull(bindType)));
    }
    public ServiceDefine(ServiceDomain<T> domain) {
        super(Hasor.assertIsNotNull(domain));
        this.filterList = new ArrayList<FilterDefine>();
        this.oriRouteScript = new HashMap<RouteTypeEnum, String>();
        this.oriAddressSet = new HashSet<InterAddress>();
    }
    /** Adds a filter; a filter whose ID is already registered is silently ignored. */
    public void addRsfFilter(FilterDefine filterDefine) {
        Hasor.assertIsNotNull(filterDefine.filterID());
        for (FilterDefine filterDef : this.filterList) {
            String defFilterID = filterDef.filterID();
            String filterID = filterDefine.filterID();
            //
            if (filterID.equals(defFilterID)) {
                return;
            }
        }
        this.filterList.add(filterDefine);
    }
    /** Returns a read-only view of the filters configured on this service. */
    public List<FilterDefine> getFilterSnapshots() {
        return Collections.unmodifiableList(this.filterList);
    }
    /** Returns the service provider. */
    @Override
    public Provider<? extends T> getCustomerProvider() {
        return this.customerProvider;
    }
    public void setCustomerProvider(Provider<? extends T> customerProvider) {
        this.customerProvider = customerProvider;
    }
    //
    /** Registers a static provider address for this service. */
    public void addAddress(InterAddress rsfAddress) {
        this.oriAddressSet.add(Hasor.assertIsNotNull(rsfAddress));
    }
    /** Returns a read-only view of the registered provider addresses. */
    public Set<InterAddress> getAddressSet() {
        return Collections.unmodifiableSet(this.oriAddressSet);
    }
    //
    /** Returns the flow-control strategy. */
    public String getFlowControl() {
        return this.oriFlowControl;
    }
    public void setFlowControl(String oriFlowControl) {
        this.oriFlowControl = oriFlowControl;
    }
    //
    /** Sets the routing script for the given route type; null type or blank script is ignored. */
    public void setRouteScript(RouteTypeEnum routeType, String scriptBody) {
        if (routeType == null || StringUtils.isBlank(scriptBody)) {
            return;
        }
        this.oriRouteScript.put(routeType, scriptBody);
    }
    /** Returns a read-only view of the routing scripts. */
    public Map<RouteTypeEnum, String> getRouteScript() {
        return Collections.unmodifiableMap(this.oriRouteScript);
    }
    //
    /** Returns the service metadata. */
    public ServiceDomain<T> getDomain() {
        return (ServiceDomain<T>) this.getTarget();
    }
    @Override
    public String toString() {
        StringBuilder buffer = new StringBuilder("");
        List<FilterDefine> defines = this.filterList;
        // NOTE(review): filterList is final and initialized in the constructor,
        // so this null branch appears unreachable — confirm before removing.
        if (defines == null) {
            buffer.append(" null");
        } else {
            for (FilterDefine define : defines) {
                buffer.append(define.filterID());
                buffer.append(",");
            }
        }
        return "ServiceDefine[Domain=" + this.getTarget() + ",Filters=" + buffer.toString() + "]";
    }
}
|
gitter-badger/ZeloEngine
|
Engine/Core/LuaScript/LuaScriptManager.cpp
|
<gh_stars>0
// LuaScriptManager.cpp
// created on 2021/5/5
// author @zoloypzuo
#include "ZeloPreCompiledHeader.h"
#include "LuaScriptManager.h"
#include "Core/Resource/ResourceManager.h"
#include "Core/Plugin/Plugin.h"
using namespace Zelo::Core::Resource;
using namespace Zelo::Core::LuaScript;
void LuaBind_Main(sol::state &luaState);
// Explicit definition of the singleton instance pointer for this specialization.
template<> LuaScriptManager *Singleton<LuaScriptManager>::msSingleton = nullptr;
// Returns the singleton instance, or nullptr if it has not been constructed yet.
LuaScriptManager *LuaScriptManager::getSingletonPtr() {
    return msSingleton;
}
// Returns the singleton instance; asserts that it has been constructed.
LuaScriptManager &LuaScriptManager::getSingleton() {
    assert(msSingleton);
    return *msSingleton;
}
// Boot sequence: create the "lua" logger, wire events, configure the Lua
// state, then execute the main entry script.
void LuaScriptManager::initialize() {
    m_logger = spdlog::default_logger()->clone("lua");
    initEvents();
    initLuaContext();
    loadLuaMain();
}
// Configure the embedded sol2 Lua state: install the exception/panic handlers,
// open the standard Lua libraries, and register the engine's native bindings.
void LuaScriptManager::initLuaContext() {
    set_exception_handler(luaExceptionHandler);
    set_panic(luaAtPanic);
    open_libraries(
            // print, assert, and other base functions
            sol::lib::base,
            // require and other package functions
            sol::lib::package,
            // coroutine functions and utilities
            sol::lib::coroutine,
            // string library
            sol::lib::string,
            // functionality from the OS
            sol::lib::os,
            // all things math
            sol::lib::math,
            // the table manipulator and observer functions
            sol::lib::table,
            // the debug library
            sol::lib::debug,
            // the bit library: different based on which you're using
            sol::lib::bit32,
            // input/output library
            sol::lib::io,
            // library for handling utf8: new to Lua
            sol::lib::utf8
    );
    // Register the engine's C++ API into this Lua state.
    LuaBind_Main(*this);
}
// Invoke the global Lua Finalize() hook at shutdown.
void LuaScriptManager::finalize() {
    luaCall("Finalize");
}
// Invoke the global Lua Update() hook once per tick.
void LuaScriptManager::update() {
    luaCall("Update");
}
// Lua-facing print replacement: space-joins all arguments (like Python's
// print) and routes the result to the "lua" logger at debug level.
void LuaScriptManager::luaPrint(sol::variadic_args va) {
    auto &logger = LuaScriptManager::getSingleton().m_logger;
    // " ".join(va)
    std::vector<std::string> va_string;
    for (auto v: va) {
        va_string.push_back(v.as<std::string>());
    }
    std::ostringstream oss;
    std::copy(va_string.begin(), va_string.end(), std::ostream_iterator<std::string>(oss, " "));
    logger->debug(oss.str());
}
// Placeholder: no engine events are wired up yet.
void LuaScriptManager::initEvents() {
}
// Load and execute the entry script <script dir>/Lua/main.lua.
void LuaScriptManager::loadLuaMain() {
    auto mainLuaPath = ResourceManager::getSingletonPtr()->getScriptDir() / "Lua" / "main.lua";
    doFile(mainLuaPath.string());
}
// Invoke the global Lua Initialize() hook.
void LuaScriptManager::callLuaInitializeFn() {
    luaCall("Initialize");
}
// Call <PluginName>.Initialize() if the plugin's Lua table defines it.
void LuaScriptManager::callLuaPluginInitializeFn(Plugin *plugin) {
    auto klass = this->get<sol::table>(plugin->getName());
    auto init = klass.get<sol::optional<sol::protected_function>>("Initialize");
    if (init.has_value()) {
        luaCall(init.value());
    }
}
// Call <PluginName>.Update() if the plugin's Lua table defines it.
void LuaScriptManager::callLuaPluginUpdateFn(Plugin *plugin) {
    auto klass = this->get<sol::table>(plugin->getName());
    auto init = klass.get<sol::optional<sol::protected_function>>("Update");
    if (init.has_value()) {
        luaCall(init.value());
    }
}
// Execute a Lua chunk from a string; on failure, log and rethrow as sol::error.
void LuaScriptManager::doString(const std::string &luaCode) {
    sol::optional<sol::error> script_result = safe_script(luaCode);
    if (script_result.has_value()) {
        m_logger->error("failed to dostring {}\n{}", luaCode, script_result.value().what());
        throw sol::error(script_result.value().what());
    }
}
// Execute a Lua script file; on failure, log and rethrow as sol::error.
void LuaScriptManager::doFile(const std::string &luaFile) {
    sol::optional<sol::error> script_result = safe_script_file(luaFile);
    if (script_result.has_value()) {
        m_logger->error("failed to dofile {}\n{}", luaFile, script_result.value().what());
        throw sol::error(script_result.value().what());
    }
}
// sol2 exception handler: logs the C++ exception text and pushes it onto the
// Lua stack for Lua-side error handling. Returns the number of pushed values.
int LuaScriptManager::luaExceptionHandler(
        lua_State *L,
        sol::optional<const std::exception &>,
        sol::string_view what) {
    std::shared_ptr<spdlog::logger> &logger = LuaScriptManager::getSingletonPtr()->m_logger;
    logger->error("[sol3] An exception occurred: {}", std::string(what.data(), what.size()));
    lua_pushlstring(L, what.data(), what.size()); // NOLINT(readability-container-size-empty)
    return 1;
}
// lua_atpanic replacement: converts an unrecoverable Lua error into a C++
// sol::error exception instead of letting Lua abort the process.
int LuaScriptManager::luaAtPanic(lua_State *L) {
    size_t message_size{};
    const char *message = lua_tolstring(L, -1, &message_size);
    if (message) {
        std::string err(message, message_size);
        lua_settop(L, 0);
        std::shared_ptr<spdlog::logger> &logger = LuaScriptManager::getSingletonPtr()->m_logger;
        logger->error("[sol3] An error occurred and panic has been invoked: {}", err);
        throw sol::error(err);
    }
    lua_settop(L, 0);
    throw sol::error(std::string("An unexpected error occurred and panic has been invoked"));
}
|
nroduit/jai-imageio-core
|
src/main/java/jj2000/j2k/util/MathUtil.java
|
<reponame>nroduit/jai-imageio-core
/*
* $RCSfile: MathUtil.java,v $
* $Revision: 1.1 $
* $Date: 2005-02-11 05:02:25 $
* $State: Exp $
*
* Class: MathUtil
*
* Description: Utility mathematical methods
*
*
*
* COPYRIGHT:
*
* This software module was originally developed by <NAME> and
* <NAME> (Swiss Federal Institute of Technology-EPFL); Joel
* Askelöf (Ericsson Radio Systems AB); and <NAME>, David
* Bouchard, <NAME>, <NAME> and <NAME> (Canon Research
* Centre France S.A) in the course of development of the JPEG2000
* standard as specified by ISO/IEC 15444 (JPEG 2000 Standard). This
* software module is an implementation of a part of the JPEG 2000
* Standard. Swiss Federal Institute of Technology-EPFL, Ericsson Radio
* Systems AB and Canon Research Centre France S.A (collectively JJ2000
* Partners) agree not to assert against ISO/IEC and users of the JPEG
* 2000 Standard (Users) any of their rights under the copyright, not
* including other intellectual property rights, for this software module
* with respect to the usage by ISO/IEC and Users of this software module
* or modifications thereof for use in hardware or software products
* claiming conformance to the JPEG 2000 Standard. Those intending to use
* this software module in hardware or software products are advised that
* their use may infringe existing patents. The original developers of
* this software module, JJ2000 Partners and ISO/IEC assume no liability
* for use of this software module or modifications thereof. No license
* or right to this software module is granted for non JPEG 2000 Standard
* conforming products. JJ2000 Partners have full right to use this
* software module for his/her own purpose, assign or donate this
* software module to any third party and to inhibit third parties from
* using this software module for non JPEG 2000 Standard conforming
* products. This copyright notice must be included in all copies or
* derivative works of this software module.
*
* Copyright (c) 1999/2000 JJ2000 Partners.
* */
package jj2000.j2k.util;
/**
* This class contains a collection of utility methods fro mathematical
* operations. All methods are static.
* */
public class MathUtil {

    /**
     * Method that calculates the floor of the log, base 2, of 'x'. The
     * calculation is performed in integer arithmetic, therefore, it is exact.
     *
     * @param x The value to calculate log2 on. Must be strictly positive.
     *
     * @return floor(log(x)/log(2)), calculated in an exact way.
     *
     * @throws IllegalArgumentException If 'x' is zero or negative.
     * */
    public static int log2(int x) {
        // No log of 0 or negative
        if (x <= 0) {
            throw new IllegalArgumentException("" + x + " <= 0");
        }
        // floor(log2(x)) is the index of the highest set bit.
        return 31 - Integer.numberOfLeadingZeros(x);
    }

    /**
     * Method that calculates the Least Common Multiple (LCM) of two strictly
     * positive integer numbers.
     *
     * @param x1 First number, strictly positive.
     *
     * @param x2 Second number, strictly positive.
     *
     * @return The LCM of 'x1' and 'x2'.
     *
     * @throws IllegalArgumentException If either number is not strictly
     * positive.
     * */
    public static final int lcm(int x1, int x2) {
        if (x1 <= 0 || x2 <= 0) {
            throw new IllegalArgumentException("Cannot compute the least " +
                                               "common multiple of two " +
                                               "numbers if one, at least, " +
                                               "is negative.");
        }
        // lcm(a,b) = a/gcd(a,b)*b. Dividing before multiplying keeps the
        // intermediate value small, avoiding the overflow risk of the old
        // "search for the first common multiple" loop, and runs in O(log n)
        // instead of O(min(a,b)).
        return x1 / gcd(x1, x2) * x2;
    }

    /**
     * Method that calculates the Least Common Multiple (LCM) of several
     * strictly positive integer numbers.
     *
     * @param x Array containing the numbers; must hold at least two elements.
     *
     * @return The LCM of all the numbers in 'x'.
     *
     * @throws Error If fewer than two numbers are given.
     * @throws IllegalArgumentException If any number is not strictly positive.
     * */
    public static final int lcm(int[] x) {
        if (x.length < 2) {
            throw new Error("Do not use this method if there are less than" +
                            " two numbers.");
        }
        // Fold from the end of the array (same order as the original code);
        // the last two elements are validated by lcm(int,int) itself.
        int tmp = lcm(x[x.length - 1], x[x.length - 2]);
        for (int i = x.length - 3; i >= 0; i--) {
            if (x[i] <= 0) {
                throw new IllegalArgumentException("Cannot compute the least " +
                                                   "common multiple of " +
                                                   "several numbers where " +
                                                   "one, at least, " +
                                                   "is negative.");
            }
            tmp = lcm(tmp, x[i]);
        }
        return tmp;
    }

    /**
     * Method that calculates the Greatest Common Divisor (GCD) of two
     * non-negative integer numbers, using Euclid's algorithm.
     *
     * @param x1 First number, non-negative.
     *
     * @param x2 Second number, non-negative.
     *
     * @return The GCD of 'x1' and 'x2', or 0 when the smaller of the two is 0
     * (historical behavior of this class, kept for backward compatibility).
     *
     * @throws IllegalArgumentException If either number is negative.
     * */
    public static final int gcd(int x1, int x2) {
        if (x1 < 0 || x2 < 0) {
            throw new IllegalArgumentException("Cannot compute the GCD " +
                                               "if one integer is negative.");
        }
        int a = Math.max(x1, x2);
        int b = Math.min(x1, x2);
        // NOTE: mathematically gcd(a, 0) == a, but this method has always
        // returned 0 in that case; callers may depend on it.
        if (b == 0) {
            return 0;
        }
        while (b != 0) {
            final int r = a % b;
            a = b;
            b = r;
        }
        return a;
    }

    /**
     * Method that calculates the Greatest Common Divisor (GCD) of several
     * non-negative integer numbers.
     *
     * @param x Array containing the numbers; must hold at least two elements.
     *
     * @return The GCD of all the numbers in 'x'.
     *
     * @throws Error If fewer than two numbers are given.
     * @throws IllegalArgumentException If any number is negative.
     * */
    public static final int gcd(int[] x) {
        if (x.length < 2) {
            throw new Error("Do not use this method if there are less than" +
                            " two numbers.");
        }
        int tmp = gcd(x[x.length - 1], x[x.length - 2]);
        for (int i = x.length - 3; i >= 0; i--) {
            if (x[i] < 0) {
                throw new IllegalArgumentException("Cannot compute the GCD " +
                                                   "of several numbers where " +
                                                   "one, at least, " +
                                                   "is negative.");
            }
            tmp = gcd(tmp, x[i]);
        }
        return tmp;
    }
}
|
tidalwave-it/northernwind-rca-src
|
modules/CommonsUI/src/test/java/it/tidalwave/northernwind/frontend/filesystem/impl/ResourceFileNetBeansPlatformWritableFolderTest.java
|
/*
* #%L
* *********************************************************************************************************************
*
* NorthernWind - lightweight CMS
* http://northernwind.tidalwave.it - git clone <EMAIL>:tidalwave/northernwind-rca-src.git
* %%
* Copyright (C) 2013 - 2021 Tidalwave s.a.s. (http://tidalwave.it)
* %%
* *********************************************************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*
* *********************************************************************************************************************
*
*
* *********************************************************************************************************************
* #L%
*/
package it.tidalwave.northernwind.frontend.filesystem.impl;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import org.openide.filesystems.FileLock;
import org.openide.filesystems.FileObject;
import it.tidalwave.role.io.Marshallable;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import lombok.extern.slf4j.Slf4j;
import static org.mockito.Mockito.*;
import static it.tidalwave.util.test.FileComparisonUtils.*;
/***********************************************************************************************************************
*
* @author <NAME>
*
**********************************************************************************************************************/
@Slf4j
public class ResourceFileNetBeansPlatformWritableFolderTest
  {
    // Object under test: writable-folder adapter over a mocked NetBeans FileObject.
    private ResourceFileNetBeansPlatformWritableFolder underTest;

    private ResourceFileNetBeansPlatform fileNetBeansPlatform;

    // Mocked NetBeans file-system node backing the folder.
    private FileObject fileObject;

    // Real on-disk folder where the mocked FileObject streams its output.
    private File folder;

    private Marshallable marshallable;

    /*******************************************************************************************************************
     *
     * Prepares a mock FileObject whose children write to real files under
     * target/test-results/mockFileObject, so the written bytes can be
     * compared against the expected-results fixtures.
     *
     ******************************************************************************************************************/
    @BeforeMethod
    public void setup()
      {
        folder = new File("target/test-results/mockFileObject");
        log.info("Creating {} ...", folder.getAbsolutePath());
        folder.mkdirs();
        // assertThat(folder.mkdirs(), is(true));
        fileObject = mock(FileObject.class);
        when(fileObject.toString()).thenReturn("mockFileObject");
        // Each requested child file is a fresh mock whose output stream points
        // at a real file of the same name inside 'folder'.
        when(fileObject.getFileObject(anyString())).thenAnswer(invocation ->
          {
            final String fileName = (String)invocation.getArguments()[0];
            final FileObject childFile = mock(FileObject.class);
            when(childFile.toString()).thenReturn("mockFileObject/" + fileName);
            final FileLock lock = mock(FileLock.class);
            when(childFile.lock()).thenReturn(lock); // prevent later NPE
            when(childFile.getOutputStream(any(FileLock.class))).thenAnswer(
                invocation1 -> new FileOutputStream(new File(folder, fileName)));
            return childFile;
          });
        fileNetBeansPlatform = mock(ResourceFileNetBeansPlatform.class);
        when(fileNetBeansPlatform.getDelegate()).thenReturn(fileObject);
        underTest = new ResourceFileNetBeansPlatformWritableFolder(fileNetBeansPlatform);
        marshallable = out -> out.write("marshallable\n".getBytes(StandardCharsets.UTF_8));
      }

    /*******************************************************************************************************************
     *
     * Writing a text (with a non-ASCII character) must produce exactly the
     * bytes of the expected-results fixture.
     *
     ******************************************************************************************************************/
    @Test
    public void must_properly_write_a_text()
            throws IOException
      {
        // when
        underTest.write("fileName", "proven\u00E7al\n");
        // then
        final File expectedFile = new File("src/test/resources/expected-results/fileName");
        final File actualFile = new File(folder, "fileName");
        assertSameContents(expectedFile, actualFile);
      }

    /*******************************************************************************************************************
     *
     * Writing a Marshallable must stream its marshalled bytes to the file.
     *
     ******************************************************************************************************************/
    @Test
    public void must_properly_marshall()
            throws IOException
      {
        // when
        underTest.write("marshallable", marshallable);
        // then
        final File expectedFile = new File("src/test/resources/expected-results/marshallable");
        final File actualFile = new File(folder, "marshallable");
        assertSameContents(expectedFile, actualFile);
      }
  }
|
textbook/impasse
|
client/src/services/passwordService.integration.test.js
|
import { rest } from "msw";
import { setupServer } from "msw/node";
import { getPassword } from "./passwordService";
// Integration tests for getPassword() against an msw mock of the /api endpoint.
describe("service integration", () => {
  const password = "<PASSWORD>";
  // Default handler: every GET /api returns 200 with the canned password.
  const server = setupServer(
    rest.get("/api", (req, res, ctx) => {
      return res(ctx.status(200), ctx.json({ password }));
    })
  );
  beforeAll(() => server.listen());
  // Drop per-test handler overrides installed via server.use().
  beforeEach(() => server.resetHandlers());
  afterAll(() => server.close());
  it("makes a simple request", async () => {
    const result = await getPassword();
    expect(result).toEqual({ password });
  });
  it("makes a request with query parameters", async () => {
    // Handler asserts the min/max options are forwarded as query params.
    server.use(rest.get("/api", (req, res, ctx) => {
      expect(req.url.searchParams.get("min")).toBe("7");
      expect(req.url.searchParams.get("max")).toBe("7");
      return res(ctx.status(200), ctx.json({ password }));
    }));
    const result = await getPassword({ min: 7, max: 7 });
    expect(result).toEqual({ password });
  });
  it("exposes error messages on failure", async () => {
    // 400 responses carry structured errors that the service must flatten
    // into parallel 'descriptions' and 'fields' arrays.
    const errors = [
      { description: "pranged it", fields: ["foo"] },
      { description: "also an issue", fields: ["bar"] },
    ];
    server.use(rest.get("/api", (req, res, ctx) => {
      return res(ctx.status(400), ctx.json({ errors }));
    }));
    await expect(getPassword()).rejects.toMatchObject({
      descriptions: expect.arrayContaining(["pranged it", "also an issue"]),
      fields: expect.arrayContaining(["foo", "bar"]),
    });
  });
  it("tolerates 5xx errors", async () => {
    // Bodyless 5xx must still reject with a generic, well-shaped error.
    server.use(rest.get("/api", (req, res, ctx) => {
      return res(ctx.status(500));
    }));
    await expect(getPassword()).rejects.toEqual({ descriptions: ["Something went wrong"], fields: [] });
  });
});
|
krishnawork/Crm
|
node_modules/antd/es/date-picker/locale/lt_LT.js
|
<filename>node_modules/antd/es/date-picker/locale/lt_LT.js
// NOTE(review): vendored build artifact under node_modules — any edit here is
// lost on reinstall; change the upstream ant-design source instead.
import _extends from "@babel/runtime/helpers/extends";
import CalendarLocale from "rc-picker/es/locale/lt_LT";
import TimePickerLocale from '../../time-picker/locale/lt_LT'; // Merge into a locale object
// Lithuanian (lt_LT) locale for the antd DatePicker: placeholder strings
// layered over the rc-picker calendar locale, plus the time-picker locale.
var locale = {
  lang: _extends({
    placeholder: 'Pasirinkite datą',
    yearPlaceholder: 'Pasirinkite metus',
    quarterPlaceholder: 'Pasirinkite ketvirtį',
    monthPlaceholder: 'Pasirinkite mėnesį',
    weekPlaceholder: 'Pasirinkite savaitę',
    rangePlaceholder: ['Pradžios data', 'Pabaigos data'],
    rangeYearPlaceholder: ['Pradžios metai', 'Pabaigos metai'],
    rangeMonthPlaceholder: ['Pradžios mėnesis', 'Pabaigos mėnesis'],
    rangeWeekPlaceholder: ['Pradžios savaitė', 'Pabaigos savaitė']
  }, CalendarLocale),
  timePickerLocale: _extends({}, TimePickerLocale)
}; // All settings at:
// https://github.com/ant-design/ant-design/blob/master/components/date-picker/locale/example.json
export default locale;
|
gh-determined-ai/determined
|
harness/determined/launch/deepspeed.py
|
<gh_stars>0
"""
deepspeed.py is the launch layer for DeepSpeedTrial in Determined.
It launches the entrypoint script using DeepSpeed's launch process.
"""
import argparse
import logging
import os
import pathlib
import subprocess
import sys
import tempfile
import time
from typing import List
from deepspeed.launcher.runner import DEEPSPEED_ENVIRONMENT_NAME
import determined as det
from determined import constants, util
from determined.common import api
from determined.common.api import certs
# Lazily-initialized path of the generated hostfile; see get_hostfile_path().
hostfile_path = None
def is_using_cuda() -> bool:
    """Report whether CUDA devices are visible to this process.

    True iff CUDA_VISIBLE_DEVICES is set to a non-blank value.
    """
    devices = os.getenv("CUDA_VISIBLE_DEVICES", "")
    return bool(devices.strip())
def is_nccl_socket_ifname_env_var_set() -> bool:
    """Report whether NCCL_SOCKET_IFNAME is set to a non-blank value."""
    value = os.getenv("NCCL_SOCKET_IFNAME", "")
    return bool(value.strip())
def get_hostfile_path() -> str:
    """Return the path of the hostfile, creating a unique one on first call.

    The result is cached in the module-level ``hostfile_path`` so that every
    caller — and repeated calls from the tests — sees the same file name.
    """
    global hostfile_path
    # Ensure that "hostfile_path" is initialized only once. All subsquent calls
    # will return the same file name. The production code calls this only once,
    # but the tests call it multiple times and the tests will fail if a
    # a different name is returned, because the expected value will not match
    # the actual value of the command line that the test creates due to the
    # difference in the file name.
    if hostfile_path is None:
        # When the task container uses "/tmp" from the host, having a file with
        # a well-known name in a world writable directory is not only a security
        # issue, but it can also cause a user's experiment to fail due to the
        # file being owned by another user. Create the file securely with a
        # random name to avoid file name clashes between two different
        # experiments.
        temp_hostfile = tempfile.NamedTemporaryFile(
            prefix="/tmp/hostfile-", suffix=".txt", delete=False
        )
        hostfile_path = temp_hostfile.name
        temp_hostfile.close()
    return hostfile_path
def create_hostlist_file(
    hostfile_path: pathlib.Path, num_proc_per_machine: int, ip_addresses: List[str]
) -> str:
    """Write a DeepSpeed hostfile listing each node with its slot count.

    Returns the chief (first) worker address. For a single-node job the chief
    is reported as "localhost" so the DeepSpeed launcher skips pdsh/sshd and
    launches locally instead.
    """
    hosts = list(ip_addresses)
    # In the single node case, deepspeed doesn't use pdsh so we don't need to
    # launch sshd; the launcher uses localhost as the chief worker ip.
    if len(hosts) == 1:
        hosts = ["localhost"]
    os.makedirs(hostfile_path.parent, exist_ok=True)
    content = "".join(f"{host} slots={num_proc_per_machine}\n" for host in hosts)
    with open(hostfile_path, "w") as hostfile:
        hostfile.write(content)
    return hosts[0]
def create_pid_server_cmd(allocation_id: str, num_workers: int) -> List[str]:
    """Build the command prefix that runs the pid_server wrapper.

    The pid_server watches `num_workers` worker pids and forwards SIGTERM on
    failure or exit, with a 5 second grace period; the wrapped command
    follows the trailing "--".
    """
    server_socket = f"/tmp/pid_server-{allocation_id}"
    cmd = ["python3", "-m", "determined.exec.pid_server"]
    cmd += ["--on-fail", "SIGTERM"]
    cmd += ["--on-exit", "SIGTERM"]
    cmd += ["--grace-period", "5"]
    cmd += [server_socket, str(num_workers), "--"]
    return cmd
def create_pid_client_cmd(allocation_id: str) -> List[str]:
    """Build the command prefix that registers a worker with the pid_server."""
    server_socket = f"/tmp/pid_server-{allocation_id}"
    return ["python3", "-m", "determined.exec.pid_client", server_socket, "--"]
def create_log_redirect_cmd() -> List[str]:
    """Build the command prefix that tags each output line with the RANK env var."""
    wrapper = ["python3", "-m", "determined.launch.wrap_rank", "RANK"]
    return wrapper + ["--"]
def create_sshd_cmd() -> List[str]:
    """Build the foreground sshd command used for inter-node pdsh launches.

    Uses Determined's dedicated dtrain SSH port and its own sshd_config.
    """
    port = str(constants.DTRAIN_SSH_PORT)
    config = "/run/determined/ssh/sshd_config"
    return ["/usr/sbin/sshd", "-p", port, "-f", config, "-D"]
def create_deepspeed_env_file() -> None:
    """Create an env var export file to pass Determined vars to the deepspeed launcher.
    By default, the deepspeed launcher only keeps env vars that start with one of the following
    ["NCCL", "PYTHON", "MV2", "UCX"].
    There are certain variables that we need to be set that we can pass to deepspeed using
    a custom env vars file.
    """
    # Only these variables are forwarded to the remote workers.
    INCLUDE = [
        "PATH",
        "LD_LIBRARY_PATH",
        "USE_DEEPSPEED",
        "DET_CHIEF_IP",
        "DET_MANUAL_INIT_DISTRIBUTED",
    ]
    # DEEPSPEED_ENVIRONMENT_NAME is the well-known file name deepspeed reads
    # from the working directory (".deepspeed_env").
    with open(DEEPSPEED_ENVIRONMENT_NAME, "w") as f:
        # Snapshot the environment so concurrent mutation can't affect iteration.
        environ = os.environ.copy()
        for k, v in environ.items():
            if k in INCLUDE:
                f.write(f"{k}={v}\n")
def create_run_command(master_address: str, hostfile_path: str) -> List[str]:
    """Build the deepspeed launcher command prefix.

    Points the launcher at the generated hostfile and the chief address;
    --no_python/--no_local_rank because the harness entrypoint (after "--")
    manages Python and rank assignment itself.
    """
    launcher = ["deepspeed", "-H", hostfile_path]
    launcher += ["--master_addr", master_address]
    launcher += ["--no_python", "--no_local_rank", "--"]
    return launcher
def main(script: List[str]) -> int:
    """Launch `script` under deepspeed on this container.

    Non-chief containers (container_rank > 0) only run sshd so the chief's
    pdsh can reach them; the chief runs the deepspeed launcher wrapped in a
    pid_server.  Returns the exit code of the wrapped process.
    """
    info = det.get_cluster_info()
    assert info is not None, "must be run on-cluster"
    assert info.task_type == "TRIAL", f'must be run with task_type="TRIAL", not "{info.task_type}"'
    # Hack: get the resources id from the environment.
    resources_id = os.environ.get("DET_RESOURCES_ID")
    assert resources_id is not None, "Unable to run with DET_RESOURCES_ID unset"
    # TODO: refactor websocket, data_layer, and profiling to to not use the cli_cert.
    cert = certs.default_load(info.master_url)
    certs.cli_cert = cert
    # The launch layer should provide the chief_ip to the training code, so that the training code
    # can function with a different launch layer in a different environment. Inside Determined, the
    # easiest way to get the chief_ip is with container_addrs.
    chief_ip = info.container_addrs[0]
    # Chief IP is set as an environment variable to support nested launch layers
    os.environ["DET_CHIEF_IP"] = chief_ip
    # If the NCCL_SOCKET_IFNAME environment variable wasn't explicitly set by
    # the user in the experiment's YAML file, then set it to the distributed
    # network interface, if the value of "dtrain_network_interface" under
    # "task_container_defaults" has been set in the "master.yaml".
    if is_using_cuda() and not is_nccl_socket_ifname_env_var_set():
        dtrain_network_interface = os.environ.get("DET_INTER_NODE_NETWORK_INTERFACE", None)
        if dtrain_network_interface is not None and len(dtrain_network_interface) > 0:
            os.environ["NCCL_SOCKET_IFNAME"] = dtrain_network_interface
    # All ranks will need to run sshd.
    run_sshd_command = create_sshd_cmd()
    if info.container_rank > 0:
        # Non-chief machines just run sshd.
        # Mark sshd containers as daemon containers that the master should kill when all non-daemon
        # containers (deepspeed launcher, in this case) have exited.
        api.post(
            info.master_url,
            path=f"/api/v1/allocations/{info.allocation_id}/resources/{resources_id}/daemon",
            cert=cert,
        )
        # Wrap it in a pid_server to ensure that we can't hang if a worker fails.
        # This is useful for deepspeed which does not have good error handling for remote processes
        # spun up by pdsh.
        pid_server_cmd = create_pid_server_cmd(info.allocation_id, len(info.slot_ids))
        logging.debug(
            f"Non-chief [{info.container_rank}] training process launch "
            f"command: {run_sshd_command}."
        )
        p = subprocess.Popen(pid_server_cmd + run_sshd_command)
        with det.util.forward_signals(p):
            return p.wait()
    # We always need to set this variable to initialize the context correctly, even in the single
    # slot case.
    os.environ["USE_DEEPSPEED"] = "1"
    # The chief has several layers of wrapper processes:
    # - a top-level pid_server, which causes the whole container to exit if any local worker dies.
    # - deepspeed, which launches $slots_per_trial copies of the following layers:
    #     - a pid_client process to contact the local pid_server
    #     - wrap_rank, which redirects stdin/stdout to the local container
    #     - harness.py, which actually does the training for the worker
    pid_server_cmd = create_pid_server_cmd(info.allocation_id, len(info.slot_ids))
    hostfile_path = get_hostfile_path()
    master_address = create_hostlist_file(
        hostfile_path=pathlib.Path(hostfile_path),
        num_proc_per_machine=len(info.slot_ids),
        ip_addresses=info.container_addrs,
    )
    cmd = create_run_command(master_address, hostfile_path)
    pid_client_cmd = create_pid_client_cmd(info.allocation_id)
    log_redirect_cmd = create_log_redirect_cmd()
    harness_cmd = script
    logging.debug(f"chief worker calling deepspeed with args: {cmd[1:]} ...")
    full_cmd = pid_server_cmd + cmd + pid_client_cmd + log_redirect_cmd + harness_cmd
    multi_machine = len(info.container_addrs) > 1
    if not multi_machine:
        # Single machine: no sshd/pdsh needed; run the stack directly.
        p = subprocess.Popen(full_cmd)
        with det.util.forward_signals(p):
            return p.wait()
    # Create the environment file that will be passed by deepspeed to individual ranks.
    create_deepspeed_env_file()
    # Set custom PDSH args:
    # * bypass strict host checking
    # * -p our custom port
    # * other args are default ssh args for pdsh
    os.environ["PDSH_SSH_ARGS"] = (
        "-o PasswordAuthentication=no -o StrictHostKeyChecking=no "
        f"-p {constants.DTRAIN_SSH_PORT} -2 -a -x %h"
    )
    # Chief worker also needs to run sshd when using pdsh and multi-machine training.
    sshd_process = subprocess.Popen(run_sshd_command)
    try:
        # Chief machine waits for every worker's sshd to be available. All machines should be
        # close to in-step by now because all machines just finished synchronizing rendezvous
        # info.
        deadline = time.time() + 20
        for peer_addr in info.container_addrs:
            util.check_sshd(peer_addr, deadline, constants.DTRAIN_SSH_PORT)
        p = subprocess.Popen(full_cmd)
        with det.util.forward_signals(p):
            return p.wait()
    finally:
        # Always tear down the chief's sshd, even on failure above.
        sshd_process.kill()
        sshd_process.wait()
def parse_args(args: List[str]) -> List[str]:
    """Parse launcher CLI arguments and return the training-script argv.

    Accepts either a --trial entrypoint (converted to a script command) or a
    raw SCRIPT... positional; supplying both, or neither, is a usage error
    that exits with status 1.
    """
    parser = argparse.ArgumentParser(
        usage="%(prog)s (--trial TRIAL)|(SCRIPT...)",
        description=(
            "Launch a script under deepspeed on a Determined cluster, with automatic handling of "
            "IP addresses, sshd containers, and shutdown mechanics."
        ),
    )
    # For legacy Trial classes.
    parser.add_argument(
        "--trial",
        help=(
            "use a Trial class as the entrypoint to training. When --trial is used, the SCRIPT "
            "positional argument must be omitted."
        ),
    )
    # For training scripts.
    parser.add_argument(
        "script",
        metavar="SCRIPT...",
        nargs=argparse.REMAINDER,
        help="script to launch for training",
    )
    opts = parser.parse_args(args)
    script_args = opts.script or []
    if opts.trial is not None:
        if script_args:
            # When --trial is set, any other args are an error.
            parser.print_usage()
            print("error: extra arguments to --trial:", script_args, file=sys.stderr)
            sys.exit(1)
        return det.util.legacy_trial_entrypoint_to_script(opts.trial)
    if not script_args:
        # There needs to be at least one script argument.
        parser.print_usage()
        print("error: empty script is not allowed", file=sys.stderr)
        sys.exit(1)
    return script_args
if __name__ == "__main__":
    # Script entrypoint: resolve the training command from argv, then exit
    # with the launcher's return code.
    script = parse_args(sys.argv[1:])
    sys.exit(main(script))
|
diko316/joqx
|
src/executor.js
|
<gh_stars>0
'use strict';
import { thenable } from "libcore";
export
function promiseGuard(method) {
    // Wrap `method` so that a thenable first argument is resolved before the
    // call; all other arguments (and the `helper` receiver) pass through as-is.
    return function executor(helper, s1, s2, s3, s4, s5, s6, s7, s8) {
        if (!thenable(s1)) {
            return method.call(helper, s1, s2, s3, s4, s5, s6, s7, s8);
        }
        return s1.then(function (resolved) {
            return method.call(helper, resolved, s2, s3, s4, s5, s6, s7, s8);
        });
    };
}
|
adamkorynta/opendcs
|
src/main/java/decodes/tsdb/alarm/xml/AlarmXioTags.java
|
<reponame>adamkorynta/opendcs
/*
* $Id$
*
* Copyright 2017 Cove Software, LLC. All rights reserved.
*
* $Log$
* Revision 1.2 2019/05/10 18:35:26 mmaloney
* dev
*
* Revision 1.1 2019/03/05 14:53:01 mmaloney
* Checked in partial implementation of Alarm classes.
*
* Revision 1.3 2018/03/23 20:12:20 mmaloney
* Added 'Enabled' flag for process and file monitors.
*
* Revision 1.2 2017/03/21 12:17:10 mmaloney
* First working XML and SQL I/O.
*
*/
package decodes.tsdb.alarm.xml;
/**
 * Constant tags for storing Comp Meta Data in XML Files.
 * Each constant's value is the XML element or attribute name it represents;
 * the constant identifiers mirror those names.
 */
public class AlarmXioTags
{
	// Tags for alarm groups and their monitors.
	public static final String AlarmGroup = "AlarmGroup";
	public static final String CheckPeriodSec = "CheckPeriodSec";
	public static final String ProcessMonitor = "ProcessMonitor";
	public static final String name = "name";
	public static final String AlarmDef = "AlarmDef";
	public static final String priority = "priority";
	public static final String Pattern = "Pattern";
	public static final String FileMonitor = "FileMonitor";
	public static final String path = "path";
	public static final String MaxFiles = "MaxFiles";
	public static final String hint = "hint";
	public static final String MaxLMT = "MaxLMT";
	public static final String Email = "Email";
	public static final String OnDelete = "OnDelete";
	public static final String OnExists = "OnExists";
	public static final String MaxSize = "MaxSize";
	public static final String lastModified = "LastModified";
	public static final String enabled = "Enabled";

	// File containing multiple alarm screenings.
	public static final String AlarmDefinitions = "AlarmDefinitions";

	// Tags for ALARM_SCREENING
	public static final String AlarmScreening = "AlarmScreening";
	public static final String AppName = "AppName";
	// Note Site and DataType are represented as they are in a computation
	public static final String startDateTime = "startDateTime";
	public static final String alarmGroupName = "alarmGroupName";
	public static final String desc = "desc";

	// Tags for ALARM_LIMIT_SET
	public static final String AlarmLimitSet = "AlarmLimitSet";
	public static final String screeningName = "screeningName";
	public static final String seasonName = "season";
	public static final String rejectHigh = "rejectHigh";
	public static final String criticalHigh = "criticalHigh";
	public static final String warningHigh = "warningHigh";
	public static final String warningLow = "warningLow";
	public static final String criticalLow = "criticalLow";
	public static final String rejectLow = "rejectLow";
	// Stuck-sensor check tags.
	public static final String stuckDuration = "stuckDuration";
	public static final String stuckTolerance = "stuckTolerance";
	public static final String stuckMinToCheck = "stuckMinToCheck";
	public static final String stuckMaxGap = "stuckMaxGap";
	// Rate-of-change check tags.
	public static final String rocInterval = "rocInterval";
	public static final String rejectRocHigh = "rejectRocHigh";
	public static final String criticalRocHigh = "criticalRocHigh";
	public static final String warningRocHigh = "warningRocHigh";
	public static final String warningRocLow = "warningRocLow";
	public static final String criticalRocLow = "criticalRocLow";
	public static final String rejectRocLow = "rejectRocLow";
	// Missing-data check tags.
	public static final String missingPeriod = "missingPeriod";
	public static final String missingInterval = "missingInterval";
	public static final String missingMaxValues = "missingMaxValues";
}
|
tonioshikanlu/tubman-hack
|
sources/androidx/core/os/HandlerKt.java
|
<gh_stars>1-10
package androidx.core.os;
import android.os.Handler;
import e.r;
import e.x.b.a;
import e.x.c.i;
import kotlin.Metadata;
@Metadata(bv = {1, 0, 3}, d1 = {"\u0000\"\n\u0002\u0018\u0002\n\u0002\u0010\t\n\u0000\n\u0002\u0010\u0000\n\u0000\n\u0002\u0018\u0002\n\u0002\u0018\u0002\n\u0000\n\u0002\u0018\u0002\n\u0002\b\u0005\u001a8\u0010\t\u001a\u00020\b*\u00020\u00002\u0006\u0010\u0002\u001a\u00020\u00012\n\b\u0002\u0010\u0004\u001a\u0004\u0018\u00010\u00032\u000e\b\u0004\u0010\u0007\u001a\b\u0012\u0004\u0012\u00020\u00060\u0005H\b¢\u0006\u0004\b\t\u0010\n\u001a8\u0010\f\u001a\u00020\b*\u00020\u00002\u0006\u0010\u000b\u001a\u00020\u00012\n\b\u0002\u0010\u0004\u001a\u0004\u0018\u00010\u00032\u000e\b\u0004\u0010\u0007\u001a\b\u0012\u0004\u0012\u00020\u00060\u0005H\b¢\u0006\u0004\b\f\u0010\n¨\u0006\r"}, d2 = {"Landroid/os/Handler;", "", "delayInMillis", "", "token", "Lkotlin/Function0;", "Le/r;", "action", "Ljava/lang/Runnable;", "postDelayed", "(Landroid/os/Handler;JLjava/lang/Object;Le/x/b/a;)Ljava/lang/Runnable;", "uptimeMillis", "postAtTime", "core-ktx_release"}, k = 2, mv = {1, 1, 15})
/**
 * Decompiled androidx.core.os Kotlin extension helpers for {@link Handler}.
 * NOTE(review): this is machine-decompiled output; `i.f(...)` is presumably
 * the Kotlin Intrinsics null-check and `a<r>` a Kotlin `Function0<Unit>` —
 * confirm against the original core-ktx source before editing.
 */
public final class HandlerKt {

    /**
     * Posts {@code aVar} to run at absolute uptime {@code j2}, optionally tagged
     * with {@code obj}, and returns the Runnable so callers can cancel it.
     */
    public static final Runnable postAtTime(Handler handler, long j2, Object obj, a<r> aVar) {
        i.f(handler, "$this$postAtTime");
        i.f(aVar, "action");
        HandlerKt$postAtTime$runnable$1 handlerKt$postAtTime$runnable$1 = new HandlerKt$postAtTime$runnable$1(aVar);
        handler.postAtTime(handlerKt$postAtTime$runnable$1, obj, j2);
        return handlerKt$postAtTime$runnable$1;
    }

    /** Synthetic default-argument bridge: token defaults to null when bit 2 of {@code i2} is set. */
    public static /* synthetic */ Runnable postAtTime$default(Handler handler, long j2, Object obj, a aVar, int i2, Object obj2) {
        if ((i2 & 2) != 0) {
            obj = null;
        }
        i.f(handler, "$this$postAtTime");
        i.f(aVar, "action");
        HandlerKt$postAtTime$runnable$1 handlerKt$postAtTime$runnable$1 = new HandlerKt$postAtTime$runnable$1(aVar);
        handler.postAtTime(handlerKt$postAtTime$runnable$1, obj, j2);
        return handlerKt$postAtTime$runnable$1;
    }

    /**
     * Posts {@code aVar} after a delay of {@code j2} ms; when a token {@code obj}
     * is given it delegates to HandlerCompat so the post can be cancelled by token.
     */
    public static final Runnable postDelayed(Handler handler, long j2, Object obj, a<r> aVar) {
        i.f(handler, "$this$postDelayed");
        i.f(aVar, "action");
        HandlerKt$postDelayed$runnable$1 handlerKt$postDelayed$runnable$1 = new HandlerKt$postDelayed$runnable$1(aVar);
        if (obj == null) {
            handler.postDelayed(handlerKt$postDelayed$runnable$1, j2);
        } else {
            HandlerCompat.postDelayed(handler, handlerKt$postDelayed$runnable$1, obj, j2);
        }
        return handlerKt$postDelayed$runnable$1;
    }

    /** Synthetic default-argument bridge: token defaults to null when bit 2 of {@code i2} is set. */
    public static /* synthetic */ Runnable postDelayed$default(Handler handler, long j2, Object obj, a aVar, int i2, Object obj2) {
        if ((i2 & 2) != 0) {
            obj = null;
        }
        i.f(handler, "$this$postDelayed");
        i.f(aVar, "action");
        HandlerKt$postDelayed$runnable$1 handlerKt$postDelayed$runnable$1 = new HandlerKt$postDelayed$runnable$1(aVar);
        if (obj == null) {
            handler.postDelayed(handlerKt$postDelayed$runnable$1, j2);
        } else {
            HandlerCompat.postDelayed(handler, handlerKt$postDelayed$runnable$1, obj, j2);
        }
        return handlerKt$postDelayed$runnable$1;
    }
}
|
LucasLaLima/canvas
|
canvas_modules/common-canvas/locales/toolbar/locales/index.js
|
/*
* Copyright 2017-2020 Elyra Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Map of locale identifiers to their toolbar message bundles.
// Keys follow the BCP-47-style tags consumed elsewhere (e.g. "pt-BR", "zh-CN").
const bundles = {
	"de": require("./de.json"),
	"en": require("./en.json"),
	"es": require("./es.json"),
	"fr": require("./fr.json"),
	"it": require("./it.json"),
	"ja": require("./ja.json"),
	"pt-BR": require("./pt-br.json"),
	"zh-TW": require("./zh-tw.json"),
	"zh-CN": require("./zh-cn.json"),
	"ru": require("./ru.json"),
	"eo": require("./eo.json")
};

module.exports = bundles;
|
rksk/carbon-identity-framework-1
|
components/authentication-framework/org.wso2.carbon.identity.application.authentication.framework/src/test/java/org/wso2/carbon/identity/application/authentication/framework/services/PostAuthenticationMgtServiceTest.java
|
<reponame>rksk/carbon-identity-framework-1
/*
* Copyright (c) 2018, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wso2.carbon.identity.application.authentication.framework.services;
import org.powermock.api.mockito.PowerMockito;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;
import org.wso2.carbon.identity.application.authentication.framework.config.model.SequenceConfig;
import org.wso2.carbon.identity.application.authentication.framework.context.AuthenticationContext;
import org.wso2.carbon.identity.application.authentication.framework.exception.PostAuthenticationFailedException;
import org.wso2.carbon.identity.application.authentication.framework.handler.request.AbstractPostAuthnHandler;
import org.wso2.carbon.identity.application.authentication.framework.handler.request.PostAuthnHandlerFlowStatus;
import org.wso2.carbon.identity.application.authentication.framework.internal.FrameworkServiceDataHolder;
import org.wso2.carbon.identity.application.authentication.framework.model.AuthenticatedUser;
import org.wso2.carbon.identity.application.authentication.framework.util.FrameworkConstants;
import org.wso2.carbon.identity.testutil.IdentityBaseTest;
import java.io.IOException;
import java.util.UUID;
import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import static org.mockito.Matchers.any;
import static org.powermock.api.mockito.PowerMockito.doAnswer;
import static org.powermock.api.mockito.PowerMockito.when;
import static org.testng.Assert.assertNotNull;
import static org.testng.Assert.assertNull;
import static org.testng.Assert.assertTrue;
/**
 * Tests for PostAuthenticationManagement Service.
 *
 * A sample redirecting post-authentication handler is registered before each
 * test; the data providers drive the single- and double-redirect flows and
 * the failure paths (tampered cookie, disallowed user).
 */
public class PostAuthenticationMgtServiceTest extends IdentityBaseTest {

    private PostAuthenticationMgtService postAuthenticationMgtService = new PostAuthenticationMgtService();
    private TestPostHandlerWithRedirect testPostHandlerWithRedirect = new TestPostHandlerWithRedirect();

    // Context-parameter keys the sample handler uses to record redirect progress.
    private static final String FIRST_REDIRECT_TRIGGERED = "firstRedirectTriggered";
    private static final String SECOND_REDIRECT_TRIGGERED = "secondRedirectTriggered";
    // User names driving the handler's branches: admin completes after one
    // redirect, the special user needs a second redirect, anyone else fails.
    private static final String ADMIN_USERNAME = "admin";
    private static final String SPECIAL_USER = "specialUser";
    private static final String DUMMY_EXTERNAL_ENDPOINT = "https://localhost/somecontext";

    @BeforeMethod
    void setup() {

        // Re-enable and (re-)register the sample handler before every test.
        testPostHandlerWithRedirect.setEnabled(true);
        FrameworkServiceDataHolder.getInstance().addPostAuthenticationHandler(testPostHandlerWithRedirect);
    }

    @DataProvider(name = "singlePostAuthenticatorData")
    public Object[][] singlePostAuthenticatorData() {

        return new Object[][]{
                // Sample authenticator is enabled, an admin user authenticated. Hence just a single redirection.
                {true, ADMIN_USERNAME},
                // No post authenticator is enabled.
                {false, ADMIN_USERNAME},
                // Sample authenticator is enabled. A special user is authenticated. Hence two redirections take place.
                {true, SPECIAL_USER}
        };
    }

    @Test(dataProvider = "singlePostAuthenticatorData")
    public void testHandlePostAuthentication(boolean isSampleAuthenticatorEnabled, String userName) throws Exception {

        HttpServletRequest request = PowerMockito.mock(HttpServletRequest.class);
        HttpServletResponse response = PowerMockito.mock(HttpServletResponse.class);
        AuthenticationContext context = new AuthenticationContext();
        context.setContextIdentifier(String.valueOf(UUID.randomUUID()));
        // Capture the cookie the service sets so it can be replayed on the next request.
        Cookie[] cookies = new Cookie[1];
        doAnswer((mock) -> cookies[0] = (Cookie) mock.getArguments()[0]).when(response).addCookie(any(Cookie.class));
        addSequence(context, true);
        setUser(context, userName);

        if (!isSampleAuthenticatorEnabled) {
            this.testPostHandlerWithRedirect.setEnabled(false);
        }
        postAuthenticationMgtService.handlePostAuthentication(request, response, context);
        if (isSampleAuthenticatorEnabled && !SPECIAL_USER.equalsIgnoreCase(userName)) {
            // Admin flow: one redirect, then the post-auth extension completes.
            assertNotNull(context.getParameter(FIRST_REDIRECT_TRIGGERED));
            when(request.getCookies()).thenReturn(cookies);
            postAuthenticationMgtService.handlePostAuthentication(request, response, context);
            assertTrue(Boolean.parseBoolean(context.getParameter(FrameworkConstants
                    .POST_AUTHENTICATION_EXTENSION_COMPLETED).toString()));
        } else if (SPECIAL_USER.equalsIgnoreCase(userName)) {
            // Special-user flow: after the first redirect a second one is
            // triggered and the extension is still incomplete.
            assertNotNull(context.getParameter(FIRST_REDIRECT_TRIGGERED));
            when(request.getCookies()).thenReturn(cookies);
            postAuthenticationMgtService.handlePostAuthentication(request, response, context);
            assertNull(context.getParameter(FrameworkConstants
                    .POST_AUTHENTICATION_EXTENSION_COMPLETED));
            assertNotNull(context.getParameter(SECOND_REDIRECT_TRIGGERED));
        } else {
            // Handler disabled: the extension completes immediately.
            assertTrue(Boolean.parseBoolean(context.getParameter(FrameworkConstants
                    .POST_AUTHENTICATION_EXTENSION_COMPLETED).toString()));
        }
        postAuthenticationMgtService.handlePostAuthentication(request, response, context);
        if (SPECIAL_USER.equalsIgnoreCase(userName)) {
            assertNotNull(context.getParameter(SECOND_REDIRECT_TRIGGERED));
        }
        postAuthenticationMgtService.handlePostAuthentication(request, response, context);
    }

    @DataProvider(name = "singlePostAuthenticatorUnsuccessData")
    public Object[][] singlePostAuthenticatorUnsuccessData() {

        return new Object[][]{
                // Alter cookie before sending response to first redirection. Hence should fail
                {true, ADMIN_USERNAME},
                // If the user is neither an admin, nor special user, them post authentication should fail.
                {false, ADMIN_USERNAME + "suffix"}
        };
    }

    @Test(dataProvider = "singlePostAuthenticatorUnsuccessData", expectedExceptions =
            PostAuthenticationFailedException.class)
    public void testHandlePostAuthenticationExceptions(boolean alterCookie, String userName) throws Exception {

        HttpServletRequest request = PowerMockito.mock(HttpServletRequest.class);
        HttpServletResponse response = PowerMockito.mock(HttpServletResponse.class);
        AuthenticationContext context = new AuthenticationContext();
        context.setContextIdentifier(String.valueOf(UUID.randomUUID()));
        Cookie[] cookies = new Cookie[1];
        doAnswer((mock) -> cookies[0] = (Cookie) mock.getArguments()[0]).when(response).addCookie(any(Cookie.class));
        addSequence(context, true);
        setUser(context, userName);
        postAuthenticationMgtService.handlePostAuthentication(request, response, context);
        if (alterCookie && ADMIN_USERNAME.equalsIgnoreCase(userName)) {
            // Tamper with the state cookie so the replay is rejected.
            cookies[0].setValue(cookies[0].getValue() + "gibberish");
        }
        when(request.getCookies()).thenReturn(cookies);
        postAuthenticationMgtService.handlePostAuthentication(request, response, context);
    }

    /** Attaches a SequenceConfig with the given completion state to the context. */
    private void addSequence(AuthenticationContext context, boolean isCompleted) {

        SequenceConfig sequenceConfig = new SequenceConfig();
        sequenceConfig.setCompleted(isCompleted);
        context.setSequenceConfig(sequenceConfig);
    }

    /** Sets the authenticated user on the context's sequence config. */
    private void setUser(AuthenticationContext context, String userName) {

        AuthenticatedUser authenticatedUser = new AuthenticatedUser();
        authenticatedUser.setAuthenticatedSubjectIdentifier(userName);
        context.getSequenceConfig().setAuthenticatedUser(authenticatedUser);
    }

    /**
     * Sample post authentication handler for tests.
     */
    public static class TestPostHandlerWithRedirect extends AbstractPostAuthnHandler {

        private boolean isEnabled = true;

        @Override
        public PostAuthnHandlerFlowStatus handle(HttpServletRequest request, HttpServletResponse response,
                                                 AuthenticationContext context)
                throws PostAuthenticationFailedException {

            // If not authenticated just return.
            if (context.getSequenceConfig().getAuthenticatedUser() == null) {
                return PostAuthnHandlerFlowStatus.SUCCESS_COMPLETED;
            } else if (context.getParameter(FIRST_REDIRECT_TRIGGERED) != null && context.getParameter
                    (SECOND_REDIRECT_TRIGGERED) == null) {
                // First redirection has taken place. Decide whether second redirection needs or can finish the flow
                String authenticatedUsername = context.getSequenceConfig().getAuthenticatedUser()
                        .getAuthenticatedSubjectIdentifier();
                if (ADMIN_USERNAME.equalsIgnoreCase(authenticatedUsername)) {
                    // If the user is admin, the flow is success.
                    return PostAuthnHandlerFlowStatus.SUCCESS_COMPLETED;
                } else if (SPECIAL_USER.equalsIgnoreCase(authenticatedUsername)) {
                    // If the user is a special user. then do a second redirection before completing.
                    try {
                        response.sendRedirect(DUMMY_EXTERNAL_ENDPOINT);
                        context.addParameter(SECOND_REDIRECT_TRIGGERED, true);
                        return PostAuthnHandlerFlowStatus.INCOMPLETE;
                    } catch (IOException e) {
                        throw new PostAuthenticationFailedException("Error while checking admin user", "Error while " +
                                "redirecting");
                    }
                } else {
                    throw new PostAuthenticationFailedException("Not an admin user", "User is not an admin");
                }
            }
            if (context.getParameter(SECOND_REDIRECT_TRIGGERED) != null) {
                return PostAuthnHandlerFlowStatus.INCOMPLETE;
            }
            // First invocation for an authenticated user: trigger the first redirect.
            try {
                response.sendRedirect(DUMMY_EXTERNAL_ENDPOINT);
                context.addParameter(FIRST_REDIRECT_TRIGGERED, true);
                return PostAuthnHandlerFlowStatus.INCOMPLETE;
            } catch (IOException e) {
                throw new PostAuthenticationFailedException("Error while checking admin user", "Error while " +
                        "redirecting");
            }
        }

        @Override
        public boolean isEnabled() {

            return this.isEnabled;
        }

        public void setEnabled(boolean isEnabled) {

            this.isEnabled = isEnabled;
        }
    }
}
|
PRETgroup/sann
|
MEMOCODE_2018_Benchmarks/AIBRO/neuron/main.h
|
<filename>MEMOCODE_2018_Benchmarks/AIBRO/neuron/main.h
/* Include guard. */
#ifndef MAIN_H
#define MAIN_H

/* NOTE(review): none of the declarations below use these headers directly;
 * presumably the corresponding .c files rely on them — confirm before removing. */
#include <time.h>
#include <stdlib.h>
#include <stdio.h>

/* Output hooks for the AIBRO neuron benchmark (per the `_O_out` naming);
 * each takes a single int argument. */
void aibro_O_outA1(int);
void aibro_O_outA2(int);
void aibro_O_outA3(int);
void aibro_O_outB1(int);
void aibro_O_outB2(int);
void aibro_O_outC(int);

#endif
|
Afro-Don-San/opensrp-client-chw-hf
|
opensrp-chw-hf/src/main/java/org/smartregister/chw/hf/presenter/FpRegisterPresenter.java
|
package org.smartregister.chw.hf.presenter;
import org.smartregister.chw.fp.contract.BaseFpRegisterContract;
import org.smartregister.chw.fp.presenter.BaseFpRegisterPresenter;
/**
 * Presenter for the family-planning (FP) register in the health-facility app.
 * Extends the base FP presenter, delegating registration persistence to the
 * interactor.
 */
public class FpRegisterPresenter extends BaseFpRegisterPresenter {

    // Kept locally (in addition to being passed to the superclass) so
    // saveForm can delegate to it directly.
    private BaseFpRegisterContract.Interactor interactor;

    public FpRegisterPresenter(BaseFpRegisterContract.View view, BaseFpRegisterContract.Model model, BaseFpRegisterContract.Interactor interactor) {
        super(view, model, interactor);
        this.interactor = interactor;
    }

    @Override
    public void saveForm(String jsonString) {
        // Persist the submitted registration JSON; this presenter is the callback.
        interactor.saveRegistration(jsonString, this);
    }
}
|
Guru2921/Laravel-React
|
resources/assets/js/pages/Dashboard/manage-listing/calendar/Calendar.js
|
<reponame>Guru2921/Laravel-React
import React from 'react';
import Calendartitle from './calendartitle/Calendartitle';
import { BrowserRouter as Router, Route, Link } from 'react-router-dom'
class Calendar extends React.Component {
render(){
return(
<div className="manage-listing-content-wrapper clearfix">
<div className="listing_whole col-md-8" id="js-manage-listing-content">
<div className="common_listpage">
<Calendartitle/>
</div>
</div>
<div className="col-md-4 import_calander">
<div className="common_ios">
<div className="calendar_settings">
<a href="#" className="slide-toggle">iCal Settings <i className="fa fa-calendar"/></a>
</div>
<div className="new_box" style={{display: 'none'}}>
<div className="box-inner">
<h5> iCal Settings<a href="#"><i className="close_imp fa fa-close"/></a></h5>
<div className="import_cal">
<a href="javascript:void(0);" id="import_button">Import Calendar</a>
<a href="javascript:void(0);" id="export_button">Export Calendar</a>
<a href="#" id="import_button">Import Calendar</a>
<a href="#" id="export_button">Export Calendar</a>
</div>
<div className="import_cal">
<ul className="imported_ical_ui">
</ul>
</div>
</div>
</div>
<div className="bow_wrapper">
<div className="instructionvideo">
<h4>Ical Instruction</h4>
<iframe width="100%" height={200} src="https://www.youtube.com/embed/OYfmmWQIxj0" frameBorder={0} allow="autoplay; encrypted-media" allowFullScreen />
</div>
<div className="instructionvideo" style={{marginTop: 15}}>
<h4>Seasonal Price Instruction</h4>
<iframe width="100%" height={200} src="https://www.youtube-nocookie.com/embed/27jDmVCmw6U" frameBorder={0} allow="autoplay; encrypted-media" allowFullScreen />
</div>
</div>
</div>
</div>
</div>
)
}
}
export default Calendar;
|
sPHENIX-Test/sPHENIX-Test
|
doxygen/da/d3c/G4__Tracking_8C.js
|
// Doxygen-generated navigation data for G4_Tracking.C: each entry maps a
// documented symbol (enum, enum value, function, or variable) to its HTML
// anchor. Auto-generated output — regenerate with Doxygen rather than editing.
var G4__Tracking_8C =
[
    [ "enu_InttDeadMapType", "da/d3c/G4__Tracking_8C.html#a4249adf7aeced09f4b32c323628db438", [
      [ "kInttNoDeadMap", "da/d3c/G4__Tracking_8C.html#a4249adf7aeced09f4b32c323628db438aaf0ee439ff059f6942c898cce8d7de0a", null ],
      [ "kIntt4PercentDeadMap", "da/d3c/G4__Tracking_8C.html#a4249adf7aeced09f4b32c323628db438ac30f329f3bb0b97d1c9a27b9f003ced4", null ],
      [ "kIntt8PercentDeadMap", "da/d3c/G4__Tracking_8C.html#a4249adf7aeced09f4b32c323628db438aa4a48b43ca5ea1a05808cf96861953a6", null ]
    ] ],
    [ "Tracking", "da/d3c/G4__Tracking_8C.html#a29db7ba137ab6b448bd8200c192a0a50", null ],
    [ "Tracking_Cells", "da/d3c/G4__Tracking_8C.html#a4433eb58af16a1286bd71a65483a0b09", null ],
    [ "Tracking_Clus", "da/d3c/G4__Tracking_8C.html#af2e3ad3bfe6a11f216bd8aed180c4b0e", null ],
    [ "Tracking_Eval", "da/d3c/G4__Tracking_8C.html#ae9df33fc97bec89b2ab88ec6ea2b8ef6", null ],
    [ "Tracking_Reco", "da/d3c/G4__Tracking_8C.html#acf2552f0af55b070654c44d9cb8fdbc4", null ],
    [ "TrackingInit", "da/d3c/G4__Tracking_8C.html#acfcd3e9373b20e13a929f10c87290ce5", null ],
    [ "vmethod", "da/d3c/G4__Tracking_8C.html#a9f6dd70af0e7e51fc87543097100ecb3", null ],
    [ "g4eval_use_initial_vertex", "da/d3c/G4__Tracking_8C.html#a8626c49fb15a79102f6c402bdf395b9b", null ],
    [ "init_vertexing_min_zvtx_tracks", "da/d3c/G4__Tracking_8C.html#adb45b2281eb4cad677334d281b390a1d", null ],
    [ "InttDeadMapOption", "da/d3c/G4__Tracking_8C.html#ab840edeccd3dd15d4bca41d8f9a499c4", null ],
    [ "laddertype", "da/d3c/G4__Tracking_8C.html#a5477894af5e433f93d81566e9cf99a7f", null ],
    [ "n_gas_layer", "da/d3c/G4__Tracking_8C.html#ae9c9ce06a21b457118ae04b5c7c9cf84", null ],
    [ "n_intt_layer", "da/d3c/G4__Tracking_8C.html#a44ded55a3d78189af01bacacd7756247", null ],
    [ "n_maps_layer", "da/d3c/G4__Tracking_8C.html#a207aeccb8fc0996c7c9e810b86efadc3", null ],
    [ "n_tpc_layer_inner", "da/d3c/G4__Tracking_8C.html#a48d1f95f1b8d56c6c0fe3f596e9a4061", null ],
    [ "n_tpc_layer_mid", "da/d3c/G4__Tracking_8C.html#acbfe8785fed9d0ea7e550df07bd0a96e", null ],
    [ "n_tpc_layer_outer", "da/d3c/G4__Tracking_8C.html#ae4fc58843b99ba4f2a271d94ee9eafbf", null ],
    [ "nladder", "da/d3c/G4__Tracking_8C.html#a3b02c00c18f7e7eea0d6b04553de3e91", null ],
    [ "offsetphi", "da/d3c/G4__Tracking_8C.html#a5abe5ffa9559579928bd3d228b10adc6", null ],
    [ "sensor_radius", "da/d3c/G4__Tracking_8C.html#a6dacfb77a252479e7083c7c233d9f891", null ],
    [ "tpc_layer_rphi_count_inner", "da/d3c/G4__Tracking_8C.html#a500aa089e19143eb17b5c1ffd395bda9", null ],
    [ "use_primary_vertex", "da/d3c/G4__Tracking_8C.html#a02e4825a4d216f93fad1a70debfcf950", null ],
    [ "use_track_prop", "da/d3c/G4__Tracking_8C.html#ab60f8e6dc0843f86ecd399a65eef9709", null ],
    [ "use_truth_vertex", "da/d3c/G4__Tracking_8C.html#af26ce64a9e225d1d755a46d7342b37b2", null ]
];
|
codeka/ravaged-planets
|
include/game/entities/entity_debug.h
|
#pragma once

// Standard-library types used below (std::string parameter, std::vector
// member); previously this header relied on transitive includes.
#include <string>
#include <vector>

#include <framework/vector.h>
#include <framework/colour.h>

namespace fw {
namespace gui {
class window;
class widget;
}
namespace sg {
class scenegraph;
}
}

namespace ent {
class entity_manager;

/** These are the different flags that apply to a single entity. */
enum entity_debug_flags {
  /**
   * If this is set, we should render "steering vectors" which show how the entity is currently steering.
   */
  debug_show_steering = 1,
  debug_max_value = 2
};

/**
 * This class contains some debug-related state. It registers the Ctrl+D key to enable/disable
 * "entity debugging" which shows things such as steering behaviours, pathing finding and so on.
 */
class entity_debug {
private:
  entity_manager *_mgr;
  fw::gui::window *_wnd;
  bool _just_shown;

  void on_key_press(std::string keyname, bool is_down);
  bool on_show_steering_changed(fw::gui::widget *w);

public:
  entity_debug(entity_manager *mgr);
  ~entity_debug();

  void initialize();

  /** Called each frame to update our state. */
  void update();
};

/**
 * This is a class that each entity has access to and allows you to draw various lines and points
 * and so on that represent the various debugging information we can visualize.
 */
class entity_debug_view {
private:
  /** A single coloured line segment queued for rendering. */
  struct line {
    fw::vector from;
    fw::vector to;
    fw::colour col;
  };
  std::vector<line> _lines;

public:
  entity_debug_view();
  ~entity_debug_view();

  void add_line(fw::vector const &from, fw::vector const &to,
      fw::colour const &col);
  // Fixed mojibake in the parameter name: "&centre" had been corrupted to "¢re".
  void add_circle(fw::vector const &centre, float radius,
      fw::colour const &col);
  void render(fw::sg::scenegraph &scenegraph, fw::matrix const &transform);
};

}
|
berlin-church/church-database
|
app/admin/option_answer.rb
|
# ActiveAdmin CRUD interface for OptionAnswer records, filed under the
# "Questionnaires" menu.
ActiveAdmin.register OptionAnswer do
  menu parent: "Questionnaires"

  form do |f|
    inputs '' do
      input :question_option
      # Attendee choices ordered by the joined member's first name for easier lookup.
      input :attendee, :collection => Attendee.joins(:member).order('members.first_name')
    end
    actions
  end

  # NOTE(review): the form edits :attendee, but :member_id is permitted here
  # instead of :attendee_id — confirm attendee selections are not silently
  # dropped by strong parameters.
  permit_params :question_option_id, :member_id
end
|
sweersr/visions
|
examples/data_analysis/categorical.py
|
<filename>examples/data_analysis/categorical.py
import pandas.api.types as pdt
import pandas as pd
import numpy as np
from typing import Sequence, List
from visions import visions_string, visions_integer, visions_object
from visions.core.model.relations import IdentityRelation, InferenceRelation
from visions.core.model import TypeRelation
from visions.core.model.type import VisionsBaseType
from visions.utils.coercion.test_utils import coercion_map_test, coercion_map
def to_category(series: pd.Series) -> pd.Series:
    """Coerce a series of boolean-like values to a boolean dtype.

    Args:
        series: values drawn from {True, False, None, np.nan}.

    Returns:
        A plain ``bool`` series when no missing values are present, otherwise
        a nullable ``"boolean"`` series (missing values become ``pd.NA``).

    Raises:
        ValueError: if the series contains any other value.
    """
    if series.isin({True, False}).all():
        return series.astype(bool)
    elif series.isin({True, False, None, np.nan}).all():
        # Fix: "Bool" is not a registered pandas dtype alias; the nullable
        # boolean extension dtype is spelled "boolean".
        return series.astype("boolean")
    else:
        unsupported_values = series[~series.isin({True, False, None, np.nan})].unique()
        raise ValueError(
            "Values not supported {unsupported_values}".format(
                unsupported_values=unsupported_values
            )
        )
def _get_relations(cls) -> List[TypeRelation]:
    """Build the type relations for the categorical type ``cls``.

    Returns an identity relation from the generic type plus inference
    relations from string, integer and object series that look boolean-like.
    """
    # Imported here to avoid a circular import with the concrete types module.
    from visions.core.implementations.types import visions_generic

    relations = [
        # A categorical series is trivially also a generic series.
        IdentityRelation(cls, visions_generic),
        # Strings: the lower-cased values must pass the configured string
        # coercion map; on success, coerce and then cast via to_category.
        InferenceRelation(
            cls,
            visions_string,
            relationship=lambda s: coercion_map_test(cls.string_coercions)(
                s.str.lower()
            ),
            transformer=lambda s: to_category(
                coercion_map(cls.string_coercions)(s.str.lower())
            ),
        ),
        # Integers: only values in {0, 1, NaN} are treated as boolean-like.
        InferenceRelation(
            cls,
            visions_integer,
            relationship=lambda s: s.isin({0, 1, np.nan}).all(),
            transformer=to_category,
        ),
        # Objects: every element must already be a bool or None.
        InferenceRelation(
            cls,
            visions_object,
            relationship=lambda s: s.apply(type).isin([type(None), bool]).all(),
            transformer=to_category,
        ),
    ]
    return relations
class visions_category(VisionsBaseType):
    """**Categorical** implementation of :class:`visions.core.model.type.VisionsBaseType`.

    Examples:
        >>> x = pd.Series([True, False, 1], dtype='category')
        >>> x in visions_category
        True
    """

    @classmethod
    def get_relations(cls) -> Sequence[TypeRelation]:
        # Delegates to the module-level helper so the relation list can
        # import concrete types lazily.
        return _get_relations(cls)

    @classmethod
    def contains_op(cls, series: pd.Series) -> bool:
        # Plain bool series are accepted as categorical as well.
        return pdt.is_categorical_dtype(series) or pdt.is_bool_dtype(series)
|
Somtozech/Rocket.Chat.Electron
|
src/components/electron/Menu.js
|
<reponame>Somtozech/Rocket.Chat.Electron
import { remote } from 'electron';
import React, {
createContext,
forwardRef,
useCallback,
useContext,
useImperativeHandle,
useRef,
} from 'react';
import { useElementsRefValues } from '../../hooks/useElementsRefValues';
// Context carrying the "invalidate" callback down the menu tree; the default
// is a no-op so top-level menus can call it unconditionally.
const MenuContext = createContext(() => {});

// Hook: returns the invalidation callback of the nearest enclosing <Menu>.
export const useMenuInvalidation = () => useContext(MenuContext);

// Bridges React children to a native Electron remote.Menu. The imperative
// handle rebuilds the native menu whenever the collected menu items change.
export const Menu = forwardRef(function Menu({
  children: menuItemsElements,
}, ref) {
  const innerRef = useRef();
  // menuItems: native menu item values collected from the children via refs;
  // clonedChildrenElements: the children re-rendered with that ref plumbing
  // attached (see useElementsRefValues).
  const [menuItems, clonedChildrenElements, setMenuItems] = useElementsRefValues(menuItemsElements);
  const parentInvalidate = useMenuInvalidation();
  // Forces a rebuild of this menu (fresh array identity) and of every
  // ancestor menu up the tree.
  const invalidate = useCallback(() => {
    setMenuItems((menuItems) => [...menuItems]);
    parentInvalidate();
  }, [parentInvalidate, setMenuItems]);
  useImperativeHandle(ref, () => {
    // Rebuild the native menu from scratch; empty/null slots are skipped.
    innerRef.current = new remote.Menu();
    menuItems.filter(Boolean).forEach((menuItem) => {
      innerRef.current.append(menuItem);
    });
    return innerRef.current;
  }, [menuItems]);
  return <MenuContext.Provider value={invalidate}>
    {clonedChildrenElements}
  </MenuContext.Provider>;
});
|
thumblemonks/riot
|
lib/riot/assertion_macros/exists.rb
|
<gh_stars>10-100
module Riot
  # Asserts that the result of the test is a non-nil value. This is useful in the case where you don't want
  # to translate the result of the test into a boolean value
  #
  #   asserts("test") { "foo" }.exists
  #   should("test") { 123 }.exists
  #   asserts("test") { "" }.exists
  #   asserts("test") { nil }.exists # This would fail
  #
  # You can also test for non-existince (being nil), but if you would better if you used the +nil+ macro:
  #
  #   denies("test") { nil }.exists # would pass
  #   asserts("test") { nil }.nil   # same thing
  #
  #   denies("test") { "foo" }.exists # would fail
  #
  # @deprecated Please use +denies.nil+ instead of +asserts.exists+.
  class ExistsMacro < AssertionMacro
    register :exists

    # (see Riot::AssertionMacro#evaluate)
    def evaluate(actual)
      warn "exists is deprecated; please use denies.nil instead of asserts.exists"
      actual.nil? ? fail("expected a non-nil value") : pass("does exist")
    end

    # (see Riot::AssertionMacro#devaluate)
    def devaluate(actual)
      warn "exists is deprecated; please use denies.nil instead of asserts.exists"
      # Fix: the passing message previously read "does exist", which
      # contradicts a denial that succeeds precisely because the value is nil.
      actual.nil? ? pass("does not exist") : fail("expected a nil value")
    end
  end
end
|
webOS-ports/configd
|
src/configd/setting/Setting.cpp
|
<reponame>webOS-ports/configd
// Copyright (c) 2017-2020 LG Electronics, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// SPDX-License-Identifier: Apache-2.0
#include <glib.h>
#include <strings.h>
#include <pbnjson.h>
#include <setting/Setting.h>
#include "Environment.h"
#include "util/Logger.hpp"
// TODO add below sentence an CMakeLists.txt
// webos_build_configured_file(files/conf/confd_setting.json sysconfdir "")
const char* Setting::DEFAULT_SETTING_FILE = INSTALL_WEBOS_SYSCONFDIR "/configd.json";
const char* Setting::DEBUG_SETTING_FILE = INSTALL_LOCALSTATEDIR "/configd_debug.json";
// Loads the debug settings file when present (it takes priority), otherwise
// the default one, then derives boot/platform state and applies the logger
// configuration.
Setting::Setting()
    : m_configuration(Object()),
      m_isSnapshotBoot(false),
      m_isRespawned(false),
      m_bootStatus("unknown")
{
    if (access(DEBUG_SETTING_FILE, R_OK) == 0) {
        loadSetting(DEBUG_SETTING_FILE);
    } else if (access(DEFAULT_SETTING_FILE, R_OK) == 0) {
        loadSetting(DEFAULT_SETTING_FILE);
    }
    parseKernelCmdLine();
    parsePlatform();
    applySettings();
}
// Cancels the outstanding bootManager subscription, if any.
Setting::~Setting()
{
    if (m_call && m_call->isActive()) {
        m_call->cancel();
    }
}
// Subscribes to luna://com.webos.bootManager/getBootStatus; responses are
// delivered to onReceiveCall(). Safe to call repeatedly -- an already-active
// subscription is kept as-is.
void Setting::initialize()
{
    if (m_call && m_call->isActive()) {
        return;
    }
    string url = "luna://com.webos.bootManager/getBootStatus";
    pbnjson::JValue params = pbnjson::Object();
    params.put("subscribe", true);
    m_call = AbstractBusFactory::getInstance()->getIHandle()->call(url,
                                                                   params.stringify(),
                                                                   this);
}
// Returns true when the boot status reported by bootManager is "normal".
bool Setting::isNormalStatus()
{
    // Simplified from an if/else that returned the comparison's two outcomes.
    return m_bootStatus == "normal";
}
// Subscription callback for getBootStatus: records a changed bootStatus and
// logs the transition. Responses without a bootStatus key are ignored.
void Setting::onReceiveCall(JValue &response)
{
    if (!response.hasKey("bootStatus")) {
        return;
    }
    // Extract the string once instead of twice (compare + assign).
    const string bootStatus = response["bootStatus"].asString();
    if (m_bootStatus == bootStatus) {
        return;
    }
    m_bootStatus = bootStatus;
    Logger::info(MSGID_MAIN,
                 LOG_PREPIX_FORMAT "bootStatus is changed to '%s'",
                 LOG_PREPIX_ARGS, m_bootStatus.c_str());
}
// Reads /proc/cmdline and flags a snapshot boot when the kernel command line
// contains the token "snapshot". A missing/unreadable file is silently ignored.
void Setting::parseKernelCmdLine()
{
    ifstream file;
    file.open("/proc/cmdline");
    if (file.fail()) {
        return;
    }
    string cmdline;
    // Slurp the whole (single-line) file into a string.
    cmdline.assign((std::istreambuf_iterator<char>(file)), std::istreambuf_iterator<char>());
    if (std::string::npos != cmdline.find("snapshot")) {
        m_isSnapshotBoot = true;
    }
    file.close();
}
// A leftover pid file means a previous configd instance existed and we were
// respawned rather than cleanly booted.
void Setting::parsePlatform()
{
    m_isRespawned = g_file_test(CONFIGD_PID_FILE_PATH, G_FILE_TEST_EXISTS);
}

// Pushes the configured log type/path/level into the global logger.
void Setting::applySettings()
{
    if (!Logger::getInstance()->setLogType(getLogType(), getLogPath()))
        cerr << "Error in setLogType" << endl;
    Logger::getInstance()->setLogLevel(getLogLevel());
}
// Recursively merges 'source' into 'local': missing keys are added, scalar
// and array values overwrite, and object values are merged key by key so
// nested defaults survive.
void Setting::setSetting(JValue& source, JValue& local)
{
    auto it = source.children();
    for (auto object = it.begin() ; object != it.end() ; ++object) {
        string key = (*object).first.asString();
        JValue value = (*object).second;
        if (!local.hasKey(key)) {
            // New key: take it wholesale.
            local.put(key, value);
        } else if (!value.isObject()){
            // Leaf value: the source side wins.
            local.put(key, value);
        } else {
            // Both sides hold an object: descend and merge recursively.
            JValue v = local[key];
            setSetting(value, v);
        }
    }
}
// Walks the configuration tree following the given key path, e.g.
// getSetting({"logger", "level"}). Returns a null JValue as soon as any key
// on the path is missing (the miss is logged at debug level).
JValue Setting::getSetting(initializer_list<const char*> list)
{
    JValue* pos = &m_configuration;
    JValue result;
    for (auto iter = list.begin() ; iter != list.end() ; ++iter) {
        if (!pos->hasKey(*iter)) {
            Logger::debug(LOG_PREPIX_FORMAT "key is not found : %s",
                          LOG_PREPIX_ARGS, *iter);
            return nullptr;
        } else {
            result = (*pos)[(*iter)];
            pos = &result;
        }
    }
    return result;
}
// Parses the given JSON file and merges its contents into m_configuration.
// Parse failures are logged and leave the current configuration untouched.
void Setting::loadSetting(const string filename)
{
    JValue value = JDomParser::fromFile(filename.c_str());
    if (!value.isValid() || value.isNull()) {
        // Fix: log message typo ("Fail Invalid Json formmated file").
        Logger::error(MSGID_JSON_PARSE_FILE_ERR,
                      LOG_PREPIX_FORMAT "Invalid JSON formatted file %s",
                      LOG_PREPIX_ARGS, filename.c_str());
        return;
    }
    setSetting(value, m_configuration);
}
// Configured log sink type ("logger.type"); defaults to PmLog when unset.
LogType Setting::getLogType()
{
    JValue value = m_configuration["logger"]["type"];
    if (value.isNull()) {
        return LogType_PmLog;
    }
    return (LogType)value.asNumber<int32_t>();
}

// Configured verbosity ("logger.level"); defaults to Info when unset.
LogLevel Setting::getLogLevel()
{
    JValue value = m_configuration["logger"]["level"];
    if (value.isNull()) {
        return LogLevel_Info;
    }
    return (LogLevel)value.asNumber<int32_t>();
}

// Log file path ("logger.path"); empty string when not configured.
string Setting::getLogPath()
{
    JValue value = m_configuration["logger"]["path"];
    if (value.isNull()) {
        return "";
    }
    return value.asString();
}
// True when the kernel command line indicated a snapshot boot.
bool Setting::isSnapshotBoot()
{
    return m_isSnapshotBoot;
}

// True when a previous configd instance left its pid file behind.
bool Setting::isRespawned()
{
    return m_isRespawned;
}

// Drops every loaded setting, returning to an empty configuration.
void Setting::clearSettings()
{
    m_configuration = Object();
}

// Logs a one-line summary of the effective logger/boot configuration.
void Setting::printSetting()
{
    Logger::info(MSGID_MAIN,
                 LOG_PREPIX_FORMAT "LogLevel(%d) / LogType(%d) / LogPath(%s) / BootStatus(%s)",
                 LOG_PREPIX_ARGS, getLogLevel(), getLogType(), getLogPath().c_str(), m_bootStatus.c_str());
}

// Dumps the raw merged configuration JSON to stdout (debugging aid).
void Setting::printDebug()
{
    cout << endl << "debug print" << endl;
    cout << m_configuration.stringify("    ");
    cout << endl << endl;
}
|
getsocial-im/getsocial-react-native-sdk
|
sdk7demo/common/MenuStyle.js
|
<filename>sdk7demo/common/MenuStyle.js<gh_stars>1-10
// @flow
import {StyleSheet, Platform, Dimensions} from 'react-native';
// Full device width, captured once at module load; used to stretch rows
// edge to edge.
const {width} = Dimensions.get('window');

// Shared stylesheet for the demo's menu screens.
export const MenuStyle = StyleSheet.create({
    container: {
        flex: 1,
        // Leave room for the iOS status bar; Android handles this natively.
        paddingTop: Platform.OS === 'ios' ? 20 : 0,
        backgroundColor: '#F5FCFF',
    },
    footer: {
        flex: 1,
        height: 20,
        width: width,
        justifyContent: 'flex-end',
        backgroundColor: 'grey',
    },
    footerText: {
        fontSize: 14,
        textAlign: 'center',
    },
    menuContainer: {
        flex: 1,
    },
    // Row variants below differ only in height: taller rows fit more lines
    // of text (40 px base + ~20 px per extra row).
    listitem: {
        flex: 1,
        alignItems: 'center',
        backgroundColor: 'white',
        flexDirection: 'row',
        height: 40,
        width: width,
        borderBottomWidth: 0.5,
        borderBottomColor: 'black',
    },
    listitem3rows: {
        flex: 1,
        alignItems: 'center',
        backgroundColor: 'white',
        flexDirection: 'row',
        height: 60,
        width: width,
        borderBottomWidth: 0.5,
        borderBottomColor: 'black',
    },
    listitem4rows: {
        flex: 1,
        alignItems: 'center',
        backgroundColor: 'white',
        flexDirection: 'row',
        height: 80,
        width: width,
        borderBottomWidth: 0.5,
        borderBottomColor: 'black',
    },
    listitem5rows: {
        flex: 1,
        alignItems: 'center',
        backgroundColor: 'white',
        flexDirection: 'row',
        height: 100,
        width: width,
        borderBottomWidth: 0.5,
        borderBottomColor: 'black',
    },
    listitemWithCheckbox: {
        flex: 1,
        alignItems: 'center',
        backgroundColor: 'white',
        flexDirection: 'row',
        height: 60,
        width: width,
        borderBottomWidth: 0.5,
        borderBottomColor: 'black',
    },
    menuitem: {
        fontSize: 18,
        textAlign: 'left',
        margin: 6,
    },
    menuitem14: {
        fontSize: 14,
        textAlign: 'left',
        marginLeft: 6,
        flexWrap: 'wrap',
    },
    rowEndContainer: {
        flex: 1,
        // width: '20%',
        alignItems: 'flex-end',
    },
});
|
jdekarske/astrobee
|
localization/imu_integration/include/imu_integration/imu_integrator_params.h
|
/* Copyright (c) 2017, United States Government, as represented by the
* Administrator of the National Aeronautics and Space Administration.
*
* All rights reserved.
*
* The Astrobee platform is licensed under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with the
* License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
#ifndef IMU_INTEGRATION_IMU_INTEGRATOR_PARAMS_H_
#define IMU_INTEGRATION_IMU_INTEGRATOR_PARAMS_H_
#include <imu_integration/imu_filter_params.h>
#include <gtsam/base/Vector.h>
#include <gtsam/geometry/Pose3.h>
#include <string>
namespace imu_integration {
// Configuration for IMU measurement preintegration.
struct ImuIntegratorParams {
  gtsam::Vector3 gravity;   // gravity vector used during preintegration
  gtsam::Pose3 body_T_imu;  // NOTE(review): presumably the IMU-in-body extrinsic (gtsam a_T_b naming) -- confirm
  ImuFilterParams filter;   // filtering applied to raw IMU samples
  // From gtsam: Angular and velocity random walk expressed in degrees respectively m/s per sqrt(hr).
  double gyro_sigma;
  double accel_sigma;
  double accel_bias_sigma;       // accelerometer bias random walk
  double gyro_bias_sigma;        // gyroscope bias random walk
  double integration_variance;   // uncertainty added by position integration
  double bias_acc_omega_int;     // uncertainty of the bias estimate used for integration
};
} // namespace imu_integration
#endif // IMU_INTEGRATION_IMU_INTEGRATOR_PARAMS_H_
|
o-ran-sc/ric-app-kpimon
|
asn1c_defs/all-defs/UE-ContextKeptIndicator.h
|
<reponame>o-ran-sc/ric-app-kpimon<filename>asn1c_defs/all-defs/UE-ContextKeptIndicator.h
/*
* Generated by asn1c-0.9.29 (http://lionet.info/asn1c)
* From ASN.1 module "X2AP-IEs"
* found in "../../asn_defs/asn1/x2ap-modified-15-05.asn"
* `asn1c -fcompound-names -fno-include-deps -findirect-choice -gen-PER -no-gen-OER`
*/
#ifndef _UE_ContextKeptIndicator_H_
#define _UE_ContextKeptIndicator_H_
#include <asn_application.h>
/* Including external dependencies */
#include <NativeEnumerated.h>
#ifdef __cplusplus
extern "C" {
#endif
/* Dependencies */
/* NOTE: this file is generated by asn1c (see the header comment); prefer
 * regenerating from the ASN.1 module over editing by hand. */
typedef enum UE_ContextKeptIndicator {
	UE_ContextKeptIndicator_true	= 0
	/*
	 * Enumeration is extensible
	 */
} e_UE_ContextKeptIndicator;

/* UE-ContextKeptIndicator */
/* The wire value; named enumerators above give its known meanings. */
typedef long	 UE_ContextKeptIndicator_t;

/* Implementation */
extern asn_TYPE_descriptor_t asn_DEF_UE_ContextKeptIndicator;
asn_struct_free_f UE_ContextKeptIndicator_free;
asn_struct_print_f UE_ContextKeptIndicator_print;
asn_constr_check_f UE_ContextKeptIndicator_constraint;
ber_type_decoder_f UE_ContextKeptIndicator_decode_ber;
der_type_encoder_f UE_ContextKeptIndicator_encode_der;
xer_type_decoder_f UE_ContextKeptIndicator_decode_xer;
xer_type_encoder_f UE_ContextKeptIndicator_encode_xer;
per_type_decoder_f UE_ContextKeptIndicator_decode_uper;
per_type_encoder_f UE_ContextKeptIndicator_encode_uper;
per_type_decoder_f UE_ContextKeptIndicator_decode_aper;
per_type_encoder_f UE_ContextKeptIndicator_encode_aper;
#ifdef __cplusplus
}
#endif
#endif /* _UE_ContextKeptIndicator_H_ */
#include <asn_internal.h>
|
qhl0505/dmd
|
dmd-admin/src/main/java/com/dmd/mall/dto/SmsFlashPromotionProduct.java
|
package com.dmd.mall.dto;
import com.dmd.mall.model.PmsProduct;
import com.dmd.mall.model.SmsFlashPromotionProductRelation;
import lombok.Getter;
import lombok.Setter;
/**
 * Flash-promotion item: a promotion/product relation enriched with the full
 * product details. (Translated from the original Chinese comment.)
 * Created by macro on 2018/11/16.
 */
public class SmsFlashPromotionProduct extends SmsFlashPromotionProductRelation {
    /** The full product record behind this flash-promotion entry. */
    @Getter
    @Setter
    private PmsProduct product;
}
|
ScignScape-RZ/phcg
|
cpp/src/rz/rz-kauvir/rz-code-generators/rpi/rpi-output.h
|
// Copyright <NAME> 2019.
// Distributed under the Boost Software License, Version 1.0.
// (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
#ifndef RPI_OUTPUT__H
#define RPI_OUTPUT__H
#include "accessors.h"
#include "flags.h"
#include <QString>
#include <QMap>
#include <QTextStream>
#include "rzns.h"
#include "relae-graph/relae-caon-ptr.h"
#include "rz-function-def/rz-function-def-syntax.h"
#include "phr-graph-core/kernel/graph/pgb-ir-build.h"
RZNS_(GBuild)
class RZ_Lisp_Graph_Visitor;
class RZ_Graph_Visitor_Phaon;
_RZNS(GBuild)
USING_RZNS(GBuild)
RZNS_(PhrGraphCore)
class PGB_IR_Build;
_RZNS(PhrGraphCore)
USING_RZNS(PhrGraphCore)
RZNS_(GVal)
class RPI_Block;
// Accumulates the PGB IR "step forms" produced while visiting an RZ graph
// and can serialize them to a text stream or build the Phaon graph from them.
class RPI_Output
{
 QList<PGB_IR_Build::Text_With_Purpose> step_forms_;  // collected IR forms

 RZ_Graph_Visitor_Phaon& visitor_phaon_;  // reference held, not owned
 RZ_Lisp_Graph_Visitor& visitor();

 caon_ptr<RPI_Block> top_level_block_;    // root block of the output tree

 void init_function_def_syntax();

public:

 RPI_Output(RZ_Graph_Visitor_Phaon& visitor_phaon);

 ACCESSORS__RGET(QList<PGB_IR_Build::Text_With_Purpose> ,step_forms)

 // Serializes the accumulated step forms to the stream.
 void write(QTextStream& qts);

 void init_top_level_block(PGB_IR_Build& pgb);
 void build_phaon_graph(PGB_IR_Build& pgb);

};
_RZNS(GVal)
#endif //RPI_OUTPUT__H
|
usi-systems/cc
|
ns-allinone-3.35/ns-3.35/src/core/examples/main-test-sync.cc
|
/* -*- Mode:C++; c-file-style:"gnu"; indent-tabs-mode:nil; -*- */
/*
* Copyright (c) 2008 University of Washington
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License version 2 as
* published by the Free Software Foundation;
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#include "ns3/command-line.h"
#include "ns3/simulator.h"
#include "ns3/realtime-simulator-impl.h"
#include "ns3/nstime.h"
#include "ns3/log.h"
#include "ns3/system-thread.h"
#include "ns3/string.h"
#include "ns3/config.h"
#include "ns3/global-value.h"
#include "ns3/ptr.h"
#include <chrono> // seconds, milliseconds
#include <thread> // sleep_for
/**
* \file
* \ingroup core-examples
* \ingroup scheduler
* An example of scheduling events in a background thread.
*
* See \ref ns3::SystemThread,
* \ref ns3::SimulatorImpl::ScheduleWithContext
*/
using namespace ns3;
NS_LOG_COMPONENT_DEFINE ("TestSync");
namespace {
/** Check that the event functions run in the intended order. */
bool gFirstRun = false;

/** An event method called many times from the background thread. */
void
inserted_function (void)
{
  // first_function must already have run; this is the ordering under test.
  NS_ASSERT (gFirstRun);
  NS_LOG_UNCOND ("inserted_function() called at " <<
                 Simulator::Now ().GetSeconds () << " s");
}

/** An event method called many times from the main thread. */
void
background_function (void)
{
  NS_ASSERT (gFirstRun);
  NS_LOG_UNCOND ("background_function() called at " <<
                 Simulator::Now ().GetSeconds () << " s");
}

/** An event method called once from the main thread. */
void
first_function (void)
{
  NS_LOG_UNCOND ("first_function() called at " <<
                 Simulator::Now ().GetSeconds () << " s");
  gFirstRun = true;
}
/** Example class with a method for the background task. */
class FakeNetDevice
{
public:
  /** Constructor. */
  FakeNetDevice ();
  /** The thread entry point; schedules events from off the main thread. */
  void Doit3 (void);
};

FakeNetDevice::FakeNetDevice ()
{
  NS_LOG_FUNCTION_NOARGS ();
}
// Runs on the background thread: after a short delay, injects 10000 events
// into the running realtime simulator at roughly 1 ms intervals.
void
FakeNetDevice::Doit3 (void)
{
  NS_LOG_FUNCTION_NOARGS ();
  // Give the main thread time to start the simulator before injecting.
  std::this_thread::sleep_for (std::chrono::seconds (1));
  for (uint32_t i = 0; i < 10000; ++i)
    {
      //
      // Exercise the realtime relative now path
      //
      Simulator::ScheduleWithContext (Simulator::NO_CONTEXT, Seconds (0.0), MakeEvent (&inserted_function));
      std::this_thread::sleep_for (std::chrono::milliseconds (1));
    }
}
/**
 * Example use of ns3::SystemThread.
 *
 * This example is a complete simulation.
 * It schedules \c first_function and many executions of \c background_function
 * to execute in the main (foreground) thread. It also launches a background
 * thread with an instance of FakeNetDevice, which schedules many instances of
 * \c inserted_function.
 */
void
test (void)
{
  GlobalValue::Bind ("SimulatorImplementationType",
                     StringValue ("ns3::RealtimeSimulatorImpl"));

  FakeNetDevice fnd;

  //
  // Make sure ScheduleNow works when the system isn't running
  //
  Simulator::ScheduleWithContext (0xffffffff, Seconds (0.0), MakeEvent (&first_function));

  //
  // drive the progression of m_currentTs at a ten millisecond rate from the main thread
  //
  for (double d = 0.; d < 14.999; d += 0.01)
    {
      Simulator::Schedule (Seconds (d), &background_function);
    }

  Ptr<SystemThread> st3 = Create<SystemThread> (
      MakeCallback (&FakeNetDevice::Doit3, &fnd));
  st3->Start ();

  Simulator::Stop (Seconds (15.0));
  Simulator::Run ();
  // Wait for the injector thread before tearing the simulator down.
  st3->Join ();
  Simulator::Destroy ();
}
} // unnamed namespace
int
main (int argc, char *argv[])
{
  CommandLine cmd (__FILE__);
  cmd.Parse (argc, argv);

  // NOTE(review): runs the example forever; presumably intentional as a
  // soak test of the realtime scheduler -- confirm before reusing this code.
  while (true)
    {
      test ();
    }
}
|
crazyvalse/algorithms
|
src/11-binary-tree/11-binary-search-tree-n/02/m-98/or/01.test.js
|
const isValidBST = require('./01')
const { arrayToTreeNode } = require('../../../utils')
// LeetCode 98 fixtures: arrays are level-order tree encodings, null = gap.
test('1', () => {
  // Subtree values (3, 6) under node 4 violate the BST bounds set by root 5.
  const root = arrayToTreeNode([5, 1, 4, null, null, 3, 6])
  expect(isValidBST(root)).toEqual(false)
})
test('2', () => {
  // Minimal valid BST.
  const root = arrayToTreeNode([2, 1, 3])
  expect(isValidBST(root)).toEqual(true)
})
test('3', () => {
  // Duplicates are not allowed in a strict BST.
  const root = arrayToTreeNode([1, 1])
  expect(isValidBST(root)).toEqual(false)
})
|
smaclell/sharebibles-lite
|
actions/i18n.js
|
<filename>actions/i18n.js<gh_stars>1-10
import I18n from '../assets/i18n/i18n';
export default I18n;
// Redux action type dispatched after the locale has changed.
export const UPDATE_LOCALE = 'UPDATE_LOCALE';

// Thunk: applies the new locale to the I18n helper first, then notifies the
// store so connected components re-render with translated strings.
export const updateLocale = (locale) => (dispatch) => {
  I18n.updateLocale(locale);
  dispatch({
    type: UPDATE_LOCALE,
    locale,
  });
};
|
niki4/leetcode_go
|
easy/163_missing_ranges.go
|
package main
import "fmt"
// addRange formats the run of numbers strictly between lower and upper:
// a single number when exactly one value is missing, otherwise "first->last".
func addRange(lower, upper int) string {
	if lower+2 == upper { // exactly one number missing
		return fmt.Sprint(lower + 1)
	}
	// Idiomatic Go: no else after a terminating return.
	return fmt.Sprintf("%d->%d", lower+1, upper-1)
}
// Time complexity : O(N), where N is the length of the input array.
// Space complexity : O(N) if we take the output into account and O(1) otherwise

// findMissingRanges lists, as strings, every maximal run of numbers inside
// [lower, upper] that is absent from the sorted slice nums. A run of one is
// rendered as "n", longer runs as "first->last".
func findMissingRanges(nums []int, lower int, upper int) []string {
	// Sentinels just outside the interval let one loop handle both edges.
	bounded := make([]int, 0, len(nums)+2)
	bounded = append(bounded, lower-1)
	bounded = append(bounded, nums...)
	bounded = append(bounded, upper+1)

	result := make([]string, 0)
	for i := 1; i < len(bounded); i++ {
		prev, curr := bounded[i-1], bounded[i]
		switch gap := curr - prev; {
		case gap == 2:
			// Exactly one value missing between prev and curr.
			result = append(result, fmt.Sprint(prev+1))
		case gap > 2:
			result = append(result, fmt.Sprintf("%d->%d", prev+1, curr-1))
		}
	}
	return result
}
|
mgsx-dev/gdx-kit
|
core/src/net/mgsx/game/plugins/editor/systems/BindingSystem.java
|
package net.mgsx.game.plugins.editor.systems;
import com.badlogic.ashley.core.Engine;
import com.badlogic.ashley.core.EntitySystem;
import com.badlogic.gdx.utils.ObjectMap.Entry;
import net.mgsx.game.core.EditorScreen;
import net.mgsx.game.core.annotations.Editable;
import net.mgsx.game.core.annotations.EditableSystem;
import net.mgsx.game.core.annotations.Storable;
import net.mgsx.game.core.binding.Binding;
import net.mgsx.game.core.binding.BindingManager;
import net.mgsx.game.core.storage.SystemSettingsListener;
@Storable(value="core.bindings", auto=true)
@EditableSystem()
public class BindingSystem extends EntitySystem implements SystemSettingsListener
{
    /** Serialized bindings, one entry per binding, encoded as "target:command". */
    @Editable
    public String [] bindings = {};

    private EditorScreen editor;

    public BindingSystem(EditorScreen editor) {
        super();
        this.editor = editor;
    }

    @Override
    public void addedToEngine(Engine engine)
    {
        super.addedToEngine(engine);
    }

    @Override
    public void removedFromEngine(Engine engine) {
        // Drop all registered bindings when this system leaves the engine.
        BindingManager.clear();
        super.removedFromEngine(engine);
    }

    @Override
    public void onSettingsLoaded()
    {
        // Rebuild the binding manager from the serialized "target:command"
        // strings (split on the first ':' only, so commands may contain ':').
        BindingManager.clear();
        if(bindings != null)
        for(String str : bindings)
        {
            String [] strs = str.split(":", 2);
            Binding b = new Binding();
            b.target = strs[0];
            b.command = strs[1];
            BindingManager.applyBindings(b, editor.stage);
        }
    }

    @Override
    public void beforeSettingsSaved()
    {
        // Snapshot the live bindings back into the serializable string array.
        bindings = new String[BindingManager.bindings().size];
        int i=0;
        for(Entry<String, Binding> entry : BindingManager.bindings()){
            bindings[i++] = entry.key + ":" + entry.value.command;
        }
    }
}
|
kaydoh/geoq
|
geoq/workflow/admin.py
|
<filename>geoq/workflow/admin.py
# -*- coding: UTF-8 -*-
from django.contrib import admin
from .models import Role, Workflow, State, Transition, EventType, Event
class RoleAdmin(admin.ModelAdmin):
    """
    Role administration
    """
    list_display = ['name', 'description']
    search_fields = ['name', 'description']
    save_on_top = True


class WorkflowAdmin(admin.ModelAdmin):
    """
    Workflow administration
    """
    list_display = ['name', 'description', 'status', 'created_on', 'created_by',
                    'cloned_from']
    search_fields = ['name', 'description']
    save_on_top = True
    # These two are set programmatically, so keep them off the edit form.
    exclude = ['created_on', 'cloned_from']
    list_filter = ['status']


class StateAdmin(admin.ModelAdmin):
    """
    State administration
    """
    list_display = ['name', 'description', 'color']
    search_fields = ['name', 'description']
    save_on_top = True


class TransitionAdmin(admin.ModelAdmin):
    """
    Transition administration
    """
    list_display = ['name', 'from_state', 'to_state']
    search_fields = ['name',]
    save_on_top = True


class EventTypeAdmin(admin.ModelAdmin):
    """
    EventType administration
    """
    list_display = ['name', 'description']
    save_on_top = True
    search_fields = ['name', 'description']


class EventAdmin(admin.ModelAdmin):
    """
    Event administration
    """
    list_display = ['name', 'description', 'workflow', 'state', 'is_mandatory']
    save_on_top = True
    search_fields = ['name', 'description']
    list_filter = ['event_types', 'is_mandatory']


# Register each workflow model with its admin class above.
admin.site.register(Role, RoleAdmin)
admin.site.register(Workflow, WorkflowAdmin)
admin.site.register(State, StateAdmin)
admin.site.register(Transition, TransitionAdmin)
admin.site.register(EventType, EventTypeAdmin)
admin.site.register(Event, EventAdmin)
|
98llm/tir-script-samples
|
Protheus_WebApp/Modules/SIGAPLS/PLSA090TESTCASE.py
|
import unittest
import time
from tir import Webapp
from datetime import datetime
DateSystem = datetime.today().strftime('%d/%m/%Y')
class PLSA090(unittest.TestCase):
    """TIR UI tests for Protheus routine PLSA090 (SIGAPLS health-plan
    authorizations): authorize a consultation, an SADT, an SADT complement,
    and delete an authorization. Original comments translated from Portuguese.
    """

    @classmethod
    def setUpClass(inst):
        # Open the Protheus web app and set the parameters the routine needs.
        inst.oHelper = Webapp()
        inst.oHelper.Setup("SIGAPLS", DateSystem, "T1", "M SP 01 ", "33")
        inst.oHelper.Program("PLSA094A")
        inst.oHelper.AddParameter("MV_PLEVSAD", "", "1", "1", "1")  # Enables SADT evolution
        inst.oHelper.AddParameter("MV_PLSSOOL", "", "1", "1", "1")  # Enables on-line authorization
        inst.oHelper.AddParameter("MV_PLCALPG", "", "2", "2", "2")  # New payment calendar
        inst.oHelper.SetParameters()

    #//-------------------------------------------------------------------
    # {Protheus.doc} test_PLSA090_PL001
    # Test 01 - Consultation authorized
    # @author vinicius.queiros
    # @since 02/10/2020
    # @version 12
    # @see
    #//-------------------------------------------------------------------
    def test_PLSA090_PL001(self):
        # Test data
        matricula = "00019875000001038"
        rda = "000004"
        cid = "Z000"
        solicitante = "013500"
        tiposaida = "1 - Retorno"
        TipoAtendimento = "04 - Consulta"
        procedimento = "10101012"
        status = "1 - Autorizada"

        self.oHelper.SetButton('Incluir')
        self.oHelper.SetBranch("M SP 01 ")
        self.oHelper.SetValue('BE1_USUARI', matricula)
        self.oHelper.SetValue('BE1_CODRDA', rda)
        self.oHelper.SetValue('BE1_CID', cid)
        self.oHelper.SetValue('BE1_REGSOL', solicitante)
        self.oHelper.SetValue('BE1_TIPSAI', tiposaida)
        self.oHelper.SetValue('BE1_TIPATE', TipoAtendimento)
        self.oHelper.ClickGridCell("Cd. Proc.")
        self.oHelper.SetKey("ENTER", grid=True)
        self.oHelper.SetValue('BE2_CODPRO', procedimento)
        # The procedure must come back already authorized.
        self.oHelper.CheckResult("BE2_STATUS", status)
        self.oHelper.SetButton('Salvar')
        self.oHelper.SetButton('Salvar')
        self.oHelper.SetButton('Ok')
        self.oHelper.AssertTrue()

    #//-------------------------------------------------------------------
    # {Protheus.doc} test_PLSA090_PL002
    # Test 02 - SADT authorized
    # @author vinicius.queiros
    # @since 02/10/2020
    # @version 12
    # @see
    #//-------------------------------------------------------------------
    def test_PLSA090_PL002(self):
        # Test data
        matricula = "00019875000001038"
        rda = "000004"
        cid = "Z000"
        solicitante = "013500"
        tiposaida = "2 - Retorno com SADT"
        TipoAtendimento = "05 - Exame"
        procedimento = "40304361"
        status = "1 - Autorizada"

        self.oHelper.SetValue('BE1_USUARI', matricula)
        self.oHelper.SetValue('BE1_CODRDA', rda)
        self.oHelper.SetValue('BE1_CID', cid)
        self.oHelper.SetValue('BE1_REGSOL', solicitante)
        self.oHelper.SetValue('BE1_TIPSAI', tiposaida)
        self.oHelper.SetValue('BE1_TIPATE', TipoAtendimento)
        self.oHelper.ClickGridCell("Cd. Proc.")
        self.oHelper.SetKey("ENTER", grid=True)
        self.oHelper.SetValue('BE2_CODPRO', procedimento)
        self.oHelper.CheckResult("BE2_STATUS", status)
        self.oHelper.SetButton('Salvar')
        self.oHelper.SetButton('Salvar')
        self.oHelper.SetButton('Ok')
        self.oHelper.SetButton('Cancelar')
        self.oHelper.AssertTrue()

    #//-------------------------------------------------------------------
    # {Protheus.doc} test_PLSA090_PL003
    # Test 03 - SADT complement - authorized
    # @author vinicius.queiros
    # @since 02/10/2020
    # @version 12
    # @see
    #//-------------------------------------------------------------------
    def test_PLSA090_PL003(self):
        # Test data
        procedimento = "40303136"
        status = "1 - Autorizada"
        responsavel = "Responsavel Teste 03"
        chave = "M SP 000120201000000001"

        self.oHelper.SearchBrowse(f'{chave}', key=1, index=True)
        self.oHelper.SetButton('Outras Ações',sub_item='Evolução Sadt')
        self.oHelper.ClickFolder("Complem. SADT")
        self.oHelper.ClickGridCell("Cd Proc Prin")
        self.oHelper.SetKey("ENTER", grid=True)
        self.oHelper.SetValue('BQV_CODPRO', procedimento)
        self.oHelper.SetValue('BQV_RESAUT', responsavel)
        self.oHelper.CheckResult("BQV_STATUS", status)
        self.oHelper.SetButton('Salvar')
        self.oHelper.SetButton('Confirmar')
        self.oHelper.AssertTrue()

    #//-------------------------------------------------------------------
    # {Protheus.doc} test_PLSA090_PL004
    # Test 04 - Deletion - consultation
    # @author vinicius.queiros
    # @since 02/10/2020
    # @version 12
    # @see
    #//-------------------------------------------------------------------
    def test_PLSA090_PL004(self):
        # Test data
        chave = "M SP 000120201000000002"

        self.oHelper.SearchBrowse(f'{chave}', key=1, index=True)
        self.oHelper.SetButton('Outras Ações',sub_item='Excluir')
        self.oHelper.SetButton('Confirmar')
        self.oHelper.AssertTrue()

    @classmethod
    def tearDownClass(inst):
        # Close the Protheus session opened in setUpClass.
        inst.oHelper.TearDown()
if __name__ == "__main__":
unittest.main()
|
NerdThings/Ngine
|
src/Graphics/Image.h
|
<filename>src/Graphics/Image.h
/**********************************************************************************************
*
* Ngine - The 2D game engine.
*
* Copyright (C) 2019 NerdThings
*
* LICENSE: Apache License 2.0
* View: https://github.com/NerdThings/Ngine/blob/master/LICENSE
*
**********************************************************************************************/
#ifndef IMAGE_H
#define IMAGE_H
#include "../Ngine.h"
#include "../Filesystem/Filesystem.h"
#include "../Resource.h"
namespace NerdThings::Ngine::Graphics {
/*
 * Image/Texture Pixel Format.
 * Corresponds with OpenGL Abstraction values.
 */
enum PixelFormat {
    // Uncompressed formats; starts at 1 so 0 can mean "unset".
    UNCOMPRESSED_GRAYSCALE = 1,
    UNCOMPRESSED_GRAY_ALPHA,
    UNCOMPRESSED_R5G6B5,
    UNCOMPRESSED_R8G8B8,
    UNCOMPRESSED_R5G5B5A1,
    UNCOMPRESSED_R4G4B4A4,
    UNCOMPRESSED_R8G8B8A8,
    UNCOMPRESSED_R32,
    UNCOMPRESSED_R32G32B32,
    UNCOMPRESSED_R32G32B32A32,
    // Block-compressed GPU formats (DXT/ETC/PVRTC/ASTC families).
    COMPRESSED_DXT1_RGB,
    COMPRESSED_DXT1_RGBA,
    COMPRESSED_DXT3_RGBA,
    COMPRESSED_DXT5_RGBA,
    COMPRESSED_ETC1_RGB,
    COMPRESSED_ETC2_RGB,
    COMPRESSED_ETC2_EAC_RGBA,
    COMPRESSED_PVRT_RGB,
    COMPRESSED_PVRT_RGBA,
    COMPRESSED_ASTC_4x4_RGBA,
    COMPRESSED_ASTC_8x8_RGBA
};
/*
 * An image stored in CPU memory.
 */
struct NEAPI Image : public IResource {
    // Public Fields

    /*
     * Pixel format
     */
    PixelFormat Format = UNCOMPRESSED_GRAYSCALE;

    /*
     * Image height
     */
    int Height = 0;

    /*
     * Image mipmaps
     */
    int Mipmaps = 0;

    /*
     * The raw pixel data pointer
     */
    unsigned char *PixelData = nullptr;

    /*
     * Image width
     */
    int Width = 0;

    // Public Constructors

    /*
     * Create a null image
     */
    Image();

    /*
     * Load an image file
     */
    Image(const Filesystem::Path &path_);

    /*
     * Create an image from raw pixel data.
     * The pixel data will be copied.
     */
    Image(unsigned char *pixelData_, int width_, int height_, PixelFormat format_);

    // Public Methods

    /*
     * Test whether or not the image is valid
     */
    bool IsValid() const override;

    /*
     * Load an image
     */
    static Image *LoadImage(const Filesystem::Path &path_);

    /*
     * Load raw pixel data.
     * The pixel data will be copied.
     */
    static Image *LoadPixels(unsigned char *pixelData_, int width_, int height_, PixelFormat format_);

    /*
     * Unload image from memory.
     */
    void Unload() override;
};
}
#endif //IMAGE_H
|
sebastien-belin-adp/interlok-fx-installer
|
src/main/java/com/adaptris/installer/helpers/GradleBuildRunner.java
|
package com.adaptris.installer.helpers;
import java.io.OutputStream;
import java.nio.file.Path;

import org.gradle.tooling.BuildLauncher;
import org.gradle.tooling.GradleConnector;
import org.gradle.tooling.ProjectConnection;
import org.gradle.tooling.events.ProgressListener;
/**
 * Runs "clean assemble" on a Gradle project directory via the Gradle
 * Tooling API, streaming build output to the configured streams/listener.
 */
public class GradleBuildRunner {

    private static final String CLEAN_TASK = "clean";
    private static final String ASSEMBLE_TASK = "assemble";

    private OutputStream standardOutput;
    private OutputStream standardError;
    private ProgressListener progressListener;

    public GradleBuildRunner(OutputStream standardOutput, OutputStream standardError, ProgressListener progressListener) {
        this.standardOutput = standardOutput;
        this.standardError = standardError;
        this.progressListener = progressListener;
    }

    /**
     * Executes the clean and assemble tasks for the project located in the
     * given directory.
     *
     * @param buildGradleDirPath directory containing the Gradle build script
     */
    public void run(Path buildGradleDirPath) {
        GradleConnector connector = GradleConnector.newConnector().forProjectDirectory(buildGradleDirPath.toFile());
        // Fix: the ProjectConnection returned by connect() was never closed,
        // leaking a daemon connection on every build.
        ProjectConnection connection = connector.connect();
        try {
            BuildLauncher buildLauncher = connection.newBuild()
                .setStandardOutput(standardOutput)
                .setStandardError(standardError)
                .addProgressListener(progressListener)
                .setColorOutput(true);
            buildLauncher.forTasks(CLEAN_TASK, ASSEMBLE_TASK).run();
        } finally {
            connection.close();
        }
    }
}
|
alby-corp/ultimate-pizza-manager
|
app/core/pipes/component-builder.pipe.js
|
import {Pipe} from "./pipe";
import {
ActivatedGuardCheckerStep,
ComponentFirerStep,
ComponentRenderStep,
OutletValidatorStep
} from "../steps";
/**
 * Pipe that builds and fires a routed component: validates the outlet,
 * checks activation guards, renders the component, then fires it.
 */
export class ComponentBuilderPipe extends Pipe {
    constructor(config) {
        // Steps run in order: validate outlet -> check guards -> render -> fire.
        const steps = [
            OutletValidatorStep,
            ActivatedGuardCheckerStep,
            ComponentRenderStep,
            ComponentFirerStep
        ];
        super(config, steps);
    }
}
|
Javabyh/Graduation_Project
|
byh-frarmwork/common-entitys/src/main/java/com/aiit/byh/service/common/entity/validate/Number.java
|
<filename>byh-frarmwork/common-entitys/src/main/java/com/aiit/byh/service/common/entity/validate/Number.java
/**
* @Title: Number.java
* @Package com.aiit.byhc.basic.validation
* @author sdwan
* @date 2014-4-18
* @version V1.0
*/
package com.aiit.byh.service.common.entity.validate;
import javax.validation.Constraint;
import javax.validation.Payload;
import java.lang.annotation.*;
/**
 * @Description: numeric-only format validation (value must consist of digits;
 *               actual check is implemented in {@code NumberValidator}).
 *
 */
@Target( { ElementType.METHOD, ElementType.FIELD, ElementType.ANNOTATION_TYPE })
@Retention(RetentionPolicy.RUNTIME)
@Constraint(validatedBy = NumberValidator.class)
@Documented
public @interface Number{
    // Inclusive lower bound for the validated number.
    int min() default 0;
    // Inclusive upper bound for the validated number.
    int max() default Integer.MAX_VALUE;
    // No default: callers are forced to supply an explicit violation message.
    String message();
    Class<?>[] groups() default {};
    Class<? extends Payload>[] payload() default {};
}
|
andre-becker/neodymium-library
|
src/test/java/com/xceptance/neodymium/testclasses/context/cucumbercontextclear/CucumberContextSteps.java
|
package com.xceptance.neodymium.testclasses.context.cucumbercontextclear;
import org.junit.Assert;
import com.xceptance.neodymium.util.Neodymium;
import com.xceptance.neodymium.util.NeodymiumConfiguration;
import com.xceptance.neodymium.util.WebDriverUtils;
import cucumber.api.Scenario;
import cucumber.api.java.After;
import cucumber.api.java.Before;
import cucumber.api.java.en.Given;
/**
 * Cucumber step definitions that verify the Neodymium context (here: the
 * Selenide timeout) is reset to its default between scenarios.
 */
public class CucumberContextSteps
{
    /** Opens a headless Chrome session before each scenario. */
    @Before
    public void beforeTest()
    {
        // setup browser since we set our selenide defaults only if a browser is involved
        WebDriverUtils.setUp("Chrome_headless");
    }

    /** Shuts the browser down after each scenario. */
    @After(order = 100)
    public void teardown(Scenario scenario)
    {
        WebDriverUtils.tearDown(scenario);
    }

    /** Asserts the 3000 ms default, then raises the timeout to 4000 ms. */
    @Given("Change timeout to 4000")
    public void testAndChangeDefaultTimeout()
    {
        NeodymiumConfiguration cfg = Neodymium.configuration();
        Assert.assertEquals(3000, cfg.selenideTimeout());
        cfg.setProperty("neodymium.selenide.timeout", "4000");
        Assert.assertEquals(4000, cfg.selenideTimeout());
    }

    /** Asserts the timeout is back at the 3000 ms default. */
    @Given("Assert timeout of 3000")
    public void testDefaultTimeout()
    {
        Assert.assertEquals(3000, Neodymium.configuration().selenideTimeout());
    }
}
|
LorenzBuehmann/SANSA-Stack
|
sansa-ml/sansa-ml-spark/src/main/scala/net/sansa_stack/ml/spark/featureExtraction/FeatureExtractingSparqlGenerator.scala
|
<filename>sansa-ml/sansa-ml-spark/src/main/scala/net/sansa_stack/ml/spark/featureExtraction/FeatureExtractingSparqlGenerator.scala
package net.sansa_stack.ml.spark.featureExtraction
import java.nio.charset.StandardCharsets
import java.nio.file.{Files, Paths}
import net.sansa_stack.ml.spark.utils.{ConfigResolver, SPARQLQuery}
import net.sansa_stack.rdf.common.io.riot.error.{ErrorParseMode, WarningParseMode}
import org.apache.jena.graph.{Node, NodeFactory, Triple}
import org.apache.jena.riot.RDFLanguages
import org.apache.spark.sql.functions._
import org.apache.spark.sql.{DataFrame, Dataset, Encoder, Encoders, Row, SparkSession}
import net.sansa_stack.rdf.spark.io._
import net.sansa_stack.rdf.spark.model._
import scala.collection.mutable.ListBuffer
import scala.util.control.Breaks.{break, breakable}
import net.sansa_stack.query.spark.SPARQLEngine
import org.apache.spark.rdd.RDD
object FeatureExtractingSparqlGenerator {
  // Placeholder substituted for every literal object so paths can terminate uniformly.
  val _literalReplacementString = "literalReplacement"
  val _literalReplacement = NodeFactory.createLiteral(_literalReplacementString)

  /**
   * create on string level the seed fetching query
   *
   * @param seedVarName projection var name for seed element
   * @param seedWhereClause where clause how seed can be fetched
   * @param sortedByLinks boolean value if seeds should be ordered by outgoing links in desc order or fifo seeds
   * @return string representing the seed fetching sparql query
   */
  def createSeedFetchingSparql(seedVarName: String, seedWhereClause: String, sortedByLinks: Boolean): String = {
    val seedFetchingSparql = sortedByLinks match {
      case true => f"SELECT DISTINCT $seedVarName \nWHERE { $seedWhereClause \n\tOptional { $seedVarName ?p ?o. } } \ngroup by $seedVarName ORDER BY DESC ( count(?p) ) "
      case false => f"SELECT DISTINCT $seedVarName \n WHERE { $seedWhereClause} "
    }
    // TODO make log println(f"the generated seed fetching sparql is:\n$seedFetchingSparql")
    seedFetchingSparql
  }

  /**
   * creates dataframe for traversing over join
   *
   * @param df dataframe representing entire graph
   * @return dataframes for traversing up (which is same as df and down which is up flipped and added the traverse direction column)
   */
  def createDataframesToTraverse(df: DataFrame): (DataFrame, DataFrame) = {
    // NOTE(review): toDF returns a NEW DataFrame; this result is discarded, so the
    // line below is a no-op. It only works because callers already pass a df with
    // columns s/p/o — confirm and either assign or drop this line.
    df.toDF(Seq("s", "p", "o"): _*)
    // df.printSchema() TODO be aware that we are operating sometimes on string sometimes on apache jena node level
    // down
    val down: DataFrame = df.withColumn("dir", typedLit("down"))
    // up
    implicit val nodeTupleEncoder = Encoders.kryo(classOf[(Node, Node, Node)])
    // String-level literal check: RDF literals serialize starting with a quote.
    val isNotLiteral = udf((n: String) => {
      !n.startsWith("\"")
      // !n.asInstanceOf[Node].isLiteral()
    })
    val up: DataFrame = df
      .toDF(Seq("o", "p", "s"): _*)
      .withColumn("dir", typedLit("up"))
      .where(isNotLiteral(col("s"))) // TODO would be good to operate here on node and not on string level
    (up, down)
  }

  /**
   * traverses a tree by joining dataframes of current paths and traversable hops
   *
   * @param paths current paths initially started at seeds
   * @param traverseDf the dataframe giving traversal opportunities
   * @param iterationLimit how deep to traverse or how often join should be performed max
   * @param traverseDirection direction whether up or down
   * @param numberRandomWalks if > 0, paths are down-sampled to roughly this many per hop
   * @return the traversed dataframe with current paths after traverse up, and paths ending with literals after traverse down
   */
  def traverse(paths: DataFrame, traverseDf: DataFrame, iterationLimit: Int, traverseDirection: String, numberRandomWalks: Int = 0): DataFrame = {
    val spark: SparkSession = SparkSession.builder()
      .getOrCreate()
    var dataFramesWithOpenEnd: DataFrame = paths
    var dataframeWithLiteralEnd: DataFrame = spark.emptyDataFrame
    var currentPaths: DataFrame = paths.cache()
    traverseDf.cache()
    breakable {
      for (iteration <- 0 to (iterationLimit - 1)) {
        // set iterators
        val columnName = f"n_$iteration"
        val iterationPlusOne: Int = iteration + 1
        val columnNamePlusOne = f"n_$iterationPlusOne"
        // paths to merge
        val left: DataFrame = currentPaths
        // here we partially simulate random walk behavior
        val right: DataFrame = traverseDf.toDF(Seq(f"n_$iteration", f"p_$iteration", f"n_$iterationPlusOne", f"dir_$iteration"): _*)
        // this joins the next hop
        // here we partially simulate random walk behavior
        val joinedPaths: DataFrame = numberRandomWalks match {
          case 0 => left.join(right, columnName)
          case _ => left.join(right, columnName).sample(true, 2D * numberRandomWalks / traverseDf.count()).limit(numberRandomWalks)
        }
        // A path is "final" when its newest node is the literal placeholder.
        val isLiteral = udf((cellElement: String) => {
          if (cellElement == _literalReplacementString) true
          else false
        })
        // they end with not literal.
        currentPaths = joinedPaths.where(!isLiteral(col(columnNamePlusOne)))
        // final paths are paths which end with literal
        // this can only happen when traversing down
        val finalPaths = joinedPaths.where(isLiteral(col(columnNamePlusOne)))
        // filter out cyclic paths from currentPaths
        // a cycle exists iff some node repeats, i.e. the node list is longer than its set
        val noCycle = udf((row: Row) => {
          val l = row.toSeq.toList
            .filter(_ != None)
            .filter(_ != null)
          val lFromSet = l.toSet
          l.length == lFromSet.size
        })
        val nNamedColumns = currentPaths.columns.filter(_.startsWith("n_")).toList
        currentPaths = currentPaths.where(noCycle(struct(nNamedColumns.map(col): _*)))
        // println(s"$iteration filtered current paths")
        // append the paths we finally traversed until literal is reached
        // in up this will not happen
        if (finalPaths.count() > 0) {
          // align columns of the accumulated and the new final paths before union
          val recentColumns: Seq[String] = dataframeWithLiteralEnd.columns.toSeq
          val noneColumnsToAdd: Seq[String] = finalPaths.columns.toSeq.toSet.diff(recentColumns.toSet).toSeq
          var df1 = dataframeWithLiteralEnd
          for (c <- noneColumnsToAdd) df1 = df1.withColumn(c, lit(null: String))
          val df2 = finalPaths
          val df3 = df2.union(df1.select(df2.columns.map(col(_)): _*))
          dataframeWithLiteralEnd = df3
          // dataframeWithLiteralEnd = dataframeWithLiteralEnd.union(finalPaths)
        }
        if (currentPaths.count() == 0) {
          // println(f"no remaining paths are available so: $traverse_direction is done")
          break
        }
        // if we traverse up we change column names s.t. last element added is always in column n0 s.t. join in traverse down is easier
        if (traverseDirection == "up") {
          val tmpPaths: DataFrame = currentPaths
          val tmpColumns = tmpPaths.columns.toSeq
          // shift every column index down by the number of hops taken so far
          val newTmpColumns: Seq[String] = tmpColumns.map(c => {
            val currentNumber: Int = c.split("_").last.toInt
            val currentChars: String = c.split("_")(0)
            val newNumber = (currentNumber - iterationPlusOne).toString
            val newColumnName = currentChars + "_" + newNumber
            newColumnName
          })
          val recentColumns: Seq[String] = dataFramesWithOpenEnd.columns.toSeq
          val noneColumnsToAdd = newTmpColumns.toSet.diff(recentColumns.toSet).toSeq
          var df1 = dataFramesWithOpenEnd
          for (c <- noneColumnsToAdd) df1 = df1.withColumn(c, lit(null: String))
          val df2 = tmpPaths.toDF(newTmpColumns: _*)
          val df3 = df2.union(df1.select(df2.columns.map(col(_)): _*))
          dataFramesWithOpenEnd = df3
        }
      }
    }
    // optional down-sampling of literal-terminated paths for random-walk mode
    val dfLit = numberRandomWalks match {
      case 0 => dataframeWithLiteralEnd
      case _ => dataframeWithLiteralEnd.sample(true, 2D * numberRandomWalks / dataframeWithLiteralEnd.count()).limit(numberRandomWalks)
    }
    val returnDataframe = traverseDirection match {
      case "up" => dataFramesWithOpenEnd
      case "down" => dfLit
    }
    returnDataframe
  }

  /**
   * creates a string corresponding to an OPTIONAL block for where part in resulting sparql
   *
   * @param row row from dataframe created by traversing all paths
   * @param seedVarName name of seed projection var
   * @param featuresInOptionalBlocks whether each path is wrapped in its own OPTIONAL block
   * @return string representing OPTIONAL block and the final projection variable name
   */
  def rowToQuery(row: Row, seedVarName: String, featuresInOptionalBlocks: Boolean): (String, String) = {
    val nonNullRow: List[String] = row.toSeq.toList.filter(_ != None).filter(_ != null).asInstanceOf[List[String]]
    val lenRow: Int = nonNullRow.size
    // each hop occupies 3 extra cells: predicate, direction, next node
    val numberQueryLines: Int = (lenRow - 1) / 3
    var varNames = ListBuffer(seedVarName)
    var projectionVar: String = ""
    var queryStr = if (featuresInOptionalBlocks) "\tOPTIONAL {\n" else ""
    for (queryLineNumber <- 0 to (numberQueryLines - 1)) {
      val leftN = nonNullRow(queryLineNumber * 3)
      val p = nonNullRow((queryLineNumber * 3) + 1)
      val direction = nonNullRow((queryLineNumber * 3) + 2)
      val rightN = nonNullRow((queryLineNumber * 3) + 3)
      var firstVarName = varNames.last
      // derive a readable var name from the predicate's local part
      var secondVarName = firstVarName + f"__$direction" + "_" + p.toString.split("/").last.replace("#", "_").replace(".", "").replace("-", "")
      varNames.append(secondVarName)
      val query_line: String = direction match {
        case "down" => f"$firstVarName <$p> $secondVarName ."
        case "up" => f"$secondVarName <$p> $firstVarName ."
      }
      queryStr = if (featuresInOptionalBlocks) queryStr + f"\t\t$query_line\n" else queryStr + f"\t$query_line\n"
      projectionVar = secondVarName
    }
    queryStr = if (featuresInOptionalBlocks) queryStr + "\t}" else queryStr
    (queryStr, projectionVar)
  }

  /**
   * this function creates the sparql and a list of corresponding projection variables
   *
   * the function operates on dataframe level and first fetches the seeds
   * then seeds are cutoff to the desired number or ratio to be considered
   * from seeds we traverse up in the graph
   * traverse down
   * create for each traversed path a query line
   * take unique query lines
   * create sparql query
   *
   * @param ds dataset of triple of true columns of type string representing triples s p o
   * @param seedVarName how the seeds should be named and with beginning question mark as needed for projection variable
   * @param seedWhereClause a string representing the where part of a sparql query specifying how to reach seeds
   * @param maxUp integer for limiting number of traversal up steps
   * @param maxDown integer for limiting traverse down steps
   * @param numberSeeds number of seeds to consider
   * @param ratioNumberSeeds number of seeds specified by ratio
   * @return string of resulting sparql and list of string for each projection variable which later can be used for dataframe column naming
   */
  def createSparql(
    ds: Dataset[org.apache.jena.graph.Triple],
    seedVarName: String,
    seedWhereClause: String,
    maxUp: Int,
    maxDown: Int,
    numberSeeds: Int = 0,
    ratioNumberSeeds: Double = 1.0,
    numberRandomWalks: Int = 0,
    sortedByLinks: Boolean = false,
    featuresInOptionalBlocks: Boolean = true,
  ): (String, List[String]) = {
    // create the sparql to reach seeds and maybe sort them by this sparql as well
    val seedFetchingSparql: String = createSeedFetchingSparql(seedVarName, seedWhereClause, sortedByLinks)
    // query for seeds and list those
    val sparqlFrame = new SparqlFrame()
      .setSparqlQuery(seedFetchingSparql)
      .setQueryExcecutionEngine(SPARQLEngine.Sparqlify)
    val seedsDf: DataFrame = sparqlFrame.transform(ds).toDF("n_0").cache()
    if (seedsDf.count() == 0) {
      throw new Exception(s"The sparql query hasn't resulted in any seed entity!")
    }
    // TODO make log println(f"the fetched seeds are:\n${seeds.mkString("\n")}\n")
    val numberOfSeeds: Int = seedsDf.count().toInt
    // calculate cutoff: an explicit seed count wins over the ratio
    val cutoff = if (numberSeeds > 0) numberSeeds else math.rint(numberOfSeeds * ratioNumberSeeds).toInt
    val usedSeedsDf: DataFrame = seedsDf.limit(cutoff).toDF("n_0")
    val spark = SparkSession.builder
      .getOrCreate()
    import spark.implicits._
    implicit val rdfTripleEncoder: Encoder[Triple] = org.apache.spark.sql.Encoders.kryo[Triple]
    implicit val nodeEncoder = Encoders.kryo(classOf[Node])
    implicit val rowEncoder = Encoders.kryo(classOf[Row])
    /* val tmpRdd: RDD[Seq[String]] = ds.map((triple: org.apache.jena.graph.Triple) => if (triple.getObject.isLiteral) Seq(triple.getSubject.toString(), triple.getPredicate.toString(), _literalReplacementString) else Seq(triple.getSubject.toString(), triple.getPredicate.toString(), triple.getObject().toString())).rdd
    val df: DataFrame = spark.createDataFrame(
      tmpRdd
    ).toDF(Seq("s", "p", "o"): _*).cache()
    */
    // replace every literal object with the placeholder so all paths end uniformly
    val df: DataFrame = ds.map((triple: org.apache.jena.graph.Triple) => if (triple.getObject.isLiteral) Triple.create(triple.getSubject, triple.getPredicate, _literalReplacement) else triple).rdd.toDF().toDF(Seq("s", "p", "o"): _*).cache()
    // create dataframes for traversal (up and down)
    val (up: DataFrame, down: DataFrame) = createDataframesToTraverse(df)
    up.cache()
    down.cache()
    // seeds in dataframe as starting paths
    // TODO make log println(s"we start initially with following seeds (after cutoff):\n${usedSeedsAsString.mkString("\n")}")
    // println("initial paths, so seeds are:")
    var paths: DataFrame = usedSeedsDf // usedSeedsAsString.toDF("n_0").cache() // seedsDf.map(_.toString).limit(cutoff).toDF("n0")
    // paths.show(10, false)
    // traverse up
    // println("traverse up")
    paths = traverse(paths, up, iterationLimit = maxUp, traverseDirection = "up", numberRandomWalks = numberRandomWalks).cache()
    // paths.show(10, false)
    // traverse down
    // println("traverse down")
    paths = traverse(paths, down, iterationLimit = maxDown, traverseDirection = "down", numberRandomWalks = numberRandomWalks).cache()
    // paths.show(10, false)
    // all gathered paths
    // println("gathered paths")
    // reorder columns into hop order: n_0 p_0 dir_0 n_1 p_1 dir_1 n_2 ...
    val columns = paths.columns.toList
    val newColumnsOrder: Seq[String] = columns
      .map(_.split("_").last.toInt)
      .distinct
      .sorted
      .dropRight(1)
      .flatMap(i => (f"n_$i p_$i dir_$i n_${i + 1}").split(" "))
      .distinct
    paths = paths.select(newColumnsOrder.map(col(_)): _*).cache()
    val results = paths.rdd.map(rowToQuery(_, seedVarName, featuresInOptionalBlocks)).cache()
    val queryLines: List[String] = results.map(_._1.toString).collect().toList.distinct.sortBy(_.size)
    val projectionVars: List[String] = results.map(_._2.toString).collect().toList.distinct.sortBy(_.size)
    val projection_vars_string = projectionVars.mkString(" ")
    val all_optional_query_blocks_str = queryLines.mkString("\n")
    val total_query = f"SELECT $seedVarName $projection_vars_string\n\nWHERE {\n\t${seedWhereClause}\n\n$all_optional_query_blocks_str \n}"
    (total_query, projectionVars)
  }

  /**
   * the main function call the entire process
   *
   * all configuration have to be done in a config file. this allows easier interaction as soon as a standalone jar has been created.
   *
   * @param args path to the typesafe conf file
   */
  def main(args: Array[String]): Unit = {
    val configFilePath = args(0)
    val config = new ConfigResolver(configFilePath).getConfig()
    println(config)
    val inputFilePath: String = config.getString("inputFilePath")
    val outputFilePath: String = config.getString("outputFilePath")
    val seedVarName = config.getString("seedVarName")
    val whereClauseForSeed = config.getString("whereClauseForSeed")
    val maxUp: Int = config.getInt("maxUp")
    val maxDown: Int = config.getInt("maxDown")
    val seedNumber: Int = config.getInt("seedNumber")
    val seedNumberAsRatio: Double = config.getDouble("seedNumberAsRatio")
    val numberRandomWalks: Int = config.getInt("numberRandomWalks")
    val sortedByLinks = config.getBoolean("sortedByLinks")
    // val hardCodedSeeds: List[String] = config.getStringList("hardCodedSeeds").asScala.toList
    // setup spark session
    val spark = SparkSession.builder
      .appName(s"rdf2feature")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .config("spark.kryo.registrator", String.join(", ",
        "net.sansa_stack.rdf.spark.io.JenaKryoRegistrator",
        "net.sansa_stack.query.spark.sparqlify.KryoRegistratorSparqlify",
        "net.sansa_stack.query.spark.ontop.OntopKryoRegistrator"))
      .config("spark.sql.crossJoin.enabled", true)
      .getOrCreate()
    spark.sparkContext.setLogLevel("ERROR")
    implicit val nodeTupleEncoder = Encoders.kryo(classOf[(Node, Node, Node)])
    // get lang from filename
    // NOTE(review): `lang` is computed but never passed to the reader below — confirm
    // whether NTripleReader should receive it or whether this val can be dropped.
    val lang = RDFLanguages.filenameToLang(inputFilePath)
    // load RDF to Dataset
    val dataset = NTripleReader.load(
      spark,
      inputFilePath,
      stopOnBadTerm = ErrorParseMode.SKIP,
      stopOnWarnings = WarningParseMode.IGNORE
    ).toDS().cache()
    // println("The dataframe looks like this:")
    // df.show(false)
    val (totalSparqlQuery: String, var_names: List[String]) = createSparql(
      ds = dataset,
      seedVarName = seedVarName,
      seedWhereClause = whereClauseForSeed,
      maxUp = maxUp,
      maxDown = maxDown,
      numberSeeds = seedNumber,
      ratioNumberSeeds = seedNumberAsRatio,
      numberRandomWalks = numberRandomWalks,
      sortedByLinks = sortedByLinks
    )
    println(
      f"""
         |The automatic created feature extracting sparql fetched ${var_names.size} projection variables representing literals.
         |the projection variables are:
         |${var_names.map(vn => f"\t$vn").mkString("\n")}
         |\n
         |""".stripMargin)
    println(f"The resulting sparql query is: \n$totalSparqlQuery")
    Files.write(Paths.get(outputFilePath), totalSparqlQuery.getBytes(StandardCharsets.UTF_8))
    println(f"generated sparql has been stored to: $outputFilePath")
  }
}
|
SethuSenthil/Mobile-App
|
ImageRadioDemo/app/src/main/java/com/example/imageradiodemo/MainActivity.java
|
package com.example.imageradiodemo;
import androidx.appcompat.app.AppCompatActivity;
import android.os.Bundle;
import android.widget.ImageView;
import android.widget.RadioGroup;
import android.widget.Toast;
/**
 * Demo activity: shows a toast when a specific radio button in the
 * image-backed radio group is selected.
 */
public class MainActivity extends AppCompatActivity {

    ImageView imageView;
    RadioGroup radioGroup;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);

        imageView = findViewById(R.id.imageView2);
        //imageView.setImageResource(R.drawable.child);
        radioGroup = findViewById(R.id.id_radio);

        // Lambda form of RadioGroup.OnCheckedChangeListener (functional interface).
        radioGroup.setOnCheckedChangeListener((group, checkedId) -> {
            if (checkedId == R.id.radioButton2) {
                // do stuff
                Toast.makeText(MainActivity.this, "Don't Fall Asleep!", Toast.LENGTH_SHORT).show();
            }
        });
    }
}
|
JOKERsamadd/bishe
|
febs-security/src/main/java/cn/net/zhipeng/security/domain/LoginType.java
|
package cn.net.zhipeng.security.domain;
/**
 * Supported authentication channels. (Per the constant names: form login,
 * SMS-code login, and third-party/social login — confirm against the
 * authentication providers that switch on this enum.)
 */
public enum LoginType {
    normal,
    sms,
    social;
}
|
dszmaj/wikidpad
|
lib/gadfly/bindings.py
|
""" Rule bindings for sql grammar.
:Author: <NAME>
:Maintainers: http://gadfly.sf.net/
:Copyright: <NAME>, 1994
:Id: $Id: bindings.py,v 1.2 2008/10/26 14:41:01 <NAME> Exp $:
"""
import semantics
# Generic reduction helpers shared by many grammar rules below.
# Each binding function receives (list_of_reduction_members, parse_context).
def elt0(list, context):
    """return first member of reduction"""
    return list[0]
def elt1(list, context):
    """return second member"""
    return list[1]
def elt2(list, context):
    """return third member"""
    return list[2]
def returnNone(list, context):
    """reduce to None (used for empty/optional productions)"""
    return None
def stat1(list, context):
    """return list of len 1 of statements"""
    return list
#def statn(list, context):
#    """return a list of statement reductions"""
#    [stat, semi, statlist] = list
#    statlist.insert(0, stat)
#    return statlist
def thingcommalist(l, c):
    """prepend `thing` to an existing list: THING COMMA LIST -> LIST"""
    [thing, comma, list] = l
    list.insert(0, thing)
    return list
def listcommathing(l, c):
    """append `thing` to an existing list: LIST COMMA THING -> LIST"""
    [list, comma, thing] = l
    list.append(thing)
    return list
statn = thingcommalist
# Each top-level statement reduces to its single sub-statement.
selstat = elt0
insstat = elt0
createtablestat = elt0
droptablestat = elt0
delstat = elt0
updatestat = elt0
createindexstat = elt0
dropindexstat = elt0
createviewstat = elt0
dropviewstat = elt0
# drop view statement stuff
def dropview(l, c):
    """DROP VIEW name -> semantics.DropView"""
    [drop, view, name] = l
    return semantics.DropView(name)
# create view statement stuff
def createview(l, c):
    """CREATE VIEW name [(namelist)] AS selection -> semantics.CreateView"""
    [create, view, name, namelist, dummyAs, selection] = l
    return semantics.CreateView(name, namelist, selection)
optnamelist0 = returnNone
optnamelistn = elt1
# drop index statement stuff
def dropindex(l, c):
    """DROP INDEX name -> semantics.DropIndex"""
    [drop, index, name] = l
    return semantics.DropIndex(name)
# create index statement stuff
def createindex(l, c):
    """CREATE INDEX name ON table (namelist) -> semantics.CreateIndex"""
    [create, index, name, on, table, op, namelist, cp] = l
    return semantics.CreateIndex(name, table, namelist)
def createuniqueindex(l, c):
    """CREATE UNIQUE INDEX variant (unique=1)"""
    [create, unique, index, name, on, table, op, namelist, cp] = l
    return semantics.CreateIndex(name, table, namelist, unique=1)
names1 = stat1
namesn = listcommathing
# update statement stuff
def update(l, c):
    """UPDATE name SET assns [WHERE condition] -> semantics.UpdateOp"""
    [upd, name, set, assns, condition] = l
    return semantics.UpdateOp(name, assns, condition)
def assn(l, c):
    """col = exp -> (col, exp) pair"""
    [col, eq, exp] = l
    return (col, exp)
def assn1(l, c):
    """first assignment: start a TupleCollector"""
    [ (col, exp) ] = l
    result = semantics.TupleCollector()
    result.addbinding(col, exp)
    return result
def assnn(l, c):
    """subsequent assignment: add to the existing TupleCollector"""
    [ result, comma, (col, exp) ] = l
    result.addbinding(col, exp)
    return result
# delete statement stuff
def deletefrom(l, c):
    """DELETE FROM name [WHERE ...] -> semantics.DeleteOp"""
    [delete, fromkw, name, where] = l
    return semantics.DeleteOp(name, where)
# drop table stuff
def droptable(l, c):
    [drop, table, name] = l
    return semantics.DropTable(name)
# create table statement stuff
def createtable(list, context):
    [create, table, name, p1, colelts, p2] = list
    return semantics.CreateTable(name, colelts)
colelts1 = stat1
coleltsn = listcommathing
#def coleltsn(list, c):
#    [c1, cc, ce] = list
#    c1.append(ce)
#    return c1
coleltid = elt0
coleltconstraint = elt0
def coldef(l, c):
    """column definition: id, datatype, optional default, constraints"""
    [colid, datatype, default, constraints] = l
    return semantics.ColumnDef(colid, datatype, default, constraints)
optdef0 = returnNone
optcolconstr0 = returnNone
# NOTE(review): `float` here shadows the builtin at module level.
stringtype = exnumtype = appnumtype = integer = float = varchar = elt0
varcharn = elt0
# insert statement stuff
def insert1(l, c):
    """INSERT INTO name [(colids)] insert_spec -> semantics.InsertOp"""
    [insert, into, name, optcolids, insert_spec] = l
    return semantics.InsertOp(name, optcolids, insert_spec)
optcolids0 = returnNone
optcolids1 = elt1
colids1 = stat1
colidsn = listcommathing
def insert_values(l, c):
    """VALUES ( litlist ) -- l[2] is the literal list"""
    return semantics.InsertValues(l[2])
def insert_query(l, c):
    """INSERT ... subselect -- l[0] is the subquery"""
    return semantics.InsertSubSelect(l[0])
litlist1 = stat1
litlistn = listcommathing
sliteral0 = elt0
# Signed-literal constant folding: unary +/- and binary +/- on literals.
def sliteralp(l, c):
    [p, v] = l
    return +v
def sliterald(l, c):
    [l1, m, l2] = l
    return l1 - l2
def sliterals(l, c):
    [l1, p, l2] = l
    return l1 + l2
def sliteralm(l, c):
    [m, v] = l
    return -v
# select statement stuff
def selectx(list, context):
    """attach ORDER BY and dynamic-parameter count to a finished subselect"""
    [sub, optorder_by] = list
    #sub.union_select = optunion
    sub.order_by = optorder_by
    # number of dynamic parameters in this parse.
    sub.ndynamic = context.ndynamic()
    return sub
psubselect = elt1
def subselect(list, context):
    """core SELECT reduction -> semantics.Selector"""
    [select, alldistinct, selectlist, fromkw, trlist,
     optwhere, optgroup, opthaving, optunion] = list
    sel = semantics.Selector(alldistinct, selectlist, trlist, optwhere,
                             optgroup, opthaving,
                             # store # of dynamic parameters seen in this parse.
                             ndynamic = context.ndynamic()
                             )
    sel.union_select = optunion
    return sel
def ad0(list, context):
    return "ALL"
adall = ad0
def addistinct(list, context):
    return "DISTINCT"
def where0(list, context):
    return semantics.BTPredicate() # true
where1 = elt1
group0 = returnNone
group1 = elt2
colnames1 = stat1
colnamesn = listcommathing
having0 = returnNone
having1 = elt1
union0 = returnNone
def union1(l, c):
    [union, alldistinct, selection] = l
    return semantics.Union(alldistinct, selection)
def except1(l, c):
    # EXCEPT is always DISTINCT per SQL semantics here
    [union, selection] = l
    alldistinct = "DISTINCT"
    return semantics.Except(alldistinct, selection)
def intersect1(l, c):
    # INTERSECT is always DISTINCT per SQL semantics here
    [union, selection] = l
    alldistinct = "DISTINCT"
    return semantics.Intersect(alldistinct, selection)
order0 = returnNone
order1 = elt2
#orderby = elt2
sortspec1 = stat1
sortspecn = listcommathing
def sortint(l, c):
    """ORDER BY <position> -- must be a positive int column position"""
    [num, ord] = l
    from types import IntType
    if type(num)!=IntType or num<=0:
        raise ValueError, `num`+': col position not positive int'
    return semantics.PositionedSort(num, ord)
def sortcol(l, c):
    """ORDER BY <column name>"""
    [name, ord] = l
    return semantics.NamedSort(name, ord)
def optord0(l, c):
    return "ASC"
optordasc = optord0
def optorddesc(l, c):
    return "DESC"
## table reference list returns list of (name, name) or (name, alias)
def trl1(l, c):
    """single table without alias -> [(name, name)]"""
    [name] = l
    return [(name, name)]
def trln(l,c):
    [name, comma, others] = l
    others.insert(0, (name, name))
    return others
def trl1a(l,c):
    """table with implicit alias: `name alias`"""
    [name, alias] = l
    return [(name, alias)]
def trlna(l,c):
    [name, alias, comma, others] = l
    others.insert(0, (name, alias))
    return others
def trl1as(l,c):
    """table with explicit alias: `name AS alias`"""
    [name, dummyAs, alias] = l
    return [(name, alias)]
def trlnas(l,c):
    [name, dummyAs, alias, comma, others] = l
    others.insert(0, (name, alias))
    return others
tablename1 = elt0
columnid1 = elt0
def columnname1(list, context):
    """unqualified column -> delegate with table=None"""
    [ci] = list
    return columnname2([None, None, ci], context)
def columnname2(list, context):
    """qualified column `table.col` -> semantics.BoundAttribute"""
    [table, ignore, col] = list
    return semantics.BoundAttribute(table, col)
def dynamic(list, context):
    # return a new dynamic parameter
    int = context.param()
    return semantics.BoundAttribute(0, int)
# expression stuff
def literal(list, context):
    """wrap a literal token as a semantics.Constant"""
    [lit] = list
    return semantics.Constant(lit)
def stringstring(l, c):
    """two strings in sequence = apostrophe"""
    # NOTE(review): relies on `l1.value0` -- presumably Constant exposes its raw
    # value there; confirm in semantics.py.
    [l1, l2] = l
    value = "%s'%s" % (l1.value0, l2)
    return semantics.Constant(value)
numlit = literal
stringlit = literal
primarylit = elt0
primary1 = elt0
factor1 = elt0
term1 = elt0
exp1 = elt0
# Arithmetic folds onto the expression objects' operator overloads.
def expplus(list, context):
    [exp, plus, term] = list
    return exp + term
def expminus(list, context):
    [exp, minus, term] = list
    return exp - term
def termtimes(list, context):
    [exp, times, term] = list
    return exp * term
def termdiv(list, context):
    [exp, div, term] = list
    return exp / term
plusfactor = elt1
def minusfactor(list, context):
    [minus, factor] = list
    return -factor
primaryexp = elt1
primaryset = elt0
def countstar(l, c):
    return semantics.Count("*")
def distinctset(l, c):
    """AGG ( DISTINCT exp ) -> aggregate with distinct=1"""
    [agg, p1, distinct, exp, p2] = l
    return set(agg, exp, 1)
distinctcount = distinctset
def allset(l, c):
    """AGG ( exp ) -> aggregate with distinct=0"""
    [agg, p1, exp, p2] = l
    return set(agg, exp, 0)
allcount = allset
# NOTE(review): this `set` shadows the builtin within this module.
def set(agg, exp, distinct):
    """dispatch an aggregate keyword to its semantics class"""
    import semantics
    if agg=="AVG":
        return semantics.Average(exp, distinct)
    if agg=="COUNT":
        return semantics.Count(exp, distinct)
    if agg=="MAX":
        return semantics.Maximum(exp, distinct)
    if agg=="MIN":
        return semantics.Minimum(exp, distinct)
    if agg=="SUM":
        return semantics.Sum(exp, distinct)
    if agg=="MEDIAN":
        return semantics.Median(exp, distinct)
    raise NameError, `agg`+": unknown aggregate"
average = count = maximum = minimum = summation = median = elt0
average = count = maximum = minimum = summation = median = elt0
def predicateeq(list, context):
[e1, eq, e2] = list
return e1.equate(e2)
def predicatene(list, context):
[e1, lt, gt, e2] = list
return ~(e1.equate(e2))
def predicatelt(list, context):
[e1, lt, e2] = list
return e1.lt(e2)
def predicategt(list, context):
[e1, lt, e2] = list
return e2.lt(e1)
def predicatele(list, context):
[e1, lt, eq, e2] = list
return e1.le(e2)
def predicatege(list, context):
[e1, lt, eq, e2] = list
return e2.le(e1)
def predbetween(list, context):
[e1, between, e2, andkw, e3] = list
return semantics.BetweenPredicate(e1, e2, e3)
def prednotbetween(list, context):
[e1, notkw, between, e2, andkw, e3] = list
return ~semantics.BetweenPredicate(e1, e2, e3)
predicate1 = elt0
bps = elt1
bp1 = elt0
# exists predicate stuff
predexists = elt0
def exists(l, c):
[ex, paren1, subquery, paren2] = l
return semantics.ExistsPred(subquery)
def notbf(list, context):
[ notst, thing ] = list
return ~thing
# quantified predicates
nnall = elt0
nnany = elt0
def predqeq(list, context):
[exp, eq, allany, p1, subq, p2] = list
if allany=="ANY":
return semantics.QuantEQ(exp, subq)
else:
return ~semantics.QuantNE(exp, subq)
def predqne(list, context):
[exp, lt, gt, allany, p1, subq, p2] = list
if allany=="ANY":
return semantics.QuantNE(exp, subq)
else:
return ~semantics.QuantEQ(exp, subq)
def predqlt(list, context):
[exp, lt, allany, p1, subq, p2] = list
if allany=="ANY":
return semantics.QuantLT(exp, subq)
else:
return ~semantics.QuantGE(exp, subq)
def predqgt(list, context):
[exp, gt, allany, p1, subq, p2] = list
if allany=="ANY":
return semantics.QuantGT(exp, subq)
else:
return ~semantics.QuantLE(exp, subq)
def predqle(list, context):
[exp, less, eq, allany, p1, subq, p2] = list
if allany=="ANY":
return semantics.QuantLE(exp, subq)
else:
return ~semantics.QuantGT(exp, subq)
def predqge(list, context):
[exp, gt, eq, allany, p1, subq, p2] = list
if allany=="ANY":
return semantics.QuantGE(exp, subq)
else:
return ~semantics.QuantLT(exp, subq)
# subquery expression
def subqexpr(list, context):
[p1, subq, p2] = list
return semantics.SubQueryExpression(subq)
def predin(list, context):
[exp, inkw, p1, subq, p2] = list
return semantics.InPredicate(exp, subq)
def prednotin(list, context):
[exp, notkw, inkw, p1, subq, p2] = list
return ~semantics.InPredicate(exp, subq)
def predinlits(list, context):
[exp, inkw, p1, lits, p2] = list
return semantics.InLits(exp, lits)
def prednotinlits(list, context):
[exp, notkw, inkw, p1, lits, p2] = list
return ~semantics.InLits(exp, lits)
bf1 = elt0
def booln(list, context):
    """AND combines predicates via &"""
    [ e1, andst, e2 ] = list
    return e1&e2
bool1 = elt0
def searchn(list, context):
    """OR combines predicates via |"""
    [ e1, orst, e2 ] = list
    return e1 | e2
search1 = elt0
colalias = elt0
# select list stuff
def selectstar(l,c):
    return "*"
selectsome = elt0
# NOTE(review): this assignment is immediately shadowed by the `def select1` below.
select1 = elt0
# selectsub returns (expression, asname)
def select1(list, context):
    """first select-list entry: start a TupleCollector"""
    [ (exp, name) ] = list
    result = semantics.TupleCollector()
    result.addbinding(name, exp)
    return result
def selectn(list, context):
    """subsequent select-list entry: extend the collector"""
    [ selectsubs, comma, select_sublist ] = list
    (exp, name) = select_sublist
    selectsubs.addbinding(name, exp)
    return selectsubs
def selectit(list, context):
    [exp] = list
    return (exp, None) # no binding!
def selectname(list, context):
    """exp AS alias -> (exp, alias)"""
    [exp, dummyAs, alias] = list
    return (exp, alias)
# NOTE(review): duplicate of the `colalias = elt0` assignment above.
colalias = elt0
#### do the bindings.
# note: all reduction function defs must precede this assign
VARS = vars()
class punter:
    # Fallback reduction handler for grammar rules that have no function
    # bound in this module: announces the unreduced rule and passes the
    # token list through unchanged.
    def __init__(self, name):
        # name: the grammar rule this instance stands in for.
        self.name = name
    def __call__(self, list, context):
        # Python 2 print statement: log which rule was "punted".
        print "punt:", self.name, list
        return list
class tracer:
    # Debugging wrapper around a real reduction function: prints the rule
    # name and token list before delegating to the wrapped function.
    def __init__(self, name, fn):
        # name: grammar rule name, used in the trace output.
        self.name = name
        # fn: the real reduction function to delegate to.
        self.fn = fn
    def __call__(self, list, context):
        print self.name, list
        return self.fn(list, context)
def BindRules(sqlg):
    # Attach a reduction function to every rule of the grammar `sqlg`.
    # Rules whose name matches a function in this module's namespace (VARS)
    # get that function; anything unmatched gets a `punter` so parsing can
    # still proceed (with a diagnostic).  Returns the grammar for chaining.
    for name in sqlg.RuleNameToIndex.keys():
        if VARS.has_key(name):  # Python 2 idiom (`name in VARS` in Python 3)
            #print "binding", name
            sqlg.Bind(name, VARS[name]) # nondebug
            #sqlg.Bind(name, tracer(name, VARS[name]) ) # debug
        else:
            print "unbound", name
            sqlg.Bind(name, punter(name))
    return sqlg
#
# $Log: bindings.py,v $
# Revision 1.2 2008/10/26 14:41:01 <NAME>
# 1.9beta19
#
# Revision 1.1 2006/01/07 15:01:23 <NAME>
# First combined version of WikidPad/WikidPadCompact
#
# Revision 1.1 2005/06/05 05:51:23 jhorman
# initial checkin
#
# Revision 1.4 2002/05/11 02:59:04 richard
# Added info into module docstrings.
# Fixed docco of kwParsing to reflect new grammar "marshalling".
# Fixed bug in gadfly.open - most likely introduced during sql loading
# re-work (though looking back at the diff from back then, I can't see how it
# wasn't different before, but it musta been ;)
# A buncha new unit test stuff.
#
# Revision 1.3 2002/05/08 00:49:00 anthonybaxter
# El Grande Grande reindente! Ran reindent.py over the whole thing.
# Gosh, what a lot of checkins. Tests still pass with 2.1 and 2.2.
#
# Revision 1.2 2002/05/08 00:31:52 richard
# More cleanup.
#
# Revision 1.1.1.1 2002/05/06 07:31:09 richard
#
#
#
|
BuggMaker/aiMap
|
src/utils/index.js
|
// Barrel module: gathers the utility helpers under a single import path.
import * as UtilObj from "./UtilObject";
// Expose the object helpers both as a namespace and as named exports.
export { UtilObj };
export { extend, merge, replace, copy, isInstanceOf } from "./UtilObject";
// Re-export everything from the remaining utility modules.
export * from "./UtilColor";
export * from "./Util";
export * from "./UtilFetch";
|
neko-kai/izumi-r2
|
distage/distage-roles/src/main/scala/com/github/pshirshov/izumi/distage/roles/impl/ScoptLauncherArgs.scala
|
<reponame>neko-kai/izumi-r2<filename>distage/distage-roles/src/main/scala/com/github/pshirshov/izumi/distage/roles/impl/ScoptLauncherArgs.scala
package com.github.pshirshov.izumi.distage.roles.impl
import java.io.File
import com.github.pshirshov.izumi.distage.roles.impl.ScoptLauncherArgs.WriteReference
import com.github.pshirshov.izumi.distage.roles.launcher.RoleArgs
import com.github.pshirshov.izumi.logstage.api.Log
import scopt.{OptionParser, Read}
/**
  * Parsed command-line options for the launcher.
  *
  * @param configFile     common configuration file, if supplied via `-c`/`--config`
  * @param writeReference when set, dump the reference config (see `--write-reference`)
  * @param dummyStorage   use in-memory dummy storages instead of production ones
  * @param rootLogLevel   root logging level (defaults to Info)
  * @param jsonLogging    emit logs in JSON format
  * @param roles          roles to enable, in the order given on the command line
  */
case class ScoptLauncherArgs(
                              configFile: Option[File] = None
                              , writeReference: Option[WriteReference] = None
                              , dummyStorage: Option[Boolean] = Some(false)
                              , rootLogLevel: Log.Level = Log.Level.Info
                              , jsonLogging: Option[Boolean] = Some(false)
                              , roles: List[RoleArgs] = List.empty
                            )
object ScoptLauncherArgs {

  /**
    * Options for `--write-reference`: controls the format and destination of
    * the dumped reference configuration.
    *
    * @param asJson        dump in JSON instead of HOCON
    * @param targetDir     directory the reference configs are written to
    * @param includeCommon whether the shared part is included in role configs
    */
  case class WriteReference(asJson: Boolean = false
                            , targetDir: String = "config"
                            , includeCommon: Boolean = true)

  /** Command-line parser producing a [[ScoptLauncherArgs]] from raw arguments. */
  lazy val parser: OptionParser[ScoptLauncherArgs] = new OptionParser[ScoptLauncherArgs]("tg-launcher") {
    head("tg-launcher", "TODO: manifest version")
    help("help")
    opt[Unit]("logs-json").abbr("lj")
      .text("turn on json logging")
      .action { (_, c) =>
        c.copy(jsonLogging = Some(true))
      }
    opt[Unit]("dummy-storage").abbr("ds")
      .text("use in-memory dummy storages instead of production ones")
      .action { (_, c) =>
        c.copy(dummyStorage = Some(true))
      }
    opt[Log.Level]("root-log-level").abbr("rll")
      .text("Root logging level")
      .valueName("log level")
      .action { (v, c) =>
        c.copy(rootLogLevel = v)
      }
    opt[File]('c', "config")
      .text("configuration file")
      .valueName("<common config file>")
      .action {
        case (x, c) =>
          c.copy(configFile = Some(x))
      }
    // `--write-reference` and its children each replace the whole
    // WriteReference value (last child option wins), matching the
    // original behavior.
    opt[Unit]("write-reference").abbr("wr").action((_, c) =>
      c.copy(writeReference = Some(WriteReference()))).text("dump reference config in HOCON format")
      .children(
        opt[Unit]("json").abbr("js").action((_, c) =>
          c.copy(writeReference = Some(WriteReference(asJson = true)))
        ).text("dump reference config in json format"),
        opt[Boolean]("include-common").abbr("ic").action((b, c) =>
          c.copy(writeReference = Some(WriteReference(includeCommon = b)))
        ).text("include common part in role configs"),
        opt[String]("targetDir").abbr("d").action((dir, c) =>
          c.copy(writeReference = Some(WriteReference(targetDir = dir)))
        ).text("folder to store reference configs, ./config by default")
      )
    // Role arguments: each bare argument appends a role; the child options
    // mutate the most recently added role.
    arg[String]("<role>...").unbounded().optional()
      .text("roles to enable")
      .action {
        (a, c) =>
          c.copy(roles = c.roles :+ RoleArgs(a, None))
      }
      .children(
        opt[String]("role-id").abbr("i")
          .text("role id to enable (legacy option, just put role name as argument)")
          .action { case (a, c) =>
            c.copy(roles = c.roles.init :+ c.roles.last.copy(name = a))
          },
        opt[File]("role-config").abbr("rc").optional()
          .text("config file for role, it will override the common config file")
          .action { case (f, c) =>
            c.copy(roles = c.roles.init :+ c.roles.last.copy(configFile = Some(f)))
          },
      )
  }

  /**
    * Derives a [[Log.Level]] from the first letter of the argument,
    * case-insensitively (t/d/i/w/e/c). Previously the match was partial, so
    * an unrecognized value failed with an opaque MatchError (and an empty
    * value with a StringIndexOutOfBoundsException); both are now rejected
    * with a descriptive IllegalArgumentException, which scopt surfaces as a
    * parse error.
    */
  implicit lazy val logLevelRead: Read[Log.Level] = Read.reads[Log.Level] {
    v =>
      if (v.isEmpty) {
        throw new IllegalArgumentException("Empty log level; expected one of: trace, debug, info, warn, error, crit")
      }
      v.charAt(0).toLower match {
        case 't' => Log.Level.Trace
        case 'd' => Log.Level.Debug
        case 'i' => Log.Level.Info
        case 'w' => Log.Level.Warn
        case 'e' => Log.Level.Error
        case 'c' => Log.Level.Crit
        case _ =>
          throw new IllegalArgumentException(s"Unknown log level: '$v'; expected one of: trace, debug, info, warn, error, crit")
      }
  }
}
|
Mynogs/Algorithm-Implementations
|
Counting_Sort/Java/rrivera1849/Counting_Sort.java
|
<filename>Counting_Sort/Java/rrivera1849/Counting_Sort.java
/*
* Written by: <NAME>
* Last Updated: January 7, 2014
*/
public class Counting_Sort {

    /**
     * Sorts an array of non-negative integers using counting sort.
     * Runs in O(n + k) time where n is the array length and k is maxNumber.
     *
     * @param array     input values, each expected in the range [0, maxNumber]
     * @param maxNumber the largest value that may appear in {@code array}
     * @return a new array containing the elements of {@code array} in ascending order
     */
    public static int[] countingSort(int[] array, int maxNumber) {
        int[] result = new int[array.length];
        int[] counts = new int[maxNumber + 1];

        // Tally how many times each value occurs.
        for (int value : array) {
            counts[value] += 1;
        }
        // Shift by one so the prefix sums below yield each value's final
        // (last) index directly, rather than "one past" it.
        counts[0] -= 1;

        // Prefix sums: counts[v] now holds the index of the last v in the output.
        for (int v = 1; v < counts.length; v++) {
            counts[v] += counts[v - 1];
        }

        // Walk the input backwards (keeps the sort stable), dropping each
        // element into its value's current slot and consuming that slot.
        for (int i = array.length - 1; i >= 0; i--) {
            result[counts[array[i]]] = array[i];
            counts[array[i]] -= 1;
        }
        return result;
    }
}
|
connorwyatt/AxonFramework
|
axon-server-connector/src/main/java/org/axonframework/axonserver/connector/util/GrpcObjectSerializer.java
|
/*
* Copyright (c) 2018. AxonIQ
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.axonframework.axonserver.connector.util;
import com.google.protobuf.ByteString;
import org.axonframework.serialization.SerializedObject;
import java.util.function.Function;
import static org.axonframework.common.ObjectUtils.getOrDefault;
/**
 * Mapping that translates an object into a GRPC {@link io.axoniq.axonserver.grpc.SerializedObject}.
 *
 * @author <NAME>
 * @since 4.0
 */
public class GrpcObjectSerializer<O> implements Function<O, io.axoniq.axonserver.grpc.SerializedObject> {

    /**
     * Minimal serialization contract used by this mapper: turns an object
     * into a {@link SerializedObject} with the requested representation.
     */
    public interface Serializer<A> {

        <T> SerializedObject<T> serialize(A object, Class<T> expectedRepresentation);
    }

    private final Serializer<O> serializer;

    /**
     * Creates a mapper backed by a full Axon
     * {@link org.axonframework.serialization.Serializer}.
     */
    public GrpcObjectSerializer(org.axonframework.serialization.Serializer serializer) {
        this(serializer::serialize);
    }

    GrpcObjectSerializer(Serializer<O> serializer) {
        this.serializer = serializer;
    }

    @Override
    public io.axoniq.axonserver.grpc.SerializedObject apply(O o) {
        // Serialize to raw bytes; the gRPC message carries the payload plus
        // the type name and (possibly absent) revision as metadata.
        SerializedObject<byte[]> payload = serializer.serialize(o, byte[].class);
        return io.axoniq.axonserver.grpc.SerializedObject.newBuilder()
                                                         .setType(payload.getType().getName())
                                                         .setRevision(getOrDefault(payload.getType().getRevision(), ""))
                                                         .setData(ByteString.copyFrom(payload.getData()))
                                                         .build();
    }
}
|
WhatDanDoes/silid
|
src/silid-server/spec/lib/checkPermissionsSpec.js
|
'use strict';
require('dotenv').config();
/**
* 2021-7-12
*
* The team-org stuff has been removed but the related tests are preserved here
* as a _how-to_ for the future.
*
* Commented code is left in place for repurposing when consumer requirements
* are more fully understood.
*/
const nock = require('nock');
const httpMocks = require('node-mocks-http');
const fixtures = require('sequelize-fixtures');
const uuid = require('uuid');
const models = require('../../models');
const Agent = models.Agent;
const Profile = require('passport-auth0/lib/Profile');
const stubAuth0ManagementApi = require('../support/stubAuth0ManagementApi');
const stubUserAppMetadataUpdate = require('../support/auth0Endpoints/stubUserAppMetadataUpdate');
const stubUserRead = require('../support/auth0Endpoints/stubUserRead');
const stubRolesRead = require('../support/auth0Endpoints/stubRolesRead');
const stubUserRolesRead = require('../support/auth0Endpoints/stubUserRolesRead');
const stubUserAssignRoles = require('../support/auth0Endpoints/stubUserAssignRoles');
const checkPermissions = require('../../lib/checkPermissions');
/**
* 2019-11-13
* Sample tokens taken from:
*
* https://auth0.com/docs/api-auth/tutorials/adoption/api-tokens
*/
const _identity = require('../fixtures/sample-auth0-identity-token');
const _profile = require('../fixtures/sample-auth0-profile-response');
const scope = require('../../config/permissions');
const apiScope = require('../../config/apiPermissions');
const roles = require('../../config/roles');
const roleDescriptions = require('../fixtures/roles');
describe('checkPermissions', function() {
let agent, request, response, rolesReadScope, userAssignRolesScope, userReadScope, userRolesReadScope;
beforeEach(done => {
nock.cleanAll();
_profile.scope = roles.viewer;
/**
* Agents need basic viewing privileges. This stubs the
* role-getting and role-assigning endpoints
*/
stubAuth0ManagementApi((err, apiScopes) => {
if (err) return done.fail(err);
({rolesReadScope, userAssignRolesScope, userReadScope, userRolesReadScope, checkPermissionsUserReadScope} = apiScopes);
done();
});
});
afterEach(() => {
// Through the magic of node I am able to adjust the profile data returned.
// This resets the default values
delete _profile.scope;
delete _profile.user_metadata;
delete _profile.roles;
delete _profile.isSuper;
});
describe('returning visitor', () => {
let authenticatedSession, profile;
beforeEach(function(done) {
response = httpMocks.createResponse();
models.sequelize.sync({force: true}).then(() => {
fixtures.loadFile(`${__dirname}/../fixtures/agents.json`, models).then(() => {
models.Agent.findAll().then(results => {
agent = results[0];
done();
}).catch(err => {
done.fail(err);
});
}).catch(err => {
done.fail(err);
});
}).catch(err => {
done.fail(err);
});
});
it('attaches roles to req.user', done => {
request = httpMocks.createRequest({
method: 'POST',
url: '/agent',
user: {..._profile},
});
expect(request.user.roles).toBeUndefined();
checkPermissions([])(request, response, err => {
if (err) return done.fail(err);
expect(request.user.roles).toBeDefined();
expect(request.user.roles.length).toEqual(1);
expect(request.user.roles[0].name).toEqual('viewer');
done();
});
});
it('saves the Auth0-provided profile the agent\'s socialProfile', done => {
request = httpMocks.createRequest({
method: 'POST',
url: '/agent',
user: {..._profile}
});
expect(agent.socialProfile).toBeNull();
checkPermissions([scope.read.agents])(request, response, err => {
if (err) return done.fail(err);
models.Agent.findOne({ where: { email: _identity.email }}).then(agent => {
expect(agent.socialProfile).toEqual(_profile);
done();
}).catch(err => {
done.fail(err);
});
});
});
describe('Auth0 roles', () => {
it('calls the management API to retrieve all the roles', done => {
request = httpMocks.createRequest({
method: 'POST',
url: '/agent',
user: {..._profile, scope: null}
});
checkPermissions([])(request, response, err => {
if (err) return done.fail(err);
expect(rolesReadScope.isDone()).toBe(true);
done();
});
});
it('is called to retrieve the roles assigned to the agent', done => {
request = httpMocks.createRequest({
method: 'POST',
url: '/agent',
user: {..._profile, scope: null}
});
checkPermissions([])(request, response, err => {
if (err) return done.fail(err);
expect(userRolesReadScope.isDone()).toBe(true);
done();
});
});
it('calls the management API to assign viewer role if agent not already a viewer', done => {
request = httpMocks.createRequest({
method: 'POST',
url: '/agent',
user: {..._profile, scope: null}
});
checkPermissions([scope.read.agents])(request, response, err => {
if (err) return done.fail(err);
expect(userAssignRolesScope.isDone()).toBe(true);
done();
});
});
it('does not call the management API if agent is already a viewer', done => {
request = httpMocks.createRequest({
method: 'POST',
url: '/agent',
user: {..._profile, scope: roles.viewer}
});
checkPermissions([])(request, response, err => {
if (err) return done.fail(err);
expect(rolesReadScope.isDone()).toBe(false);
expect(userAssignRolesScope.isDone()).toBe(false);
done();
});
});
it('attaches the new scoped permissions to req.user', done => {
request = httpMocks.createRequest({
method: 'POST',
url: '/agent',
user: _profile,
});
expect(request.user.scope).toEqual(_profile.scope);
checkPermissions([])(request, response, err => {
if (err) return done.fail(err);
expect(request.user.scope).toEqual([...new Set(_profile.scope.concat(roles.viewer))]);
done();
});
});
describe('req.user.isSuper', () => {
it('is set to false if not a super agent', done => {
request = httpMocks.createRequest({
method: 'POST',
url: '/agent',
user: {..._profile},
});
expect(request.user.isSuper).toBeUndefined();
checkPermissions([])(request, response, err => {
if (err) return done.fail(err);
expect(request.user.isSuper).toBe(false);
done();
});
});
it('is set to true if a super agent', done => {
request = httpMocks.createRequest({
method: 'POST',
url: '/agent',
user: {..._profile},
});
expect(request.user.isSuper).toBeUndefined();
// Clear the mocks and set them up again
checkPermissions([])(request, response, err => {
if (err) return done.fail(err);
stubUserRead((err, apiScopes) => {
if (err) return done(err);
stubRolesRead((err, apiScopes) => {
if (err) return done(err);
stubUserAssignRoles((err, apiScopes) => {
if (err) return done(err);
stubUserRolesRead([{
"id": "234",
"name": "sudo",
"description": "All-access pass to Identity resources"
},
{
"id": "345",
"name": "viewer",
"description": "Basic agent, organization, and team viewing permissions"
}], (err, apiScopes) => {
if (err) return done(err);
expect(request.user.isSuper).toBe(false);
// Try again with mocks reset
checkPermissions([])(request, response, err => {
if (err) return done.fail(err);
expect(request.user.isSuper).toBe(true);
done();
});
});
});
});
});
});
});
});
});
/**
* 2020-4-28
*
* There may come a day when we don't need the database anymore. Until that happens, these
* tests ensure that the `req.user` object is consistent with what _should_ be stored
* at Auth0.
*/
describe('Auth0 caching', () => {
beforeEach(done => {
request = httpMocks.createRequest({
method: 'POST',
url: '/agent',
user: _profile
});
// Sanity check
expect(userReadScope.isDone()).toBe(false);
checkPermissions([scope.read.agents])(request, response, err => {
if (err) return done.fail(err);
expect(userReadScope.isDone()).toBe(true);
models.Agent.findOne({ where: { email: _identity.email }}).then(results => {
agent = results;
stubUserRolesRead((err, apiScopes) => {
if (err) return done(err);
({userRolesReadScope} = apiScopes);
done();
});
}).catch(err => {
done.fail(err);
});
});
});
it('does not call the management API if Auth0 and the local cache data are consistent', done => {
request = httpMocks.createRequest({
method: 'POST',
url: '/agent',
user: _profile
});
agent.socialProfile = _profile;
agent.save().then(() => {
expect(agent.socialProfile).toEqual(_profile);
checkPermissions([scope.read.agents])(request, response, err => {
if (err) return done.fail(err);
// This is not satisfied because the cached record matches the Auth0
expect(checkPermissionsUserReadScope.isDone()).toBe(false);
done();
});
}).catch(err => {
done.fail(err);
});
});
it('calls the management API if there is inconsistency between Auth0 and the local cache', done => {
// This is satisfied by the initial authorization call
expect(userReadScope.isDone()).toBe(true);
expect(checkPermissionsUserReadScope.isDone()).toBe(false);
request = httpMocks.createRequest({
method: 'POST',
url: '/agent',
user: _profile
});
expect(agent.socialProfile).toEqual(_profile);
checkPermissions([scope.read.agents])(request, response, err => {
if (err) return done.fail(err);
// This is not satisfied because the cached record matches the Auth0
expect(checkPermissionsUserReadScope.isDone()).toBe(false);
stubUserRolesRead((err, apiScopes) => {
if (err) return done(err);
// A weird situation to be sure, but one that needs attention
agent.socialProfile = {..._profile, user_metadata: { teams: [] }};
agent.save().then(() => {
expect(agent.socialProfile).not.toEqual(_profile);
// Cached profile doesn't match Auth0
checkPermissions([scope.read.agents])(request, response, err => {
if (err) return done.fail(err);
expect(userReadScope.isDone()).toBe(true);
expect(checkPermissionsUserReadScope.isDone()).toBe(true);
done();
});
}).catch(err => {
done.fail(err);
});
});
});
});
});
});
describe('first visit', () => {
let profile, userAppMetadataUpdateScope, userAppMetadataUpdateOauthTokenScope;
beforeEach(done => {
response = httpMocks.createResponse();
models.sequelize.sync({force: true}).then(() => {
stubUserAppMetadataUpdate((err, apiScopes) => {
if (err) return done.fail();
({userAppMetadataUpdateScope, userAppMetadataUpdateOauthTokenScope} = apiScopes);
done();
});
}).catch(err => {
done.fail(err);
});
});
it('attaches roles to req.user', done => {
request = httpMocks.createRequest({
method: 'POST',
url: '/agent',
user: {..._profile, scope: undefined}
});
expect(request.user.roles).toBeUndefined();
checkPermissions([])(request, response, err => {
if (err) return done.fail(err);
expect(request.user.roles).toBeDefined();
expect(request.user.roles.length).toEqual(1);
expect(request.user.roles[0].name).toEqual('viewer');
done();
});
});
it('adds agent to the database', done => {
request = httpMocks.createRequest({
method: 'POST',
url: '/agent',
user: {..._profile, scope: undefined}
});
Agent.findAll().then(a => {
expect(a.length).toEqual(0);
checkPermissions([scope.read.agents])(request, response, err => {
if (err) return done.fail(err);
Agent.findOne({ where: { email: _identity.email } }).then(a => {
expect(request.user).toEqual({...a.dataValues.socialProfile, isSuper: false, isOrganizer: false, roles: [roleDescriptions[2]] });
done();
}).catch(err => {
done.fail(err);
});
});
}).catch(err => {
done.fail();
});
});
it('saves the Identity Token in the agent\'s socialProfile', done => {
request = httpMocks.createRequest({
method: 'POST',
url: '/agent',
user: {..._profile, scope: undefined}
});
checkPermissions([scope.read.agents])(request, response, err => {
if (err) return done.fail(err);
expect(request.user).toEqual({..._profile, isSuper: false, isOrganizer: false, roles: [roleDescriptions[2]] });
Agent.findOne({ where: { email: request.user.email } }).then(a => {
expect(request.user).toEqual({...a.socialProfile, isSuper: false, isOrganizer: false, roles: [roleDescriptions[2]] });
done();
}).catch(err => {
done.fail(err);
});
});
});
describe('Auth0 roles', () => {
it('calls the management API to retrieve all the roles and set viewer role', done => {
request = httpMocks.createRequest({
method: 'POST',
url: '/agent',
user: {..._profile, scope: undefined}
});
checkPermissions([])(request, response, err => {
if (err) return done.fail(err);
expect(rolesReadScope.isDone()).toBe(true);
expect(userAssignRolesScope.isDone()).toBe(true);
done();
});
});
it('attaches the new scoped permissions to req.user', done => {
request = httpMocks.createRequest({
method: 'POST',
url: '/agent',
user: {..._profile, scope: undefined}
});
expect(request.user.scope).toBeUndefined();
checkPermissions([])(request, response, err => {
if (err) return done.fail(err);
expect(request.user.scope).toEqual(_profile.scope);
done();
});
});
it('is called to retrieve the roles assigned to the agent', done => {
request = httpMocks.createRequest({
method: 'POST',
url: '/agent',
user: {..._profile, scope: undefined}
});
checkPermissions([])(request, response, err => {
if (err) return done.fail(err);
expect(userRolesReadScope.isDone()).toBe(true);
done();
});
});
});
describe('with pending team invitations', () => {
beforeEach(done => {
request = httpMocks.createRequest({
method: 'POST',
url: '/agent',
user: {..._profile, email: '<EMAIL>', scope: undefined}
});
done();
});
describe('a single invitation', () => {
let teamId;
beforeEach(done => {
teamId = uuid.v4();
models.Update.create({ recipient: '<EMAIL>', uuid: teamId, type: 'team',
data: {name: '<NAME>', leader: _profile.email, id: teamId} }).then(results => {
done();
}).catch(err => {
done.fail(err);
});
});
it('removes the update from the database', done => {
models.Update.findAll().then(updates => {
expect(updates.length).toEqual(1);
checkPermissions([])(request, response, err => {
if (err) return done.fail(err);
models.Update.findAll().then(updates => {
expect(updates.length).toEqual(0);
done();
}).catch(err => {
done.fail(err);
});
});
}).catch(err => {
done.fail(err);
});
});
// For repurposing...
//
//it('writes the invite to the agent\'s user_metadata', done => {
// expect(request.user.user_metadata).toBeUndefined();
// checkPermissions([])(request, response, err => {
// if (err) return done.fail(err);
// expect(request.user.user_metadata).toBeDefined();
// expect(request.user.user_metadata.rsvps.length).toEqual(1);
// expect(request.user.user_metadata.rsvps[0].uuid).toEqual(teamId);
// expect(request.user.user_metadata.rsvps[0].type).toEqual('team');
// expect(request.user.user_metadata.rsvps[0].recipient).toEqual('<EMAIL>');
// expect(request.user.user_metadata.rsvps[0].data.name).toEqual('The Calgary Roughnecks');
// expect(request.user.user_metadata.rsvps[0].data.id).toEqual(teamId);
// expect(request.user.user_metadata.rsvps[0].data.leader).toEqual(_profile.email);
// expect(request.user.user_metadata.rsvps[0].data.organizationId).toBeUndefined();
// done();
// });
//});
describe('Auth0', () => {
it('is called to write the updates to the agent\'s user_metadata', done => {
checkPermissions([])(request, response, err => {
if (err) return done.fail(err);
expect(userAppMetadataUpdateScope.isDone()).toBe(true);
expect(userAppMetadataUpdateOauthTokenScope.isDone()).toBe(true);
done();
});
});
});
describe('and now multiple invitations', () => {
let anotherTeamId;
beforeEach(done => {
anotherTeamId = uuid.v4();
models.Update.create({ recipient: '<EMAIL>', uuid: anotherTeamId, type: 'team',
data: {name: 'The Buffalo Bandits', leader: _profile.email, id: anotherTeamId} }).then(results => {
done();
}).catch(err => {
done.fail(err);
});
});
it('removes the update from the database', done => {
models.Update.findAll().then(updates => {
expect(updates.length).toEqual(2);
checkPermissions([])(request, response, err => {
if (err) return done.fail(err);
models.Update.findAll().then(updates => {
expect(updates.length).toEqual(0);
done();
}).catch(err => {
done.fail(err);
});
});
}).catch(err => {
done.fail(err);
});
});
// For repurposing...
//
//it('writes the invite to the agent\'s user_metadata', done => {
// expect(request.user.user_metadata).toBeUndefined();
// checkPermissions([])(request, response, err => {
// if (err) return done.fail(err);
// expect(request.user.user_metadata).toBeDefined();
// expect(request.user.user_metadata.rsvps.length).toEqual(2);
// expect(request.user.user_metadata.rsvps[0].uuid).toEqual(anotherTeamId);
// expect(request.user.user_metadata.rsvps[0].type).toEqual('team');
// expect(request.user.user_metadata.rsvps[0].recipient).toEqual('<EMAIL>');
// expect(request.user.user_metadata.rsvps[0].data.name).toEqual('<NAME>');
// expect(request.user.user_metadata.rsvps[0].data.id).toEqual(anotherTeamId);
// expect(request.user.user_metadata.rsvps[0].data.leader).toEqual(_profile.email);
// expect(request.user.user_metadata.rsvps[0].data.organizationId).toBeUndefined();
// expect(request.user.user_metadata.rsvps[1].uuid).toEqual(teamId);
// expect(request.user.user_metadata.rsvps[1].type).toEqual('team');
// expect(request.user.user_metadata.rsvps[1].recipient).toEqual('<EMAIL>');
// expect(request.user.user_metadata.rsvps[1].data.name).toEqual('<NAME>');
// expect(request.user.user_metadata.rsvps[1].data.id).toEqual(teamId);
// expect(request.user.user_metadata.rsvps[1].data.leader).toEqual(_profile.email);
// expect(request.user.user_metadata.rsvps[1].data.organizationId).toBeUndefined();
// done();
// });
//});
describe('Auth0', () => {
it('is called to write the updates to the agent\'s user_metadata', done => {
checkPermissions([])(request, response, err => {
if (err) return done.fail(err);
expect(userAppMetadataUpdateScope.isDone()).toBe(true);
expect(userAppMetadataUpdateOauthTokenScope.isDone()).toBe(true);
done();
});
});
});
});
});
});
});
/**
* An invited agent won't have any profile data set expect for email
*/
describe('invited agent', () => {
let invitedAgent, profile, userAppMetadataUpdateScope, userAppMetadataUpdateOauthTokenScope;
beforeEach(done => {
models.sequelize.sync({force: true}).then(() => {
invitedAgent = new Agent({ email: _identity.email });
invitedAgent.save().then(res => {
stubUserAppMetadataUpdate((err, apiScopes) => {
if (err) return done.fail();
({userAppMetadataUpdateScope, userAppMetadataUpdateOauthTokenScope} = apiScopes);
done();
});
}).catch(err => {
done.fail(err);
});
}).catch(err => {
done.fail(err);
});
});
it('populates new agent\'s fields with data from the identity token when the social profile is out of date', done => {
request = httpMocks.createRequest({
method: 'GET',
url: '/agent',
user: {..._profile}
});
expect(invitedAgent.socialProfile).toBeNull();
checkPermissions([scope.read.agents])(request, response, err => {
if (err) return done.fail(err);
Agent.findOne({ where: { email: _identity.email } }).then(a => {
expect(a.socialProfile).toEqual(_profile);
expect(a.email).toEqual(_identity.email);
expect(a.name).toEqual(_identity.name);
done();
}).catch(err => {
done.fail(err);
});
});
});
it('populates new agent\'s fields with data from the identity token-generated profile when the social profile is up to date', done => {
invitedAgent.socialProfile = _identity;
invitedAgent.save().then(savedAgent => {
request = httpMocks.createRequest({
method: 'GET',
url: '/agent',
user: {..._profile}
});
expect(savedAgent.socialProfile).toEqual(_identity);
checkPermissions([scope.read.agents])(request, response, err => {
if (err) return done.fail(err);
Agent.findOne({ where: { email: _identity.email } }).then(a => {
expect(a.socialProfile).toEqual(_profile);
expect(a.email).toEqual(_identity.email);
expect(a.name).toEqual(_identity.name);
done();
}).catch(err => {
done.fail(err);
});
});
}).catch(err => {
done.fail(err);
});
});
describe('Auth0 roles', () => {
it('calls the management API to retrieve all the roles and set viewer role', done => {
request = httpMocks.createRequest({
method: 'POST',
url: '/agent',
user: {..._profile, scope: undefined}
});
checkPermissions([])(request, response, err => {
if (err) return done.fail(err);
expect(rolesReadScope.isDone()).toBe(true);
expect(userAssignRolesScope.isDone()).toBe(true);
expect(userRolesReadScope.isDone()).toBe(true);
done();
});
});
it('attaches the new scoped permissions to req.user', done => {
request = httpMocks.createRequest({
method: 'POST',
url: '/agent',
user: {..._profile, scope: undefined}
});
expect(request.user.scope).toBeUndefined();
checkPermissions([])(request, response, err => {
if (err) return done.fail(err);
expect(request.user.scope).toEqual(_profile.scope);
done();
});
});
it('attaches roles to req.user', done => {
request = httpMocks.createRequest({
method: 'POST',
url: '/agent',
user: {..._profile, scope: undefined}
});
expect(request.user.roles).toBeUndefined();
checkPermissions([])(request, response, err => {
if (err) return done.fail(err);
expect(request.user.roles).toBeDefined();
expect(request.user.roles.length).toEqual(1);
expect(request.user.roles[0].name).toEqual('viewer');
done();
});
});
});
describe('with pending team invitations', () => {
beforeEach(done => {
request = httpMocks.createRequest({
method: 'POST',
url: '/agent',
user: {..._profile, scope: undefined}
});
done();
});
describe('a single invitation', () => {
let teamId;
beforeEach(done => {
teamId = uuid.v4();
models.Update.create({ recipient: _profile.email, uuid: teamId, type: 'team',
data: {name: '<NAME>', leader: _profile.email, id: teamId} }).then(results => {
done();
}).catch(err => {
done.fail(err);
});
});
it('removes the update from the database', done => {
models.Update.findAll().then(updates => {
expect(updates.length).toEqual(1);
checkPermissions([])(request, response, err => {
if (err) return done.fail(err);
models.Update.findAll().then(updates => {
expect(updates.length).toEqual(0);
done();
}).catch(err => {
done.fail(err);
});
});
}).catch(err => {
done.fail(err);
});
});
// For repurposing...
//
//it('writes the invite to the agent\'s user_metadata', done => {
// expect(request.user.user_metadata).toBeUndefined();
// checkPermissions([])(request, response, err => {
// if (err) return done.fail(err);
// expect(request.user.user_metadata).toBeDefined();
// expect(request.user.user_metadata.rsvps.length).toEqual(1);
// expect(request.user.user_metadata.rsvps[0].uuid).toEqual(teamId);
// expect(request.user.user_metadata.rsvps[0].type).toEqual('team');
// expect(request.user.user_metadata.rsvps[0].recipient).toEqual(_profile.email);
// expect(request.user.user_metadata.rsvps[0].data.name).toEqual('<NAME>');
// expect(request.user.user_metadata.rsvps[0].data.id).toEqual(teamId);
// expect(request.user.user_metadata.rsvps[0].data.leader).toEqual(_profile.email);
// expect(request.user.user_metadata.rsvps[0].data.organizationId).toBeUndefined();
// done();
// });
//});
describe('Auth0', () => {
it('is called to write the updates to the agent\'s user_metadata', done => {
request = httpMocks.createRequest({
method: 'POST',
url: '/agent',
user: {..._profile, scope: undefined}
});
checkPermissions([])(request, response, err => {
if (err) return done.fail(err);
expect(userAppMetadataUpdateScope.isDone()).toBe(true);
expect(userAppMetadataUpdateOauthTokenScope.isDone()).toBe(true);
done();
});
});
});
// Two pending team invitations exist for the same agent (one created here,
// one presumably created in an outer beforeEach -- the findAll() below
// expects 2). A single pass through checkPermissions should consume both.
describe('multiple invitations', () => {
let anotherTeamId;
beforeEach(done => {
request = httpMocks.createRequest({
method: 'POST',
url: '/agent',
user: {..._profile, scope: undefined}
});
// Second pending invitation, addressed to the same agent.
anotherTeamId = uuid.v4();
models.Update.create({ recipient: _profile.email, uuid: anotherTeamId, type: 'team',
data: {name: '<NAME>', leader: _profile.email, id: anotherTeamId} }).then(results => {
done();
}).catch(err => {
done.fail(err);
});
});
it('removes the update from the database', done => {
models.Update.findAll().then(updates => {
// Both invitations are pending before the middleware runs...
expect(updates.length).toEqual(2);
checkPermissions([])(request, response, err => {
if (err) return done.fail(err);
models.Update.findAll().then(updates => {
// ...and both are consumed in one pass.
expect(updates.length).toEqual(0);
done();
}).catch(err => {
done.fail(err);
});
});
}).catch(err => {
done.fail(err);
});
});
// For repurposing...
//
//it('writes the invite to the agent\'s user_metadata', done => {
// expect(request.user.user_metadata).toBeUndefined();
// checkPermissions([])(request, response, err => {
// if (err) return done.fail(err);
// expect(request.user.user_metadata).toBeDefined();
// expect(request.user.user_metadata.rsvps.length).toEqual(2);
// expect(request.user.user_metadata.rsvps[0].uuid).toEqual(anotherTeamId);
// expect(request.user.user_metadata.rsvps[0].type).toEqual('team');
// expect(request.user.user_metadata.rsvps[0].recipient).toEqual(_profile.email);
// expect(request.user.user_metadata.rsvps[0].data.name).toEqual('The Bu<NAME>');
// expect(request.user.user_metadata.rsvps[0].data.id).toEqual(anotherTeamId);
// expect(request.user.user_metadata.rsvps[0].data.leader).toEqual(_profile.email);
// expect(request.user.user_metadata.rsvps[0].data.organizationId).toBeUndefined();
// expect(request.user.user_metadata.rsvps[1].uuid).toEqual(teamId);
// expect(request.user.user_metadata.rsvps[1].type).toEqual('team');
// expect(request.user.user_metadata.rsvps[1].recipient).toEqual(_profile.email);
// expect(request.user.user_metadata.rsvps[1].data.name).toEqual('<NAME>');
// expect(request.user.user_metadata.rsvps[1].data.id).toEqual(teamId);
// expect(request.user.user_metadata.rsvps[1].data.leader).toEqual(_profile.email);
// expect(request.user.user_metadata.rsvps[1].data.organizationId).toBeUndefined();
// done();
// });
//});
// Management-API calls must still happen when multiple invitations are pending.
describe('Auth0', () => {
it('is called to write the updates to the agent\'s user_metadata', done => {
request = httpMocks.createRequest({
method: 'POST',
url: '/agent',
user: {..._profile, scope: undefined}
});
checkPermissions([])(request, response, err => {
if (err) return done.fail(err);
expect(userAppMetadataUpdateScope.isDone()).toBe(true);
expect(userAppMetadataUpdateOauthTokenScope.isDone()).toBe(true);
done();
});
});
});
});
});
});
});
});
// Invitations that update an entity the agent already knows about: either a
// team already present in user_metadata.teams, or an already-stored rsvp.
// In both cases the pending Update row must be consumed from the database.
describe('invitations as updates', () => {
let userAppMetadataUpdateScope, userAppMetadataUpdateOauthTokenScope;
describe('team updates', () => {
let anotherTeamId;
beforeEach(done => {
anotherTeamId = uuid.v4();
// The agent already has this team in user_metadata; the pending Update
// carries a new name for it ('The Beefalo Bandits').
request = httpMocks.createRequest({
method: 'POST',
url: '/agent',
user: {..._profile, email: '<EMAIL>', name: '<NAME>', scope: undefined,
user_metadata: { teams: [{name: 'The Buffalo Bandits', id: anotherTeamId, leader: _profile.email}] } }
});
models.Update.create({ recipient: '<EMAIL>', uuid: anotherTeamId, type: 'team',
data: {name: 'The Beefalo Bandits', leader: _profile.email, id: anotherTeamId} }).then(results => {
stubUserAppMetadataUpdate((err, apiScopes) => {
if (err) return done.fail();
({userAppMetadataUpdateScope, userAppMetadataUpdateOauthTokenScope} = apiScopes);
done();
});
}).catch(err => {
done.fail(err);
});
});
it('removes the update from the database', done => {
models.Update.findAll().then(updates => {
expect(updates.length).toEqual(1);
checkPermissions([])(request, response, err => {
if (err) return done.fail(err);
models.Update.findAll().then(updates => {
expect(updates.length).toEqual(0);
done();
}).catch(err => {
done.fail(err);
});
});
}).catch(err => {
done.fail(err);
});
});
// For repurposing...
//
//it('writes the update to the agent\'s user_metadata', done => {
// expect(request.user.user_metadata.rsvps).toBeUndefined();
// expect(request.user.user_metadata.teams.length).toEqual(1);
// expect(request.user.user_metadata.teams[0].name).toEqual('The Buffalo Bandits');
// checkPermissions([])(request, response, err => {
// if (err) return done.fail(err);
// expect(request.user.user_metadata.rsvps.length).toEqual(0);
// expect(request.user.user_metadata.teams.length).toEqual(1);
// expect(request.user.user_metadata.teams[0].name).toEqual('The Beefalo Bandits');
// done();
// });
//});
describe('Auth0 roles', () => {
it('calls the management API update user_metadata', done => {
checkPermissions([])(request, response, err => {
if (err) return done.fail(err);
expect(userAppMetadataUpdateOauthTokenScope.isDone()).toBe(true);
expect(userAppMetadataUpdateScope.isDone()).toBe(true);
done();
});
});
});
});
describe('rsvp updates', () => {
let anotherTeamId;
beforeEach(done => {
anotherTeamId = uuid.v4();
// Same idea, but the invitation is already stored as an rsvp in
// user_metadata rather than as a joined team.
request = httpMocks.createRequest({
method: 'POST',
url: '/agent',
user: {..._profile, email: '<EMAIL>', name: '<NAME>', scope: undefined,
user_metadata: {
rsvps: [{
uuid: anotherTeamId, type: 'team', recipient: '<EMAIL>',
data: { name: 'The Buffalo Bandits', id: anotherTeamId, leader: '<EMAIL>', organizationId: uuid.v4() } }] } }
});
models.Update.create({ recipient: '<EMAIL>', uuid: anotherTeamId, type: 'team',
data: {name: 'The Beefalo Bandits', leader: _profile.email, id: anotherTeamId} }).then(results => {
stubUserAppMetadataUpdate((err, apiScopes) => {
if (err) return done.fail();
({userAppMetadataUpdateScope, userAppMetadataUpdateOauthTokenScope} = apiScopes);
done();
});
}).catch(err => {
done.fail(err);
});
});
it('removes the update from the database', done => {
models.Update.findAll().then(updates => {
expect(updates.length).toEqual(1);
checkPermissions([])(request, response, err => {
if (err) return done.fail(err);
models.Update.findAll().then(updates => {
expect(updates.length).toEqual(0);
done();
}).catch(err => {
done.fail(err);
});
});
}).catch(err => {
done.fail(err);
});
});
// For repurposing...
//
//it('writes the update to the agent\'s user_metadata', done => {
// expect(request.user.user_metadata.rsvps.length).toEqual(1);
// expect(request.user.user_metadata.rsvps[0].data.name).toEqual('The Buffalo Bandits');
// expect(request.user.user_metadata.teams).toBeUndefined(0);
// checkPermissions([])(request, response, err => {
// if (err) return done.fail(err);
// expect(request.user.user_metadata.teams.length).toEqual(0);
// expect(request.user.user_metadata.rsvps.length).toEqual(1);
// expect(request.user.user_metadata.rsvps[0].data.name).toEqual('The Beefalo Bandits');
// done();
// });
//});
describe('Auth0 roles', () => {
it('calls the management API update user_metadata', done => {
checkPermissions([])(request, response, err => {
if (err) return done.fail(err);
expect(userAppMetadataUpdateOauthTokenScope.isDone()).toBe(true);
expect(userAppMetadataUpdateScope.isDone()).toBe(true);
done();
});
});
});
});
});
// An authenticated agent without the required scope must be rejected
// with a 403 and a descriptive error payload.
describe('unauthorized', () => {
  it('returns 403', done => {
    response = httpMocks.createResponse();
    request = httpMocks.createRequest({ method: 'POST', url: '/agent', user: _profile });

    // Sanity check: nothing has touched the response yet.
    expect(response.statusCode).toEqual(200);

    const middleware = checkPermissions([scope.create.agents]);
    middleware(request, response, err => {
      if (!err) {
        return done.fail('There should have been an error');
      }
      expect(err.error).toEqual('Forbidden');
      expect(err.message).toEqual('Insufficient scope');
      expect(err.statusCode).toEqual(403);
      done();
    });
  });
});
// No authenticated user on the request at all: the middleware should
// short-circuit with a redirect to the login page.
describe('unauthenticated', () => {
  it('redirects to login', done => {
    response = httpMocks.createResponse();
    request = httpMocks.createRequest({
      method: 'POST',
      url: '/agent',
      user: undefined // Passport never got a hold of the request object
    });

    expect(response.statusCode).toEqual(200);

    // This may prove a bit flaky...
    // The status code stuff happens outside anything asynchronous
    const middleware = checkPermissions([scope.read.agents]);
    middleware(request, response, function(err) {
      done.fail('Should not get here');
    });

    expect(response.statusCode).toEqual(302);
    expect(response._getRedirectUrl()).toEqual('/login');
    done();
  });
});
});
|
uporabnik1/VoltaDiscord-Musicbot
|
VoltaDiscordGlasba/commands/premor.js
|
<reponame>uporabnik1/VoltaDiscord-Musicbot
exports.run = async(client, message) => {
const channel = message.member.voice.channel;
if (!channel) return message.channel.send('Za uporabo tega ukaza moraš biti v klicu!');
let queue = message.client.queue.get(message.guild.id)
if(!queue) return message.channel.send({
embed: {
description: 'Ne predvaja se nič, kar bi lahko bilo ustavljeno!'
}
})
if(queue.playing !== false)
queue.connection.dispatcher.pause()
message.react('⏸')
message.channel.send('Skladba je bila začasno ustavljena, za nadaljevanje pošlji v!nadaljuj!')
}
|
markphip/testing
|
jira-dvcs-connector-pageobjects/src/main/java/com/atlassian/jira/plugins/dvcs/pageobjects/page/account/AccountControlsDialog.java
|
package com.atlassian.jira.plugins.dvcs.pageobjects.page.account;
import com.atlassian.pageobjects.elements.ElementBy;
import com.atlassian.pageobjects.elements.PageElement;
import com.atlassian.pageobjects.elements.WebDriverElement;
import com.atlassian.pageobjects.elements.WebDriverLocatable;
import com.atlassian.pageobjects.elements.timeout.TimeoutType;
import org.openqa.selenium.By;
/**
 * Controls dialog of {@link Account}.
 * <p>
 * Page object for the small pop-up menu of maintenance actions on a DVCS
 * account. Elements are located by their visible link text, so these
 * locators break if the UI copy changes.
 */
public class AccountControlsDialog extends WebDriverElement
{
// Link that triggers a refresh of the account's repository list.
@ElementBy(linkText = "Refresh list")
private PageElement refreshLink;
// Link that resets (regenerates) the account's OAuth settings.
@ElementBy(linkText = "Reset OAuth Settings")
private PageElement regenerateLink;
// Constructors simply delegate to WebDriverElement; they exist so the
// page-object framework can instantiate this element with any locator form.
public AccountControlsDialog(By locator, WebDriverLocatable parent, TimeoutType timeoutType)
{
super(locator, parent, timeoutType);
}
public AccountControlsDialog(By locator, WebDriverLocatable parent)
{
super(locator, parent);
}
public AccountControlsDialog(By locator)
{
super(locator);
}
/**
* Refreshes repositories list of account.
*/
public void refresh()
{
refreshLink.click();
}
/**
* Regenerates account OAuth.
*/
public void regenerate()
{
regenerateLink.click();
}
}
|
jasper-yeh/VtkDotNet
|
Imaging/Sources/vtkImageGaussianSource.cxx
|
/*=========================================================================
Program: Visualization Toolkit
Module: vtkImageGaussianSource.cxx
Copyright (c) <NAME>, <NAME>, <NAME>
All rights reserved.
See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
This software is distributed WITHOUT ANY WARRANTY; without even
the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE. See the above copyright notice for more information.
=========================================================================*/
#include "vtkImageGaussianSource.h"
#include "vtkImageData.h"
#include "vtkInformation.h"
#include "vtkInformationVector.h"
#include "vtkObjectFactory.h"
#include "vtkStreamingDemandDrivenPipeline.h"
#include <math.h>
vtkStandardNewMacro(vtkImageGaussianSource);
//----------------------------------------------------------------------------
// Construct with defaults: a 256x256 single-slice output, a unit-amplitude
// Gaussian centered at the index-space origin, standard deviation 100.
vtkImageGaussianSource::vtkImageGaussianSource()
{
// Pure source: consumes no pipeline inputs.
this->SetNumberOfInputPorts(0);
this->Maximum = 1.0;
this->Center[0] = 0.0;
this->Center[1] = 0.0;
this->Center[2] = 0.0;
// Extent is (xmin,xmax, ymin,ymax, zmin,zmax); z-range of [0,0] = one slice.
this->WholeExtent[0] = 0; this->WholeExtent[1] = 255;
this->WholeExtent[2] = 0; this->WholeExtent[3] = 255;
this->WholeExtent[4] = 0; this->WholeExtent[5] = 0;
this->StandardDeviation = 100.0;
}
//----------------------------------------------------------------------------
// Set the extent of the whole output image. Invokes Modified() only when at
// least one of the six bounds actually changes, preserving the original
// change-detection behavior while replacing six copy-pasted compare-and-set
// stanzas with a single loop.
void vtkImageGaussianSource::SetWholeExtent(int xMin, int xMax,
                                            int yMin, int yMax,
                                            int zMin, int zMax)
{
  const int requested[6] = { xMin, xMax, yMin, yMax, zMin, zMax };
  bool modified = false;
  for (int i = 0; i < 6; ++i)
    {
    if (this->WholeExtent[i] != requested[i])
      {
      this->WholeExtent[i] = requested[i];
      modified = true;
      }
    }
  if (modified)
    {
    this->Modified();
    }
}
//----------------------------------------------------------------------------
// Pipeline pass 1: advertise the output's meta-data (unit spacing, origin at
// zero, the configured whole extent, and a single double-precision scalar
// component) without producing any pixel data.
int vtkImageGaussianSource::RequestInformation (
vtkInformation * vtkNotUsed(request),
vtkInformationVector** vtkNotUsed( inputVector ),
vtkInformationVector *outputVector)
{
// get the info objects
vtkInformation* outInfo = outputVector->GetInformationObject(0);
outInfo->Set(vtkDataObject::SPACING(), 1.0, 1.0, 1.0);
outInfo->Set(vtkDataObject::ORIGIN(), 0.0, 0.0, 0.0);
outInfo->Set(vtkStreamingDemandDrivenPipeline::WHOLE_EXTENT(),
this->WholeExtent,6);
vtkDataObject::SetPointDataActiveScalarInfo(outInfo, VTK_DOUBLE, 1);
return 1;
}
//----------------------------------------------------------------------------
// Pipeline pass 2: fill the output image with an isotropic Gaussian
//   value = Maximum * exp(-dist^2 / (2 * StandardDeviation^2))
// where dist is the Euclidean distance from Center in voxel index space.
int vtkImageGaussianSource::RequestData(
vtkInformation* vtkNotUsed(request),
vtkInformationVector** vtkNotUsed(inputVector),
vtkInformationVector* outputVector)
{
double *outPtr;
int idxX, idxY, idxZ;
int maxX, maxY, maxZ;
vtkIdType outIncX, outIncY, outIncZ;
int *outExt;
double sum;
double yContrib, zContrib;
double temp, temp2;
unsigned long count = 0;
unsigned long target;
vtkInformation *outInfo = outputVector->GetInformationObject(0);
vtkImageData *output = vtkImageData::SafeDownCast(
outInfo->Get(vtkDataObject::DATA_OBJECT()));
vtkImageData *data = this->AllocateOutputData(output, outInfo);
// RequestInformation promised VTK_DOUBLE; anything else is a logic error.
// Note: execution continues after the error macro (original behavior).
if (data->GetScalarType() != VTK_DOUBLE)
{
vtkErrorMacro("Execute: This source only outputs doubles");
}
outExt = data->GetExtent();
// find the region to loop over
maxX = outExt[1] - outExt[0];
maxY = outExt[3] - outExt[2];
maxZ = outExt[5] - outExt[4];
// Get increments to march through data
// Continuous increments are the row/slice paddings to skip after each
// inner loop completes (0 when rows are contiguous).
data->GetContinuousIncrements(outExt, outIncX, outIncY, outIncZ);
outPtr = static_cast<double *>(data->GetScalarPointer(outExt[0],outExt[2],outExt[4]));
// Report progress roughly every 2% of the total rows (hence the /50).
target = static_cast<unsigned long>((maxZ+1)*(maxY+1)/50.0);
target++;
// Loop through output pixels
// Hoist the constant 1/(2*sigma^2) factor out of the triple loop.
temp2 = 1.0 / (2.0 * this->StandardDeviation * this->StandardDeviation);
for (idxZ = 0; idxZ <= maxZ; idxZ++)
{
// Squared z-distance is constant across the whole slice.
zContrib = this->Center[2] - (idxZ + outExt[4]);
zContrib = zContrib*zContrib;
for (idxY = 0; !this->AbortExecute && idxY <= maxY; idxY++)
{
if (!(count%target))
{
this->UpdateProgress(count/(50.0*target));
}
count++;
yContrib = this->Center[1] - (idxY + outExt[2]);
yContrib = yContrib*yContrib;
for (idxX = 0; idxX <= maxX; idxX++)
{
// Pixel operation
sum = zContrib + yContrib;
temp = this->Center[0] - (idxX + outExt[0]);
sum = sum + (temp * temp);
*outPtr = this->Maximum * exp(-sum * temp2);
outPtr++;
}
outPtr += outIncY;
}
outPtr += outIncZ;
}
return 1;
}
// Print this object's state for debugging, chaining to the superclass first.
void vtkImageGaussianSource::PrintSelf(ostream& os, vtkIndent indent)
{
  this->Superclass::PrintSelf(os, indent);
  // Emit our own ivars as one chained stream expression.
  os << indent << "Maximum: " << this->Maximum << "\n"
     << indent << "StandardDeviation: " << this->StandardDeviation << "\n"
     << indent << "Center: ( "
     << this->Center[0] << ", "
     << this->Center[1] << ", "
     << this->Center[2] << " )\n";
}
|
BoneyIsSpooky/Javacord
|
javacord-api/src/main/java/org/javacord/api/listener/server/role/RoleChangePermissionsListener.java
|
<reponame>BoneyIsSpooky/Javacord
package org.javacord.api.listener.server.role;
import org.javacord.api.event.server.role.RoleChangePermissionsEvent;
import org.javacord.api.listener.GloballyAttachableListener;
import org.javacord.api.listener.ObjectAttachableListener;
import org.javacord.api.listener.server.ServerAttachableListener;
/**
 * This listener listens to role permission changes.
 */
@FunctionalInterface
public interface RoleChangePermissionsListener extends ServerAttachableListener, RoleAttachableListener,
GloballyAttachableListener, ObjectAttachableListener {
/**
* This method is called every time a role's permissions change.
*
* @param event The event.
*/
void onRoleChangePermissions(RoleChangePermissionsEvent event);
}
|
shuigedeng/taotao-cloud-paren
|
taotao-cloud-java/taotao-cloud-javaee/src/main/java/com/taotao/cloud/java/javaee/s1/c4_spring/p3/java/aspect/MyAspect.java
|
package com.taotao.cloud.java.javaee.s1.c4_spring.p3.java.aspect;
import org.aspectj.lang.JoinPoint;
import org.aspectj.lang.ProceedingJoinPoint;
import org.aspectj.lang.annotation.AfterThrowing;
import org.aspectj.lang.annotation.Aspect;
import org.aspectj.lang.annotation.Pointcut;
import org.springframework.stereotype.Component;
@Aspect // Declares this class as an aspect: it bundles pointcuts and advice
@Component // Register as a Spring-managed bean so the aspect is picked up
public class MyAspect {
// Define the reusable pointcut: every method of UserServiceImpl
@Pointcut("execution(* com.qf.service.UserServiceImpl.*(..))")
public void pc(){}
/*@Before("pc()") // before advice
public void mybefore(JoinPoint a) {
System.out.println("target:"+a.getTarget());
System.out.println("args:"+a.getArgs());
System.out.println("method's name:"+a.getSignature().getName());
System.out.println("before~~~~");
}
@AfterReturning(value="pc()",returning="ret") // after-returning advice
public void myAfterReturning(JoinPoint a,Object ret){
System.out.println("after~~~~:"+ret);
}*/
/*@Around("pc()") // around advice
public Object myInterceptor(ProceedingJoinPoint p) throws Throwable {
System.out.println("interceptor1~~~~");
Object ret = p.proceed();
System.out.println("interceptor2~~~~");
return ret;
}*/
// After-throwing advice: runs when a matched method throws; `ex` is bound
// to the thrown exception.
@AfterThrowing(value="pc()",throwing="ex")
public void myThrows(JoinPoint jp,Exception ex){
System.out.println("throws");
System.out.println("===="+ex.getMessage());
}
}
|
dander521/HuoXunCaiJing
|
huoxun/Mine/Authentication/City/Model/Province.h
|
//
// Province.h
// CustomLocationPicker
//
// Created by apple on 2017/1/4.
// Copyright © 2017年 yuantuan. All rights reserved.
//
#import <Foundation/Foundation.h>
#import "City.h"
// Model object for a province and the cities it contains.
@interface Province : NSObject
// Unique identifier. NOTE(review): the property name `id` shadows the
// Objective-C generic object type `id`; it compiles but is easy to misread.
@property (nonatomic, strong) NSString *id; // id
// Display name of the province.
@property (nonatomic, strong) NSString *name; // name
// Cities belonging to this province.
@property (nonatomic, strong) NSArray <City *>*city; // city
// Initializer from a parsed dictionary (implementation not visible here).
- (instancetype)initWithDictionary:(NSDictionary *)dictionary;
// Convenience constructor -- presumably wraps initWithDictionary:.
+ (instancetype)provinceWithDict:(NSDictionary *)dictionary;
// Maps an array of dictionaries to an array of Province objects.
+ (NSArray *)provinceWithArray:(NSArray *)array;
@end
|
TingeOGinge/stroustrup_ppp
|
code_snippets/Chapter09/chapter.9.6.cpp
|
//
// This is example code from Chapter 9.6 "Operator overloading" of
// "Programming -- Principles and Practice Using C++" by <NAME>
//
#include <iostream>
using namespace std;
//------------------------------------------------------------------------------
// Calendar months, numbered 1 (jan) through 12 (dcm).
enum Month {
    jan=1, feb, mar, apr, may, jun, jul, aug, sep, oct, nov, dcm
};

//------------------------------------------------------------------------------
// Prefix increment: advance to the next month, wrapping December -> January.
Month operator++(Month& m)
{
    if (m == dcm) {
        m = jan; // "wrap around"
    } else {
        m = Month(m + 1);
    }
    return m;
}

//------------------------------------------------------------------------------
// Stream a month as its English name (enumerator values are 1-based).
std::ostream& operator<<(std::ostream& os, Month m)
{
    static const char* const names[12] = {
        "January","February","March","April","May","June","July",
        "August","September","October","November","December"
    };
    return os << names[m-1];
}
//------------------------------------------------------------------------------
// Minimal placeholder type used to illustrate which operator overloads the
// language permits for user-defined types.
class Vector {};
//int operator+(int,int); // error: you can't overload built-in +
Vector operator+(const Vector&, const Vector &); // ok: at least one operand is user-defined
Vector operator+=(const Vector&, int); // ok: same rule applies to compound assignment
//------------------------------------------------------------------------------
// Demonstrate the overloaded prefix ++ on Month, including the wrap-around.
int main()
{
    Month m = sep;
    // Advance four times: oct, nov, dcm, then wrap back to jan.
    for (int step = 0; step < 4; ++step) {
        ++m;
    }
    cout << m << endl; // prints "January" via the overloaded operator<<
    return 0;
}
|
Brendaneus/Railroad
|
test/factories.rb
|
# FactoryBot definitions for the test suite. Nested factories inherit all of
# the parent's attributes and add/override an association, following
# FactoryBot's factory-nesting convention.
FactoryBot.define do
factory :archiving do
title { "Test Archiving" }
content { "Sample Content" }
end
factory :blog_post do
title { "Test Blog Post" }
content { "Sample Content" }
end
# Comments are polymorphic over their :post association; each nested factory
# pins the association to a concrete post type.
factory :comment do
content { "Test Comment" }
factory :blog_post_comment do
association :post, factory: :blog_post
end
factory :forum_post_comment do
association :post, factory: :forum_post
end
factory :suggestion_comment do
association :post, factory: :suggestion
factory :archiving_suggestion_comment do
association :post, factory: :archiving_suggestion
end
factory :document_suggestion_comment do
association :post, factory: :document_suggestion
end
end
end
# Documents attach to an :article, which may be an archiving or a blog post.
factory :document do
title { "Test Document" }
factory :archiving_document do
association :article, factory: :archiving
end
factory :blog_post_document do
association :article, factory: :blog_post
end
end
factory :forum_post do
user
title { "Test Forum Post" }
content { "Sample Content" }
end
factory :user do
name { "Test User" }
email { "<EMAIL>" }
password { "password" }
password_confirmation { "password" }
end
factory :session do
user
ip { "192.168.0.1" }
end
# Suggestions cite either an archiving or one of its documents.
factory :suggestion do
user
name { "Test Suggestion" }
title { "Title Edit" }
content { "Content Edit" }
factory :archiving_suggestion do
association :citation, factory: :archiving
end
factory :document_suggestion do
association :citation, factory: :archiving_document
end
end
# Explicit class mapping: these factories build PaperTrail::Version records.
factory :version, class: PaperTrail::Version do
event { "Manual Update" }
factory :archiving_version do
association :item, factory: :archiving
end
factory :document_version do
association :item, factory: :document
end
end
end
|
ipelovski/emsbd
|
src/main/java/sasj/data/subject/SubjectService.java
|
package sasj.data.subject;
import sasj.data.grade.Grade;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.util.Optional;
@Service
public class SubjectService {
@Autowired
private SubjectRepository subjectRepository;
@Autowired
private SubjectNameRepository subjectNameRepository;
/**
* Creates and persists a subject for the given grade, reusing an existing
* {@link SubjectName} with the same value or persisting a new one first.
* <p>
* NOTE(review): the find-then-save sequence is not atomic; two concurrent
* calls with a new name could both insert it unless a unique constraint
* exists on the name value -- verify at the schema level.
*
* @param name the subject's name
* @param grade the grade the subject belongs to
* @return the persisted Subject
*/
public Subject create(String name, Grade grade) {
Optional<SubjectName> optionalSubjectName = subjectNameRepository.findByValue(name);
SubjectName subjectName = optionalSubjectName.orElseGet(() -> {
SubjectName newSubjectName = new SubjectName(name);
subjectNameRepository.save(newSubjectName);
return newSubjectName;
});
return subjectRepository.save(new Subject(subjectName, grade));
}
}
|
TheCodeYu/os
|
tools/omake/tolsrc/tk5lgpl1/t5lzma0d/7zip/Common/FileStreams.h
|
// FileStreams.h
#ifndef __FILESTREAMS_H
#define __FILESTREAMS_H
#ifdef WIN32
#include "../../Windows/FileIO.h"
#else
#include "../../Common/C_FileIO.h"
#endif
#include "../IStream.h"
#include "../../Common/MyCom.h"
// Read-only COM-style stream over a disk file; also exposes the file size
// via IStreamGetSize. The underlying file object is selected per platform.
class CInFileStream:
public IInStream,
public IStreamGetSize,
public CMyUnknownImp
{
public:
#ifdef WIN32
NWindows::NFile::NIO::CInFile File;
#else
NC::NFile::NIO::CInFile File;
#endif
CInFileStream() {}
bool Open(LPCTSTR fileName);
#ifdef WIN32
#ifndef _UNICODE
// Wide-character overload, only needed on ANSI Windows builds.
bool Open(LPCWSTR fileName);
#endif
#endif
MY_UNKNOWN_IMP2(IInStream, IStreamGetSize)
STDMETHOD(Read)(void *data, UInt32 size, UInt32 *processedSize);
STDMETHOD(ReadPart)(void *data, UInt32 size, UInt32 *processedSize);
STDMETHOD(Seek)(Int64 offset, UInt32 seekOrigin, UInt64 *newPosition);
STDMETHOD(GetSize)(UInt64 *size);
};
// Sequential input stream over the process's standard input (per the
// commented-out GetStdHandle sketch; implementation lives in the .cpp,
// not visible here).
class CStdInFileStream:
public ISequentialInStream,
public CMyUnknownImp
{
public:
// HANDLE File;
// CStdInFileStream() File(INVALID_HANDLE_VALUE): {}
// void Open() { File = GetStdHandle(STD_INPUT_HANDLE); };
MY_UNKNOWN_IMP
STDMETHOD(Read)(void *data, UInt32 size, UInt32 *processedSize);
STDMETHOD(ReadPart)(void *data, UInt32 size, UInt32 *processedSize);
};
// Writable, seekable COM-style stream over a disk file; supports
// truncation/extension via SetSize. Platform-specific file backend.
class COutFileStream:
public IOutStream,
public CMyUnknownImp
{
public:
#ifdef WIN32
NWindows::NFile::NIO::COutFile File;
#else
NC::NFile::NIO::COutFile File;
#endif
COutFileStream() {}
bool Open(LPCTSTR fileName);
#ifdef WIN32
#ifndef _UNICODE
// Wide-character overload, only needed on ANSI Windows builds.
bool Open(LPCWSTR fileName);
#endif
#endif
MY_UNKNOWN_IMP1(IOutStream)
STDMETHOD(Write)(const void *data, UInt32 size, UInt32 *processedSize);
STDMETHOD(WritePart)(const void *data, UInt32 size, UInt32 *processedSize);
STDMETHOD(Seek)(Int64 offset, UInt32 seekOrigin, UInt64 *newPosition);
STDMETHOD(SetSize)(Int64 newSize);
};
// Sequential output stream -- presumably over standard output, mirroring
// CStdInFileStream above (implementation not visible here).
class CStdOutFileStream:
public ISequentialOutStream,
public CMyUnknownImp
{
public:
MY_UNKNOWN_IMP
STDMETHOD(Write)(const void *data, UInt32 size, UInt32 *processedSize);
STDMETHOD(WritePart)(const void *data, UInt32 size, UInt32 *processedSize);
};
#endif
|
mikolayek/ant-http
|
src/test/java/org/missinglink/http/client/HttpClientUriTest.java
|
/*
* Copyright <NAME> and other contributors as noted.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.missinglink.http.client;
import org.junit.Assert;
import org.junit.Test;
import org.missinglink.http.exception.InvalidUriException;
/**
 * Unit tests for {@link HttpClient}'s URI parsing and normalization.
 *
 * @author alex.sherwin
 *
 */
public class HttpClientUriTest {
public HttpClientUriTest() {
super();
}
// Accepted http:// forms: scheme is case-insensitive, port/path/query
// components are all optional.
@Test
public void testValidHttpUris() throws InvalidUriException {
HttpClient.uri("http://host");
HttpClient.uri("HTTP://host");
HttpClient.uri("http://host/");
HttpClient.uri("http://host:80");
HttpClient.uri("http://host:80/");
HttpClient.uri("http://host/context");
HttpClient.uri("http://host:80/context");
HttpClient.uri("http://host:80/context?query=value");
HttpClient.uri("http://host:80/context/?query=value");
HttpClient.uri("http://host:80/context/file.html");
HttpClient.uri("http://host:80/context/file.html?query=value");
HttpClient.uri("http://host:80/context/file.html?query=value");
HttpClient.uri("http://host:80/context.with/periods./here/file-1.0.02.html?query=value");
}
// Same matrix for https://.
@Test
public void testValidHttpsUris() throws InvalidUriException {
HttpClient.uri("https://host");
HttpClient.uri("HTTPS://host");
HttpClient.uri("https://host/");
HttpClient.uri("https://host:443");
HttpClient.uri("https://host:443/");
HttpClient.uri("https://host/context");
HttpClient.uri("https://host:443/context");
HttpClient.uri("https://host:443/context?query=value");
HttpClient.uri("https://host:443/context/?query=value");
HttpClient.uri("https://host:443/context/file.html");
HttpClient.uri("https://host:443/context/file.html?query=value");
HttpClient.uri("https://host:443/context/file.html?query=value");
HttpClient.uri("https://host:443/context.with/periods./here/file-1.0.02.html?query=value");
}
// Unknown schemes must be rejected.
@Test(expected = InvalidUriException.class)
public void testInvalidProtocolUri() throws Exception {
HttpClient.uri("htt://host");
}
// Round-trip behavior of getUri(): bare hosts gain a trailing slash, spaces
// are encoded as '+', and empty query values ("qp=") drop the '='.
@Test
public void testUriNoBuilderMethods() throws InvalidUriException {
HttpClient client = HttpClient.uri("http://host").toHttpClient();
Assert.assertEquals("http://host/", client.getUri());
client = HttpClient.uri("http://host/").toHttpClient();
Assert.assertEquals("http://host/", client.getUri());
client = HttpClient.uri("http://host/context").toHttpClient();
Assert.assertEquals("http://host/context", client.getUri());
client = HttpClient.uri("http://host/context/longer").toHttpClient();
Assert.assertEquals("http://host/context/longer", client.getUri());
client = HttpClient.uri("http://host/context/longer/").toHttpClient();
Assert.assertEquals("http://host/context/longer/", client.getUri());
client = HttpClient.uri("http://host/con text/longer/").toHttpClient();
Assert.assertEquals("http://host/con+text/longer/", client.getUri());
client = HttpClient.uri("http://host/context/longer/file.html").toHttpClient();
Assert.assertEquals("http://host/context/longer/file.html", client.getUri());
client = HttpClient.uri("http://host/context/longer/?qp").toHttpClient();
Assert.assertEquals("http://host/context/longer/?qp", client.getUri());
client = HttpClient.uri("http://host/context/longer/file.html?qp").toHttpClient();
Assert.assertEquals("http://host/context/longer/file.html?qp", client.getUri());
client = HttpClient.uri("http://host/context/longer/?qp=").toHttpClient();
Assert.assertEquals("http://host/context/longer/?qp", client.getUri());
client = HttpClient.uri("http://host/context/longer/file.html?qp=").toHttpClient();
Assert.assertEquals("http://host/context/longer/file.html?qp", client.getUri());
client = HttpClient.uri("http://host/context/longer/?qp=value").toHttpClient();
Assert.assertEquals("http://host/context/longer/?qp=value", client.getUri());
client = HttpClient.uri("http://host/context/longer/file.html?qp=value").toHttpClient();
Assert.assertEquals("http://host/context/longer/file.html?qp=value", client.getUri());
client = HttpClient.uri("http://host/context/longer/?qp=value with spaces").toHttpClient();
Assert.assertEquals("http://host/context/longer/?qp=value+with+spaces", client.getUri());
client = HttpClient.uri("http://host/context/longer/file.html?qp=value with spaces").toHttpClient();
Assert.assertEquals("http://host/context/longer/file.html?qp=value+with+spaces", client.getUri());
client = HttpClient.uri("http://host/context/longer/?qp2=value2&qp=value with spaces").toHttpClient();
Assert.assertEquals("http://host/context/longer/?qp2=value2&qp=value+with+spaces", client.getUri());
client = HttpClient.uri("http://host/context/longer/file.html?qp2=value2&qp=value with spaces").toHttpClient();
Assert.assertEquals("http://host/context/longer/file.html?qp2=value2&qp=value+with+spaces", client.getUri());
client = HttpClient.uri("http://host/context/longer/file.html?qp3=value3&qp2=value2&qp=value with spaces").toHttpClient();
Assert.assertEquals("http://host/context/longer/file.html?qp3=value3&qp2=value2&qp=value+with+spaces", client.getUri());
client = HttpClient.uri("http://host/context/longer/file.html?qp3=value3&qp2=value=stuff&qp=value").toHttpClient();
Assert.assertEquals("http://host/context/longer/file.html?qp3=value3&qp2=value=stuff&qp=value", client.getUri());
}
}
|
fredmorcos/attic
|
Projects/Jov/jov_mediacenter/src/obj.h
|
#ifndef __MC_OBJ__
#define __MC_OBJ__
#include <SDL.h>
#include <SDL_ttf.h>
/* A renderable UI object: cached textures for its text content and background
 * box, the composited result, and its on-screen rectangle. */
typedef struct {
SDL_Texture *content_texture;
SDL_Texture *box_texture;
SDL_Texture *texture;
SDL_Rect rect;
} Obj;
/* Releases the textures owned by o (implementation not visible here). */
void obj_destroy_textures (Obj *o);
/* Renders text t with the given font/color for o; size_changed appears to be
 * an out-flag reporting a size change -- TODO confirm against the .c file. */
SDL_bool obj_set_text (Obj *o,
SDL_Renderer *rend,
SDL_Color *color,
TTF_Font *font,
const char *t,
SDL_bool *size_changed);
/* Sets o's background using color c -- presumably fills box_texture. */
SDL_bool obj_set_bg (Obj *o, SDL_Renderer *rend, SDL_Color *c);
/* Composites o's textures for rendering with the given renderer. */
SDL_bool obj_blit_textures (Obj *o, SDL_Renderer *rend);
#endif
|
andela/-ah-backend-thor
|
authors/apps/profiles/apps.py
|
<reponame>andela/-ah-backend-thor<filename>authors/apps/profiles/apps.py
from django.apps import AppConfig
from django.utils.translation import ugettext_lazy as _
class ProfilesConfig(AppConfig):
    """Django app configuration for the profiles app."""

    # Dotted path of the app this config applies to.
    name = 'authors.apps.profiles'
    # Human-readable (translatable) name, e.g. for the Django admin.
    verbose_name = _('profiles')

    def ready(self):
        # Imported for its side effect: registers the app's signal handlers.
        import authors.apps.profiles.signals  # noqa: F401
|
HIT-SCIR-xuanxuan/OpenKS
|
openks/distributed/quick-start/openKS_distributed/base/mode.py
|
<filename>openks/distributed/quick-start/openKS_distributed/base/mode.py
# Copyright (c) 2020 Room 525 Research Group, Zhejiang University.
# All Rights Reserved.
from __future__ import print_function
__all__ = ['Mode']
class Mode:
    """Enumerates the modes available to fleet.

    Each mode is designed for a different model/training strategy; the
    numeric values are stable identifiers and must not be changed.
    """

    # Parameter-server style training via program transpilation.
    TRANSPILER = 1
    # Training backed by the PSLIB runtime.
    PSLIB = 2
    # Collective (all-reduce style) training.
    COLLECTIVE = 3
konexios/moonstone
|
kronos-web/src/main/java/com/arrow/kronos/web/TelemetryListener.java
|
<reponame>konexios/moonstone
package com.arrow.kronos.web;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.LongAdder;
import org.apache.commons.lang3.RandomStringUtils;
import org.apache.commons.lang3.StringUtils;
import org.springframework.amqp.core.Binding;
import org.springframework.amqp.core.BindingBuilder;
import org.springframework.amqp.core.Queue;
import org.springframework.amqp.core.TopicExchange;
import org.springframework.amqp.rabbit.connection.Connection;
import org.springframework.amqp.rabbit.connection.ConnectionFactory;
import org.springframework.amqp.rabbit.connection.ConnectionListener;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.CommandLineRunner;
import org.springframework.context.ApplicationEventPublisher;
import org.springframework.context.event.EventListener;
import org.springframework.messaging.simp.stomp.StompHeaderAccessor;
import org.springframework.stereotype.Component;
import org.springframework.util.Assert;
import org.springframework.web.socket.messaging.SessionDisconnectEvent;
import org.springframework.web.socket.messaging.SessionUnsubscribeEvent;
import com.arrow.pegasus.service.RabbitListenerAbstract;
import com.fasterxml.jackson.core.type.TypeReference;
import moonstone.acn.MqttConstants;
import moonstone.acs.JsonUtils;
@Component
public class TelemetryListener extends RabbitListenerAbstract implements CommandLineRunner, ConnectionListener {

    @Autowired
    private TelemetryProcessor telemetryProcessor;
    @Autowired
    private TelemetrySubscription telemetrySubscription;
    @Autowired
    private ConnectionFactory connectionFactory;
    @Autowired
    private ApplicationEventPublisher applicationEventPublisher;

    // Per-instance exclusive queue bound to the telemetry topic exchange.
    private Queue queue;
    private TopicExchange exchange;
    private String queueName;

    // Subscription reference counts keyed by gateway / device HID. A gateway
    // entry also owns the RabbitMQ bindings declared for that gateway; the
    // bindings are removed when its count drops below 1.
    private ConcurrentHashMap<String, LongAdder> gateways = new ConcurrentHashMap<>();
    private ConcurrentHashMap<String, LongAdder> devices = new ConcurrentHashMap<>();

    /**
     * Starts the RabbitMQ listener once the Spring Boot application is up.
     */
    @Override
    public void run(String... args) throws Exception {
        String method = "run";
        logInfo(method, "...");
        start();
    }

    @Override
    protected void postConstruct() {
        super.postConstruct();
        init();
        setApplicationEventPublisher(applicationEventPublisher);
        // Re-declare exchange/queue/bindings whenever a new broker connection
        // is established (see onCreate).
        connectionFactory.addConnectionListener(this);
    }

    /**
     * Declares the topic exchange, the per-instance queue and re-declares the
     * bindings for all gateways that currently have active subscriptions.
     * Safe to call repeatedly (e.g. after a broker reconnect).
     */
    private void init() {
        String method = "init";
        logDebug(method, "declaring exchange %s", MqttConstants.DEFAULT_RABBITMQ_EXCHANGE);
        if (exchange == null) {
            exchange = new TopicExchange(MqttConstants.DEFAULT_RABBITMQ_EXCHANGE);
        }
        getRabbitAdmin().declareExchange(exchange);
        if (queue == null) {
            declareNewQueue();
        }
        synchronized (this) {
            gateways.keySet().forEach(gatewayHid -> {
                for (Binding binding : buildBinding(gatewayHid)) {
                    logInfo(method, "add binding: %s", binding.getRoutingKey());
                    getRabbitAdmin().declareBinding(binding);
                }
            });
        }
    }

    /**
     * Declares a fresh exclusive auto-delete queue with a randomized name, so
     * each web instance consumes its own copy of the telemetry stream.
     */
    private void declareNewQueue() {
        String method = "declareNewQueue";
        queueName = String.format("%s.%s", KronosWebConstants.DEVICE_TELEMETRY_RABBITMQ_QUEUE_NAME,
                RandomStringUtils.randomAlphanumeric(10).toLowerCase());
        queue = new Queue(queueName, true, true, true);
        if (getRabbitAdmin().getQueueProperties(queueName) == null) {
            logDebug(method, "declaring queue %s", queueName);
            getRabbitAdmin().declareQueue(queue);
        }
        setQueues(new String[] { queueName });
    }

    /**
     * Dispatches an incoming telemetry message (single sample or batch) to the
     * processor, restricted to the devices that currently have subscribers.
     */
    @Override
    public void receiveMessage(byte[] message, String queueName) {
        String method = "receiveMessage";
        if (MqttConstants.isGatewayToServerTelemetryRouting(queueName)) {
            blockDispatch(() -> {
                try {
                    Map<String, String> telemetry = JsonUtils.fromJsonBytes(message,
                            new TypeReference<Map<String, String>>() {
                            });
                    telemetryProcessor.process(telemetry, devices.keySet());
                } catch (Exception e) {
                    logError(method, e);
                }
            });
        } else if (MqttConstants.isGatewayToServerTelemetryBatchRouting(queueName)) {
            blockDispatch(() -> {
                try {
                    List<Map<String, String>> telemetryList = JsonUtils.fromJsonBytes(message,
                            new TypeReference<List<Map<String, String>>>() {
                            });
                    telemetryList.forEach(telemetry -> telemetryProcessor.process(telemetry, devices.keySet()));
                } catch (Exception e) {
                    logError(method, e);
                }
            });
        } else {
            logError(method, "ERROR: queueName not supported: %s", queueName);
        }
    }

    /**
     * Registers a STOMP subscription for the given gateway/device pair and, on
     * first use of the gateway, declares the corresponding queue bindings.
     */
    public void subscribe(String gatewayHid, String deviceHid, StompHeaderAccessor accessor) {
        String method = "subscribe";
        logDebug(method, "...");
        Assert.hasText(gatewayHid, "gatewayHid is empty");
        Assert.hasText(deviceHid, "deviceHid is empty");
        logDebug(method, "%s", accessor);
        synchronized (telemetrySubscription) {
            TelemetrySubscription.TelemetrySubscriptionModel sub = telemetrySubscription
                    .subscribe(accessor.getSubscriptionId(), gatewayHid, deviceHid);
            logDebug(method, "%s", sub);
            if (sub != null) {
                doSubscribe(sub.getGatewayHid(), sub.getDeviceHid());
            }
        }
    }

    /**
     * Removes the STOMP subscription identified by the accessor, if any, and
     * decrements the corresponding reference counts.
     */
    public void unsubscribe(StompHeaderAccessor accessor) {
        String method = "unsubscribe";
        logDebug(method, "%s", accessor);
        if (StringUtils.isNotEmpty(accessor.getSubscriptionId())) {
            synchronized (telemetrySubscription) {
                TelemetrySubscription.TelemetrySubscriptionModel sub = telemetrySubscription
                        .unsubscribe(accessor.getSubscriptionId());
                logDebug(method, "%s %s", accessor.getSubscriptionId(), sub);
                if (sub != null) {
                    doUnsubscribe(sub.getGatewayHid(), sub.getDeviceHid());
                }
            }
        }
    }

    /**
     * Tears down every remaining subscription when a STOMP session disconnects.
     */
    public void disconnect(StompHeaderAccessor accessor) {
        String method = "disconnect";
        synchronized (telemetrySubscription) {
            for (TelemetrySubscription.TelemetrySubscriptionModel sub : telemetrySubscription.unsubscribeAll()
                    .values()) {
                logDebug(method, "%s", sub);
                if (sub != null) {
                    doUnsubscribe(sub.getGatewayHid(), sub.getDeviceHid());
                }
            }
        }
    }

    @EventListener
    public void unsubscribeEvent(SessionUnsubscribeEvent event) {
        String method = "unsubscribeEvent";
        logDebug(method, "...");
        logDebug(method, "event=%s", event);
        StompHeaderAccessor accessor = StompHeaderAccessor.wrap(event.getMessage());
        unsubscribe(accessor);
    }

    @EventListener
    public void disconnectEvent(SessionDisconnectEvent event) {
        // was "unsubscribeEvent" — corrected so log lines identify the right handler
        String method = "disconnectEvent";
        logDebug(method, "...");
        logDebug(method, "event=%s", event);
        StompHeaderAccessor accessor = StompHeaderAccessor.wrap(event.getMessage());
        disconnect(accessor);
    }

    @Override
    public void onCreate(Connection connection) {
        String method = "onCreate";
        logDebug(method, "...");
        logDebug(method, "connection: %s", connection);
        // A new broker connection invalidates previous declarations; redo them.
        init();
    }

    @Override
    public void onClose(Connection connection) {
        String method = "onClose";
        logDebug(method, "connection: %s", connection);
    }

    @Override
    protected void onMessageListenerContainerRestart() {
        String method = "onMessageListenerContainerRestart";
        logDebug(method, "...");
        declareNewQueue();
        init();
    }

    @Override
    protected int getNumWorkerThreads() {
        return KronosWebConstants.DEVICE_TELEMETRY_LISTENER_NUM_THREADS;
    }

    /**
     * Increments the reference counts for the pair; the first subscription for
     * a gateway also declares its queue bindings inside the mapping function,
     * which ConcurrentHashMap runs atomically for a given key.
     */
    private synchronized void doSubscribe(String gatewayHid, String deviceHid) {
        String method = "doSubscribe";
        if (gatewayHid == null || deviceHid == null) {
            return;
        }
        // computeIfAbsent returns the existing or freshly created adder, so a
        // single chained call replaces the previous computeIfAbsent +
        // computeIfPresent(...).increment() pair.
        gateways.computeIfAbsent(gatewayHid, k -> {
            for (Binding binding : buildBinding(gatewayHid)) {
                logInfo(method, "add binding: %s", binding.getRoutingKey());
                getRabbitAdmin().declareBinding(binding);
            }
            return new LongAdder();
        }).increment();
        devices.computeIfAbsent(deviceHid, k -> new LongAdder()).increment();
        logDebug(method, "gateways=%s devices=%s", gateways, devices);
    }

    /**
     * Decrements the reference counts for the pair; when a gateway's count
     * drops below 1 its bindings are removed. Unknown HIDs are ignored —
     * previously computeIfPresent(...).decrement() threw a
     * NullPointerException when the key was absent.
     */
    private synchronized void doUnsubscribe(String gatewayHid, String deviceHid) {
        String method = "doUnsubscribe";
        if (gatewayHid == null || deviceHid == null) {
            return;
        }
        LongAdder deviceCount = devices.get(deviceHid);
        if (deviceCount != null) {
            deviceCount.decrement();
            if (deviceCount.longValue() < 1) {
                devices.remove(deviceHid);
            }
        }
        LongAdder gatewayCount = gateways.get(gatewayHid);
        if (gatewayCount != null) {
            gatewayCount.decrement();
            if (gatewayCount.longValue() < 1) {
                for (Binding binding : buildBinding(gatewayHid)) {
                    logInfo(method, "remove binding: %s", binding.getRoutingKey());
                    getRabbitAdmin().removeBinding(binding);
                }
                gateways.remove(gatewayHid);
            }
        }
        logDebug(method, "gateways=%s devices=%s", gateways, devices);
    }

    /**
     * Builds the routing-key bindings for a gateway: the canonical
     * "#.&lt;gatewayHid&gt;" key plus a second one tolerating the malformed
     * key sent by a buggy iOS client.
     */
    private Binding[] buildBinding(String gatewayHid) {
        String match1 = String.format("#.%s", gatewayHid);
        // TODO this is a work-round for a bug in iOS app that sent wrong
        // routing key
        String match2 = String.format("#.Optional(\"%s\")", gatewayHid);
        return new Binding[] { BindingBuilder.bind(queue).to(exchange).with(match1),
                BindingBuilder.bind(queue).to(exchange).with(match2) };
    }
}
|
GhatnekarPreeti/ph-css
|
ph-css/src/main/java/com/helger/css/decl/CSSKeyframesBlock.java
|
/**
* Copyright (C) 2014-2021 <NAME> (www.helger.com)
* philip[at]helger[dot]com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.helger.css.decl;
import javax.annotation.Nonnegative;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import javax.annotation.concurrent.NotThreadSafe;
import com.helger.commons.ValueEnforcer;
import com.helger.commons.annotation.Nonempty;
import com.helger.commons.annotation.ReturnsMutableCopy;
import com.helger.commons.collection.impl.CommonsArrayList;
import com.helger.commons.collection.impl.ICommonsList;
import com.helger.commons.hashcode.HashCodeGenerator;
import com.helger.commons.state.EChange;
import com.helger.commons.string.ToStringGenerator;
import com.helger.css.CSSSourceLocation;
import com.helger.css.ECSSVersion;
import com.helger.css.ICSSSourceLocationAware;
import com.helger.css.ICSSVersionAware;
import com.helger.css.ICSSWriterSettings;
/**
* keyframes block
*
* @author <NAME>
*/
@NotThreadSafe
public class CSSKeyframesBlock implements IHasCSSDeclarations <CSSKeyframesBlock>, ICSSVersionAware, ICSSSourceLocationAware
{
  // Selectors of this keyframes block (e.g. "from", "to", "25%").
  private final ICommonsList <String> m_aKeyframesSelectors;
  // Declarations are managed by a shared container; this class only delegates.
  private final CSSDeclarationContainer m_aDeclarations = new CSSDeclarationContainer ();
  private CSSSourceLocation m_aSourceLocation;

  /**
   * Constructor.
   *
   * @param aKeyframesSelectors
   *        The keyframes selectors. May neither be <code>null</code> nor empty
   *        nor contain <code>null</code> values.
   */
  public CSSKeyframesBlock (@Nonnull @Nonempty final String... aKeyframesSelectors)
  {
    ValueEnforcer.notEmptyNoNullValue (aKeyframesSelectors, "KeyframesSelectors");
    m_aKeyframesSelectors = new CommonsArrayList <> (aKeyframesSelectors);
  }

  /**
   * Constructor.
   *
   * @param aKeyframesSelectors
   *        The keyframes selectors. May neither be <code>null</code> nor empty
   *        nor contain <code>null</code> values.
   */
  public CSSKeyframesBlock (@Nonnull @Nonempty final Iterable <String> aKeyframesSelectors)
  {
    ValueEnforcer.notEmptyNoNullValue (aKeyframesSelectors, "KeyframesSelectors");
    m_aKeyframesSelectors = new CommonsArrayList <> (aKeyframesSelectors);
  }

  /**
   * @return A copy of all contained keyframes selectors. Never <code>null</code>.
   */
  @Nonnull
  @ReturnsMutableCopy
  public ICommonsList <String> getAllKeyframesSelectors ()
  {
    return m_aKeyframesSelectors.getClone ();
  }

  // ---- IHasCSSDeclarations implementation: all methods delegate to the
  // internal CSSDeclarationContainer. ----

  @Nonnull
  public CSSKeyframesBlock addDeclaration (@Nonnull final CSSDeclaration aDeclaration)
  {
    m_aDeclarations.addDeclaration (aDeclaration);
    return this;
  }

  @Nonnull
  public CSSKeyframesBlock addDeclaration (@Nonnegative final int nIndex, @Nonnull final CSSDeclaration aNewDeclaration)
  {
    m_aDeclarations.addDeclaration (nIndex, aNewDeclaration);
    return this;
  }

  @Nonnull
  public EChange removeDeclaration (@Nonnull final CSSDeclaration aDeclaration)
  {
    return m_aDeclarations.removeDeclaration (aDeclaration);
  }

  @Nonnull
  public EChange removeDeclaration (@Nonnegative final int nDeclarationIndex)
  {
    return m_aDeclarations.removeDeclaration (nDeclarationIndex);
  }

  @Nonnull
  public EChange removeAllDeclarations ()
  {
    return m_aDeclarations.removeAllDeclarations ();
  }

  @Nonnull
  @ReturnsMutableCopy
  public ICommonsList <CSSDeclaration> getAllDeclarations ()
  {
    return m_aDeclarations.getAllDeclarations ();
  }

  @Nullable
  public CSSDeclaration getDeclarationAtIndex (@Nonnegative final int nIndex)
  {
    return m_aDeclarations.getDeclarationAtIndex (nIndex);
  }

  @Nonnull
  public CSSKeyframesBlock setDeclarationAtIndex (@Nonnegative final int nIndex, @Nonnull final CSSDeclaration aNewDeclaration)
  {
    m_aDeclarations.setDeclarationAtIndex (nIndex, aNewDeclaration);
    return this;
  }

  public boolean hasDeclarations ()
  {
    return m_aDeclarations.hasDeclarations ();
  }

  @Nonnegative
  public int getDeclarationCount ()
  {
    return m_aDeclarations.getDeclarationCount ();
  }

  @Nullable
  public CSSDeclaration getDeclarationOfPropertyName (@Nullable final String sPropertyName)
  {
    return m_aDeclarations.getDeclarationOfPropertyName (sPropertyName);
  }

  @Nonnull
  @ReturnsMutableCopy
  public ICommonsList <CSSDeclaration> getAllDeclarationsOfPropertyName (@Nullable final String sPropertyName)
  {
    return m_aDeclarations.getAllDeclarationsOfPropertyName (sPropertyName);
  }

  /**
   * Serializes this block: the comma-separated selector list followed by the
   * declaration container's output. Returns "" when unnecessary code removal
   * is enabled and no declarations exist.
   */
  @Nonnull
  @Nonempty
  public String getAsCSSString (@Nonnull final ICSSWriterSettings aSettings, @Nonnegative final int nIndentLevel)
  {
    aSettings.checkVersionRequirements (this);
    if (aSettings.isRemoveUnnecessaryCode () && !hasDeclarations ())
      return "";
    final boolean bOptimizedOutput = aSettings.isOptimizedOutput ();
    final StringBuilder aSB = new StringBuilder ();
    // Emit all selectors
    for (final String sSelector : m_aKeyframesSelectors)
    {
      if (aSB.length () > 0)
        aSB.append (bOptimizedOutput ? "," : ", ");
      aSB.append (sSelector);
    }
    aSB.append (m_aDeclarations.getAsCSSString (aSettings, nIndentLevel));
    return aSB.toString ();
  }

  // @keyframes was introduced with CSS 3.
  @Nonnull
  public ECSSVersion getMinimumCSSVersion ()
  {
    return ECSSVersion.CSS30;
  }

  public void setSourceLocation (@Nullable final CSSSourceLocation aSourceLocation)
  {
    m_aSourceLocation = aSourceLocation;
  }

  @Nullable
  public CSSSourceLocation getSourceLocation ()
  {
    return m_aSourceLocation;
  }

  // NOTE: equals/hashCode intentionally ignore m_aSourceLocation — two blocks
  // parsed from different positions compare equal if content matches.
  @Override
  public boolean equals (final Object o)
  {
    if (o == this)
      return true;
    if (o == null || !getClass ().equals (o.getClass ()))
      return false;
    final CSSKeyframesBlock rhs = (CSSKeyframesBlock) o;
    return m_aKeyframesSelectors.equals (rhs.m_aKeyframesSelectors) && m_aDeclarations.equals (rhs.m_aDeclarations);
  }

  @Override
  public int hashCode ()
  {
    return new HashCodeGenerator (this).append (m_aKeyframesSelectors).append (m_aDeclarations).getHashCode ();
  }

  @Override
  public String toString ()
  {
    return new ToStringGenerator (this).append ("KeyframesSelectors", m_aKeyframesSelectors)
                                       .append ("Declarations", m_aDeclarations)
                                       .appendIfNotNull ("SourceLocation", m_aSourceLocation)
                                       .getToString ();
  }
}
|
djewsbury/XLE
|
Assets/AssetServices.h
|
// Distributed under the MIT License (See
// accompanying file "LICENSE" or the website
// http://www.opensource.org/licenses/mit-license.php)
#pragma once
namespace Assets
{
    class AssetSetManager;
    class CompileAndAsyncManager;

        /// Static access points to the process-wide asset service singletons.
        /// Declaration-only header; the singleton storage and attachment
        /// mechanism live in the corresponding .cpp.
    class Services
    {
    public:
            // Returns the global asset-set manager. Behaviour when no
            // instance is attached is defined in the .cpp — verify before
            // calling during early startup.
        static AssetSetManager& GetAssetSets();
            // Returns the global compile-and-async manager.
        static CompileAndAsyncManager& GetAsyncMan();
            // True when an AssetSetManager is currently available.
        static bool HasAssetSets();
    };
}
|
gunten/gunten-commons
|
basic/src/main/java/io/bio/Constant.java
|
package io.bio;
/**
 * Shared configuration constants for the blocking-I/O (BIO) examples.
 */
public class Constant {

    /** TCP port the example server listens on. */
    public static final int PORT = 9000;

    /** Loopback address used by example clients to reach the server. */
    public static final String SERVER_IP = "127.0.0.1";

    /** Size in bytes of the per-connection I/O buffer. */
    public static final int BUFFER_SIZE = 1024;

    /** Number of concurrent clients spawned by the load-test client. */
    public static final int CLIENT_NUM = 50;

    /** Non-instantiable constants holder. */
    private Constant() {
    }
}
|
RamiresOliv/Todyynho-Junior
|
commands/embed.js
|
const Discord = require("discord.js");
const c = require("../config/config.json");
exports.run = async (client, message, args) => {
await message.delete();
if (!message.member.hasPermission("ADMINISTRATOR"))
return message.channel.send(
`${message.author}, você não possui permissão para executar esse comando.`
); //.then(msg=> msg.delete(8000))
const embed3 = new Discord.MessageEmbed()
.setColor("#fff200")
.setTitle(":warning: **ERROR**")
.setDescription(
`<:xmark:314349398824058880> Oops ${message.author} porfavor crie uma mensagem para eu enviar!
need = \`()\`
optional = \`{}\`
error: \`undefined message\`
exampler: \`t.embed (message)\` `
)
.setTimestamp();
let mensg = args.join(" ");
if (!mensg) {
message.channel.send(embed3);
return undefined;
}
const embed = new Discord.MessageEmbed()
.setDescription(`${mensg}`)
.setColor("RANDOM")
.setTimestamp()
.setFooter(
`Publicado por: ${message.author.username}#${message.author.discriminator}`,
message.author.avatarURL
);
message.channel.send(embed);
};
|
EPapadopoulou/SOCIETIES-Platform
|
context-management/css-context-monitor/src/main/java/org/societies/context/source/css/impl/CssCtxMonitor.java
|
/**
* Copyright (c) 2011, SOCIETIES Consortium (WATERFORD INSTITUTE OF TECHNOLOGY (TSSG), HERIOT-WATT UNIVERSITY (HWU), SOLUTA.NET
* (SN), GERMAN AEROSPACE CENTRE (Deutsches Zentrum fuer Luft- und Raumfahrt e.V.) (DLR), Zavod za varnostne tehnologije
* informacijske družbe in elektronsko poslovanje (SETCCE), INSTITUTE OF COMMUNICATION AND COMPUTER SYSTEMS (ICCS), LAKE
* COMMUNICATIONS (LAKE), INTEL PERFORMANCE LEARNING SOLUTIONS LTD (INTEL), PORTUGAL TELECOM INOVAÇÃO, SA (PTIN), IBM Corp.,
* INSTITUT TELECOM (ITSUD), AMITEC DIACHYTI EFYIA PLIROFORIKI KAI EPIKINONIES ETERIA PERIORISMENIS EFTHINIS (AMITEC), TELECOM
* ITALIA S.p.a.(TI), TRIALOG (TRIALOG), Stiftelsen SINTEF (SINTEF), NEC EUROPE LTD (NEC))
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following
* conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING,
* BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
* SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.societies.context.source.css.impl;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.societies.api.comm.xmpp.interfaces.ICommManager;
import org.societies.api.context.model.CtxAssociation;
import org.societies.api.context.model.CtxAttribute;
import org.societies.api.context.model.CtxAttributeValueType;
import org.societies.api.context.model.CtxEntityIdentifier;
import org.societies.api.context.model.CtxIdentifier;
import org.societies.api.context.model.CtxModelType;
import org.societies.api.context.model.CtxOriginType;
import org.societies.api.context.model.IndividualCtxEntity;
import org.societies.api.identity.IIdentity;
import org.societies.api.identity.InvalidFormatException;
import org.societies.api.identity.Requestor;
import org.societies.api.internal.context.broker.ICtxBroker;
import org.societies.api.internal.context.model.CtxAssociationTypes;
import org.societies.api.internal.context.model.CtxAttributeTypes;
import org.societies.api.osgi.event.CSSEvent;
import org.societies.api.osgi.event.EventListener;
import org.societies.api.osgi.event.EventTypes;
import org.societies.api.osgi.event.IEventMgr;
import org.societies.api.osgi.event.InternalEvent;
import org.societies.api.schema.cis.community.Community;
import org.societies.api.schema.cssmanagement.CssRecord;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Lazy;
import org.springframework.stereotype.Service;
/**
* This class is used to update the CSS owner context based on CSS record
* changes.
*
* @author <a href="mailto:<EMAIL>"><NAME></a> (ICCS)
* @since 0.4
*/
@Service
@Lazy(false)
public class CssCtxMonitor extends EventListener {

	/** The logging facility. */
	private static final Logger LOG = LoggerFactory.getLogger(CssCtxMonitor.class);

	/** The internal event types this monitor subscribes to. */
	private static final String[] EVENT_TYPES = { EventTypes.CSS_RECORD_EVENT,
		EventTypes.CSS_FRIENDED_EVENT, EventTypes.CIS_CREATION,
		EventTypes.CIS_SUBS, EventTypes.CIS_UNSUBS };

	/** The internal Context Broker service. */
	@Autowired(required=true)
	private ICtxBroker ctxBroker;

	/** The Event Mgr service. */
	private IEventMgr eventMgr;

	/** The Comm Mgr service. */
	private ICommManager commMgr;

	/** The executor service. */
	// Single-threaded: handlers for successive events run one at a time, in
	// submission order, off the event-delivery thread.
	private ExecutorService executorService = Executors.newSingleThreadExecutor();

	/**
	 * Instantiates the monitor and subscribes it to the internal events listed
	 * in {@link #EVENT_TYPES}.
	 */
	@Autowired(required=true)
	CssCtxMonitor(IEventMgr eventMgr, ICommManager commMgr) {

		if (LOG.isInfoEnabled())
			LOG.info(this.getClass() + " instantiated");
		this.eventMgr = eventMgr;
		this.commMgr = commMgr;
		if (LOG.isInfoEnabled())
			LOG.info("Registering for '" + Arrays.asList(EVENT_TYPES) + "' events");
		this.eventMgr.subscribeInternalEvent(this, EVENT_TYPES, null);
		// TODO unsubscribe when stopped?
	}

	/*
	 * External events are not expected: this monitor only subscribes to
	 * internal events.
	 *
	 * @see org.societies.api.osgi.event.EventListener#handleExternalEvent(org.societies.api.osgi.event.CSSEvent)
	 */
	@Override
	public void handleExternalEvent(CSSEvent event) {

		if (LOG.isWarnEnabled())
			LOG.warn("Received unexpected external '" + event.geteventType() + "' event: " + event);
	}

	/*
	 * Validates the event payload for each supported event type and hands the
	 * actual work to the single-threaded executor via a dedicated Runnable.
	 *
	 * @see org.societies.api.osgi.event.EventListener#handleInternalEvent(org.societies.api.osgi.event.InternalEvent)
	 */
	@Override
	public void handleInternalEvent(InternalEvent event) {

		if (LOG.isDebugEnabled())
			LOG.debug("Received internal " + event.geteventType() + " event: " + event);
		if (EventTypes.CSS_RECORD_EVENT.equals(event.geteventType())) {
			if (!(event.geteventInfo() instanceof CssRecord)) {
				LOG.error("Could not handle internal " + event.geteventType() + " event: "
						+ "Expected event info of type " + CssRecord.class.getName()
						+ " but was " + event.geteventInfo().getClass());
				return;
			}
			final CssRecord cssRecord = (CssRecord) event.geteventInfo();
			this.executorService.execute(new CssRecordUpdateHandler(cssRecord));
		} else if (EventTypes.CSS_FRIENDED_EVENT.equals(event.geteventType())) {
			if (event.geteventSource() == null || event.geteventSource().isEmpty()) {
				LOG.error("Could not handle internal " + event.geteventType() + " event: "
						+ "Expected non-empty event source "
						+ " but was " + event.geteventSource());
				return;
			}
			// NOTE(review): the error message below reports CssRecord as the
			// expected type although a String is checked — looks like a
			// copy-paste slip in the message text only.
			if (!(event.geteventInfo() instanceof String)) {
				LOG.error("Could not handle internal " + event.geteventType() + " event: "
						+ "Expected event info of type " + CssRecord.class.getName()
						+ " but was " + event.geteventInfo().getClass());
				return;
			}
			this.executorService.execute(new CssFriendedHandler(event.geteventSource(),
					(String) event.geteventInfo()));
		} else if (EventTypes.CIS_CREATION.equals(event.geteventType())
				|| EventTypes.CIS_SUBS.equals(event.geteventType())
				|| EventTypes.CIS_UNSUBS.equals(event.geteventType())) {
			if (event.geteventSource() == null || event.geteventSource().length() == 0) {
				LOG.error("Could not handle internal " + event.geteventType() + " event: "
						+ "Expected non-null or non-empty event source of type IIdentity JID String"
						+ " but was " + event.geteventSource());
				return;
			}
			if (!(event.geteventInfo() instanceof Community)) {
				LOG.error("Could not handle internal " + event.geteventType() + " event: "
						+ "Expected event info of type " + Community.class.getName()
						+ " but was " + event.geteventInfo().getClass());
				return;
			}
			final Community cisRecord = (Community) event.geteventInfo();
			// CIS creation and CIS subscription are treated identically:
			// both mean this CSS is now a member of the community.
			if (EventTypes.CIS_CREATION.equals(event.geteventType())
					|| EventTypes.CIS_SUBS.equals(event.geteventType()))
				this.executorService.execute(new CssJoinedCisHandler(
						event.geteventSource(), cisRecord.getCommunityJid()));
			else //if (EventTypes.CIS_UNSUBS.equals(event.geteventType()))
				this.executorService.execute(new CssLeftCisHandler(
						event.geteventSource(), cisRecord.getCommunityJid()));
		} else {
			if (LOG.isWarnEnabled())
				LOG.warn("Received unexpeted event of type '" + event.geteventType() + "'");
		}
	}

	/**
	 * Copies the NAME, EMAIL and ADDRESS_HOME_CITY values of an updated
	 * CssRecord into the owner's individual context entity. Null or empty
	 * record values are skipped (existing attributes are left untouched).
	 */
	private class CssRecordUpdateHandler implements Runnable {

		private final CssRecord cssRecord;

		private CssRecordUpdateHandler(final CssRecord cssRecord) {

			this.cssRecord = cssRecord;
		}

		/*
		 * @see java.lang.Runnable#run()
		 */
		@Override
		public void run() {

			if (LOG.isInfoEnabled())
				LOG.info("Updated CSS record: " + cssRecord);

			final String cssIdStr = cssRecord.getCssIdentity();
			try {
				IIdentity cssId = commMgr.getIdManager().fromJid(cssIdStr);
				CtxEntityIdentifier ownerCtxId =
						ctxBroker.retrieveIndividualEntity(cssId).get().getId();

				String value;

				// NAME
				value = cssRecord.getName();
				if (value != null && !value.isEmpty())
					updateCtxAttribute(ownerCtxId, CtxAttributeTypes.NAME, value);

				// EMAIL
				value = cssRecord.getEmailID();
				if (value != null && !value.isEmpty())
					updateCtxAttribute(ownerCtxId, CtxAttributeTypes.EMAIL, value);

				// ADDRESS_HOME_CITY
				value = cssRecord.getHomeLocation();
				if (value != null && !value.isEmpty())
					updateCtxAttribute(ownerCtxId, CtxAttributeTypes.ADDRESS_HOME_CITY, value);

			} catch (InvalidFormatException ife) {
				LOG.error("Invalid CSS IIdentity found in CSS record: "
						+ ife.getLocalizedMessage(), ife);
			} catch (Exception e) {
				LOG.error("Failed to access context data: "
						+ e.getLocalizedMessage(), e);
			}
		}
	}

	/**
	 * Adds the new friend's individual entity as a child of this CSS's
	 * IS_FRIENDS_WITH association, creating the association on first use.
	 */
	private class CssFriendedHandler implements Runnable {

		private final String myCssIdStr;

		private final String newFriendIdStr;

		private CssFriendedHandler(final String myCssIdStr, final String newFriendIdStr) {

			this.myCssIdStr = myCssIdStr;
			this.newFriendIdStr = newFriendIdStr;
		}

		/*
		 * @see java.lang.Runnable#run()
		 */
		@Override
		public void run() {

			if (LOG.isInfoEnabled())
				LOG.info("CSS '" + myCssIdStr + "' friended '" + newFriendIdStr + "'");

			try {
				final IIdentity myCssId = commMgr.getIdManager().fromJid(myCssIdStr);
				final IndividualCtxEntity myCssEnt =
						ctxBroker.retrieveIndividualEntity(myCssId).get();
				final IIdentity newFriendId = commMgr.getIdManager().fromJid(newFriendIdStr);
				final CtxEntityIdentifier newFriendEntId =
						ctxBroker.retrieveIndividualEntityId(
								new Requestor(myCssId), newFriendId).get();
				// Reuse the existing IS_FRIENDS_WITH association or create it.
				final CtxAssociation isFriendsWithAssoc;
				if (myCssEnt.getAssociations(CtxAssociationTypes.IS_FRIENDS_WITH).isEmpty())
					isFriendsWithAssoc = ctxBroker.createAssociation(
							new Requestor(myCssId), myCssId, CtxAssociationTypes.IS_FRIENDS_WITH).get();
				else
					isFriendsWithAssoc = (CtxAssociation) ctxBroker.retrieve(
							myCssEnt.getAssociations(CtxAssociationTypes.IS_FRIENDS_WITH).iterator().next()).get();
				isFriendsWithAssoc.setParentEntity(myCssEnt.getId());
				isFriendsWithAssoc.addChildEntity(newFriendEntId);
				ctxBroker.update(isFriendsWithAssoc);
			} catch (InvalidFormatException ife) {
				LOG.error("Invalid CSS IIdentity found in CSS record: "
						+ ife.getLocalizedMessage(), ife);
			} catch (Exception e) {
				LOG.error("Failed to access context data: "
						+ e.getLocalizedMessage(), e);
			}
		}
	}

	/**
	 * Adds the joined CIS's community entity as a child of this CSS's
	 * IS_MEMBER_OF association. Because the CommunityCtxEntity may not exist
	 * yet right after CIS_CREATION, its id is polled for up to ~5 s
	 * (10 retries x 500 ms) before giving up.
	 */
	private class CssJoinedCisHandler implements Runnable {

		private final String myCssIdStr;

		private final String cisIdStr;

		private CssJoinedCisHandler(final String myCssIdStr, final String cisIdStr) {

			this.myCssIdStr = myCssIdStr;
			this.cisIdStr = cisIdStr;
		}

		/*
		 * @see java.lang.Runnable#run()
		 */
		@Override
		public void run() {

			if (LOG.isInfoEnabled())
				LOG.info("CSS '" + this.myCssIdStr + "' joined CIS '" + this.cisIdStr + "'");

			try {
				final IIdentity myCssId = commMgr.getIdManager().fromJid(
						this.myCssIdStr);
				final IndividualCtxEntity myCssEnt =
						ctxBroker.retrieveIndividualEntity(myCssId).get();
				final IIdentity cisId = commMgr.getIdManager().fromJid(this.cisIdStr);
				///////////////////////////////////////////////////////////////
				// The CommunityCtxEntity might not be available right after
				// the CIS_CREATION event:
				// (1) check if it can be retrieved, otherwise
				// (2) wait for its creation
				///////////////////////////////////////////////////////////////
				// (1) check if the CommunityCtxEntity has already been created
				CtxEntityIdentifier cisEntId =
						ctxBroker.retrieveCommunityEntityId(
								new Requestor(myCssId), cisId).get();
				// (2) if not available, wait until it's created
				// TODO find better way (event-based?)
				// Blocking here is acceptable because we run on the monitor's
				// dedicated single-threaded executor, not the event thread.
				if (cisEntId == null) {
					if (LOG.isDebugEnabled())
						LOG.debug("Waiting for the CommunityCtxEntity of CIS '"
								+ cisId + "' to be created");
					int retries = 10;
					while (retries > 0) {
						Thread.sleep(500);
						cisEntId = ctxBroker.retrieveCommunityEntityId(
								new Requestor(myCssId), cisId).get();
						retries--;
						if (cisEntId != null)
							break;
					}
				}
				if (cisEntId == null) {
					LOG.error("CommunityCtxEntity of CIS '" + cisId + "' is not available!!");
					return;
				}
				// Reuse the existing IS_MEMBER_OF association or create it.
				final CtxAssociation isMemberOfAssoc;
				if (myCssEnt.getAssociations(CtxAssociationTypes.IS_MEMBER_OF).isEmpty())
					isMemberOfAssoc = ctxBroker.createAssociation(
							new Requestor(myCssId), myCssId, CtxAssociationTypes.IS_MEMBER_OF).get();
				else
					isMemberOfAssoc = (CtxAssociation) ctxBroker.retrieve(
							myCssEnt.getAssociations(CtxAssociationTypes.IS_MEMBER_OF).iterator().next()).get();
				isMemberOfAssoc.setParentEntity(myCssEnt.getId());
				isMemberOfAssoc.addChildEntity(cisEntId);
				ctxBroker.update(isMemberOfAssoc);
			} catch (InvalidFormatException ife) {
				LOG.error("Invalid CSS/CIS IIdentity: "
						+ ife.getLocalizedMessage(), ife);
			} catch (Exception e) {
				LOG.error("Failed to access context data: "
						+ e.getLocalizedMessage(), e);
			}
		}
	}

	/**
	 * Removes the left CIS's community entity from this CSS's IS_MEMBER_OF
	 * association. A no-op when the association does not exist.
	 */
	private class CssLeftCisHandler implements Runnable {

		private final String cssIdStr;

		private final String cisIdStr;

		private CssLeftCisHandler(final String cssIdStr, final String cisIdStr) {

			this.cssIdStr = cssIdStr;
			this.cisIdStr = cisIdStr;
		}

		/*
		 * @see java.lang.Runnable#run()
		 */
		@Override
		public void run() {

			if (LOG.isInfoEnabled())
				LOG.info("CSS '" + this.cssIdStr + "' left CIS '" + this.cisIdStr + "'");

			try {
				final IIdentity myCssId = commMgr.getIdManager().fromJid(
						this.cssIdStr);
				final IndividualCtxEntity myCssEnt =
						ctxBroker.retrieveIndividualEntity(myCssId).get();
				final IIdentity cisId = commMgr.getIdManager().fromJid(cisIdStr);
				final CtxEntityIdentifier cisEntId =
						ctxBroker.retrieveCommunityEntityId(
								new Requestor(myCssId), cisId).get();
				final CtxAssociation isMemberOfAssoc;
				if (!myCssEnt.getAssociations(CtxAssociationTypes.IS_MEMBER_OF).isEmpty()) {
					isMemberOfAssoc = (CtxAssociation) ctxBroker.retrieve(
							myCssEnt.getAssociations(CtxAssociationTypes.IS_MEMBER_OF).iterator().next()).get();
					isMemberOfAssoc.setParentEntity(myCssEnt.getId());
					isMemberOfAssoc.removeChildEntity(cisEntId);
					ctxBroker.update(isMemberOfAssoc);
				}
			} catch (InvalidFormatException ife) {
				LOG.error("Invalid CSS/CIS IIdentity: "
						+ ife.getLocalizedMessage(), ife);
			} catch (Exception e) {
				LOG.error("Failed to access context data: "
						+ e.getLocalizedMessage(), e);
			}
		}
	}

	/**
	 * Sets the string value of the given attribute type under the owner
	 * entity, reusing the first existing attribute of that type or creating a
	 * new one. The attribute is marked as MANUALLY_SET.
	 *
	 * @throws Exception if any context broker operation fails
	 */
	private void updateCtxAttribute(CtxEntityIdentifier ownerCtxId,
			String type, String value) throws Exception {

		if (LOG.isDebugEnabled())
			LOG.debug("Updating '" + type + "' of entity " + ownerCtxId + " to '" + value + "'");

		final List<CtxIdentifier> ctxIds =
				this.ctxBroker.lookup(ownerCtxId, CtxModelType.ATTRIBUTE, type).get();
		final CtxAttribute attr;
		if (!ctxIds.isEmpty())
			attr = (CtxAttribute) this.ctxBroker.retrieve(ctxIds.get(0)).get();
		else
			attr = this.ctxBroker.createAttribute(ownerCtxId, type).get();

		attr.setStringValue(value);
		attr.setValueType(CtxAttributeValueType.STRING);
		attr.getQuality().setOriginType(CtxOriginType.MANUALLY_SET);
		this.ctxBroker.update(attr);
	}
}
|
uptonking/play-servlet-rest
|
src/main/java/com/github/datalking/util/web/RequestContextUtils.java
|
package com.github.datalking.util.web;
import com.github.datalking.web.context.WebApplicationContext;
import com.github.datalking.web.servlet.DispatcherServlet;
import com.github.datalking.web.servlet.flash.FlashMap;
import com.github.datalking.web.servlet.FlashMapManager;
import javax.servlet.ServletContext;
import javax.servlet.ServletRequest;
import javax.servlet.http.HttpServletRequest;
import java.util.Locale;
import java.util.Map;
/**
*/
public abstract class RequestContextUtils {
    /**
     * Looks up the WebApplicationContext bound to the request by the
     * DispatcherServlet, with no ServletContext fallback.
     *
     * @param request current HTTP request
     * @return the request-bound WebApplicationContext (never {@code null})
     * @throws IllegalStateException if no context is bound to the request
     */
    public static WebApplicationContext getWebApplicationContext(ServletRequest request) throws IllegalStateException {
        return getWebApplicationContext(request, null);
    }
public static WebApplicationContext getWebApplicationContext(ServletRequest request, ServletContext servletContext) throws IllegalStateException {
WebApplicationContext webApplicationContext = (WebApplicationContext) request.getAttribute(DispatcherServlet.WEB_APPLICATION_CONTEXT_ATTRIBUTE);
if (webApplicationContext == null) {
if (servletContext == null) {
throw new IllegalStateException("No WebApplicationContext found: not in a DispatcherServlet request?");
}
webApplicationContext = WebApplicationContextUtils.getRequiredWebApplicationContext(servletContext);
}
return webApplicationContext;
}
/**
* Return the LocaleResolver that has been bound to the request by the
* DispatcherServlet.
*
* @param request current HTTP request
* @return the current LocaleResolver, or {@code null} if not found
*/
// public static LocaleResolver getLocaleResolver(HttpServletRequest request) {
// return (LocaleResolver) request.getAttribute(DispatcherServlet.LOCALE_RESOLVER_ATTRIBUTE);
// }
/**
* Retrieves the current locale from the given request,
* using the LocaleResolver bound to the request by the DispatcherServlet
* (if available), falling back to the request's accept-header Locale.
*
* @param request current HTTP request
* @return the current locale, either from the LocaleResolver or from
* the plain request
* @see #getLocaleResolver
* @see javax.servlet.http.HttpServletRequest#getLocale()
*/
// public static Locale getLocale(HttpServletRequest request) {
// LocaleResolver localeResolver = getLocaleResolver(request);
// if (localeResolver != null) {
// return localeResolver.resolveLocale(request);
// } else {
// return request.getLocale();
// }
// }
/**
* Return the ThemeResolver that has been bound to the request by the
* DispatcherServlet.
*
* @param request current HTTP request
* @return the current ThemeResolver, or {@code null} if not found
*/
// public static ThemeResolver getThemeResolver(HttpServletRequest request) {
// return (ThemeResolver) request.getAttribute(DispatcherServlet.THEME_RESOLVER_ATTRIBUTE);
// }
/**
* Return the ThemeSource that has been bound to the request by the
* DispatcherServlet.
*
* @param request current HTTP request
* @return the current ThemeSource
*/
// public static ThemeSource getThemeSource(HttpServletRequest request) {
// return (ThemeSource) request.getAttribute(DispatcherServlet.THEME_SOURCE_ATTRIBUTE);
// }
/**
* Retrieves the current theme from the given request, using the ThemeResolver
* and ThemeSource bound to the request by the DispatcherServlet.
*
* @param request current HTTP request
* @return the current theme, or {@code null} if not found
* @see #getThemeResolver
*/
// public static Theme getTheme(HttpServletRequest request) {
// ThemeResolver themeResolver = getThemeResolver(request);
// ThemeSource themeSource = getThemeSource(request);
// if (themeResolver != null && themeSource != null) {
// String themeName = themeResolver.resolveThemeName(request);
// return themeSource.getTheme(themeName);
// } else {
// return null;
// }
// }
/**
* Return a read-only {@link Map} with "input" flash attributes saved on a
* previous request.
*
* @param request the current request
* @return a read-only Map, or {@code null} if not found
*/
@SuppressWarnings("unchecked")
public static Map<String, ?> getInputFlashMap(HttpServletRequest request) {
return (Map<String, ?>) request.getAttribute(DispatcherServlet.INPUT_FLASH_MAP_ATTRIBUTE);
}
/**
* Return the "output" FlashMap with attributes to save for a subsequent request.
*
* @param request the current request
* @return a {@link FlashMap} instance (never {@code null} within a DispatcherServlet request)
*/
public static FlashMap getOutputFlashMap(HttpServletRequest request) {
return (FlashMap) request.getAttribute(DispatcherServlet.OUTPUT_FLASH_MAP_ATTRIBUTE);
}
/**
* Return the FlashMapManager instance to save flash attributes with
* before a redirect.
*
* @param request the current request
* @return a {@link FlashMapManager} instance (never {@code null} within a DispatcherServlet request)
*/
public static FlashMapManager getFlashMapManager(HttpServletRequest request) {
return (FlashMapManager) request.getAttribute(DispatcherServlet.FLASH_MAP_MANAGER_ATTRIBUTE);
}
}
|
rstancioiu/uva-online-judge
|
Volume102/10267 - Graphical Editor/10267.cpp
|
// Author: <NAME>
// Problem: http://uva.onlinejudge.org/external/102/10267.html
#include <iostream>
#include <string>
#include <queue>
#include <utility>
#include <sstream>
#define N 256
#define M 256
using namespace std;
string v[M];
int visited[N][M];
int m,n;
void initialize()
{
for(int i=0;i<n;++i)
v[i]="";
for(int i=0;i<n;++i)
{
for(int j=0;j<m;++j)
{
v[i]+='O';
}
}
}
void clear()
{
for(int i=0;i<n;++i)
{
for(int j=0;j<m;++j)
{
v[i][j]='O';
}
}
}
void drawVertical(int x, int y1,int y2 ,char c)
{
if(y1>y2)
{
int aux=y1;
y1=y2;
y2=aux;
}
for(int i=y1;i<=y2;++i)
v[i][x]=c;
}
void drawHorizontal(int x1,int x2,int y,char c)
{
if(x1>x2)
{
int aux=x1;
x1=x2;
x2=aux;
}
for(int i=x1;i<=x2;++i)
{
v[y][i]=c;
}
}
void drawRectangle(int x1,int x2,int y1,int y2,char c)
{
if(x1>x2)
{
int aux=x1;
x1=x2;
x2=aux;
}
if(y1>y2)
{
int aux=y1;
y1=y2;
y2=aux;
}
for(int i=x1;i<=x2;++i)
{
for(int j=y1;j<=y2;++j)
{
v[j][i]=c;
}
}
}
void drawRegion(int x,int y,char c)
{
if(v[x][y]==c)
{
return;
}
else
{
for(int i=0;i<n;++i)
{
for(int j=0;j<m;++j)
{
visited[i][j]=0;
}
}
char c1=v[x][y];
queue<pair<int,int> > queue;
queue.push(make_pair(x,y));
visited[x][y]=1;
while(!queue.empty())
{
int t1=queue.front().first;
int t2=queue.front().second;
v[t1][t2]=c;
queue.pop();
if(t1-1>=0 && v[t1-1][t2]==c1 && !visited[t1-1][t2])
{
queue.push(make_pair(t1-1,t2));
visited[t1-1][t2];
}
if(t1+1<n && v[t1+1][t2]==c1 && !visited[t1+1][t2])
{
queue.push(make_pair(t1+1,t2));
visited[t1+1][t2]=1;
}
if(t2>0 && v[t1][t2-1]==c1 && !visited[t1][t2-1])
{
queue.push(make_pair(t1,t2-1));
visited[t1][t2-1]=1;
}
if(t2+1<m && v[t1][t2+1]==c1 && !visited[t1][t2+1])
{
queue.push(make_pair(t1,t2+1));
visited[t1][t2+1]=1;
}
}
}
}
void write(string name)
{
cout<<name<<endl;
for(int j=0;j<n;++j)
{
cout<<v[j]<<endl;
}
}
int main()
{
m=0;
n=0;
string name;
int x,y,y1,y2,x1,x2;
char color;
string s;
while(1)
{
getline(cin,s);
istringstream iss(s);
char c;
iss>>c;
if(c=='X')
break;
switch(c)
{
case 'I':
iss>>m>>n;
initialize();
break;
case 'C': clear();
break;
case 'L':
iss>>x>>y;
iss>>color;
v[y-1][x-1]=color;
break;
case 'V':
iss>>x>>y1>>y2;
iss>>color;
drawVertical(x-1,y1-1,y2-1,color);
break;
case 'H':
iss>>x1>>x2>>y;
iss>>color;
drawHorizontal(x1-1,x2-1,y-1,color);
break;
case 'K':
iss>>x1>>y1>>x2>>y2;
iss>>color;
drawRectangle(x1-1,x2-1,y1-1,y2-1,color);
break;
case 'F':
iss>>x>>y;
iss>>color;
drawRegion(y-1,x-1,color);
break;
case 'S':
iss>>name;
write(name);
break;
default :
break;
}
}
return 0;
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.