code stringlengths 3 1.01M | repo_name stringlengths 5 116 | path stringlengths 3 311 | language stringclasses 30
values | license stringclasses 15
values | size int64 3 1.01M |
|---|---|---|---|---|---|
from django.conf.urls import url
from admin.nodes import views
# URL namespace for this route set; routes are reversed as '<namespace>:<name>'.
app_name = 'admin'

urlpatterns = [
    # Node search form (admin landing page for nodes).
    url(r'^$', views.NodeFormView.as_view(),
        name='search'),
    # Spam-moderation queues: flagged, confirmed spam, confirmed ham.
    url(r'^flagged_spam$', views.NodeFlaggedSpamList.as_view(),
        name='flagged-spam'),
    url(r'^known_spam$', views.NodeKnownSpamList.as_view(),
        name='known-spam'),
    url(r'^known_ham$', views.NodeKnownHamList.as_view(),
        name='known-ham'),
    # Node detail page, keyed by GUID (lowercase alphanumerics).
    url(r'^(?P<guid>[a-z0-9]+)/$', views.NodeView.as_view(),
        name='node'),
    url(r'^(?P<guid>[a-z0-9]+)/logs/$', views.AdminNodeLogView.as_view(),
        name='node-logs'),
    url(r'^registration_list/$', views.RegistrationListView.as_view(),
        name='registrations'),
    url(r'^stuck_registration_list/$', views.StuckRegistrationListView.as_view(),
        name='stuck-registrations'),
    url(r'^(?P<guid>[a-z0-9]+)/update_embargo/$',
        views.RegistrationUpdateEmbargoView.as_view(), name='update_embargo'),
    # NOTE(review): both 'remove' and 'restore' route to NodeDeleteView;
    # presumably the view branches on the resolved URL name -- confirm.
    url(r'^(?P<guid>[a-z0-9]+)/remove/$', views.NodeDeleteView.as_view(),
        name='remove'),
    url(r'^(?P<guid>[a-z0-9]+)/restore/$', views.NodeDeleteView.as_view(),
        name='restore'),
    url(r'^(?P<guid>[a-z0-9]+)/confirm_spam/$', views.NodeConfirmSpamView.as_view(),
        name='confirm-spam'),
    url(r'^(?P<guid>[a-z0-9]+)/confirm_ham/$', views.NodeConfirmHamView.as_view(),
        name='confirm-ham'),
    # Re-submit the node to the SHARE / Elasticsearch indexes.
    url(r'^(?P<guid>[a-z0-9]+)/reindex_share_node/$', views.NodeReindexShare.as_view(),
        name='reindex-share-node'),
    url(r'^(?P<guid>[a-z0-9]+)/reindex_elastic_node/$', views.NodeReindexElastic.as_view(),
        name='reindex-elastic-node'),
    url(r'^(?P<guid>[a-z0-9]+)/restart_stuck_registrations/$', views.RestartStuckRegistrationsView.as_view(),
        name='restart-stuck-registrations'),
    url(r'^(?P<guid>[a-z0-9]+)/remove_stuck_registrations/$', views.RemoveStuckRegistrationsView.as_view(),
        name='remove-stuck-registrations'),
    # Remove a contributor (user_id) from the node (guid).
    url(r'^(?P<guid>[a-z0-9]+)/remove_user/(?P<user_id>[a-z0-9]+)/$',
        views.NodeRemoveContributorView.as_view(), name='remove_user'),
]
| pattisdr/osf.io | admin/nodes/urls.py | Python | apache-2.0 | 2,100 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.asterix.external.classad;
import java.nio.charset.StandardCharsets;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;
import java.util.Random;
import java.util.TimeZone;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.asterix.om.base.AMutableInt32;
public class Util {

    // Matches a backslash followed by 1-3 octal digits (first digit 0-3, the
    // C octal-escape shape); used by convertEscapes() to resolve octal
    // escapes in a second pass.
    private static final Pattern OCTAL = Pattern.compile("\\\\([0-3][0-7]{0,2})");

    /**
     * Convert C-style escape sequences in {@code text} in place.
     *
     * <p>The string can only shrink while converting escapes, so conversion
     * is done in place: characters are compacted from {@code source} down to
     * {@code dest} as two-character escapes collapse to one. Octal escapes
     * are left in the text on the first pass ({@code hasOctal} is set) and
     * rewritten afterwards via the {@link #OCTAL} pattern.
     *
     * @param text string to convert; modified in place
     * @return false if an octal escape resolves to 0 (NUL) or a value above
     *         255, true otherwise
     */
    public static boolean convertEscapes(AMutableCharArrayString text) {
        boolean validStr = true;
        if (text.getLength() == 0) {
            return true;
        }
        int dest = 0;
        boolean hasOctal = false;
        for (int source = 0; source < text.getLength(); ++source) {
            char ch = text.charAt(source);
            // scan for escapes, a terminating slash cannot be an escape
            if (ch == '\\' && source < text.getLength() - 1) {
                ++source; // skip the \ character
                ch = text.charAt(source);
                // The escape part should be re-validated
                switch (ch) {
                    case 'b':
                        ch = '\b';
                        break;
                    case 'f':
                        ch = '\f';
                        break;
                    case 'n':
                        ch = '\n';
                        break;
                    case 'r':
                        ch = '\r';
                        break;
                    case 't':
                        ch = '\t';
                        break;
                    case '\\':
                        ch = '\\';
                        break;
                    default:
                        if (Lexer.isodigit(ch)) {
                            // Octal escape: keep it verbatim here (advance
                            // dest past the backslash) and resolve it in the
                            // OCTAL pass below.
                            hasOctal = true;
                            ++dest;
                        }
                        break;
                }
            }
            if (dest == source) {
                // no need to assign ch to text when we haven't seen any escapes yet.
                // text[dest] = ch;
                ++dest;
            } else {
                try {
                    // Compact the converted character down to dest; --source
                    // compensates for the character dropped by erase().
                    // NOTE(review): relies on AMutableCharArrayString.erase(i)
                    // removing the character at index i -- confirm.
                    text.erase(dest);
                    text.setChar(dest, ch);
                    ++dest;
                    --source;
                } catch (Throwable th) {
                    th.printStackTrace();
                }
            }
        }
        // Drop the tail left over after in-place compaction.
        if (dest < text.getLength()) {
            text.erase(dest);
            text.setLength(dest);
        }
        // silly, but to fulfill the original contract for this function
        // we need to remove the last character in the string if it is a '\0'
        // (earlier logic guaranteed that a '\0' can ONLY be the last character)
        if (text.getLength() > 0 && (text.charAt(text.getLength() - 1) == '\0')) {
            text.erase(text.getLength() - 1);
        }
        if (hasOctal) {
            // Second pass: replace each \NNN octal escape with its character.
            Matcher m = OCTAL.matcher(text.toString());
            StringBuffer out = new StringBuffer();
            while (m.find()) {
                int octet = Integer.parseInt(m.group(1), 8);
                if (octet == 0 || octet > 255) {
                    return false;
                }
                m.appendReplacement(out, String.valueOf((char) octet));
            }
            m.appendTail(out);
            // Reinterpret the resulting 8-bit chars as UTF-8 bytes so
            // multi-byte octal sequences decode to the intended characters.
            text.setValue(new String(out.toString().getBytes(StandardCharsets.ISO_8859_1), StandardCharsets.UTF_8));
        }
        return validStr;
    }

    // Process-wide RNG, seeded once with the current wall-clock time.
    public static Random initialized = new Random((new Date()).getTime());

    /** @return a uniformly distributed random int. */
    public static int getRandomInteger() {
        return initialized.nextInt();
    }

    /** @return a uniformly distributed random double in [0, 1). */
    public static double getRandomReal() {
        return initialized.nextDouble();
    }

    /** @return the timezone offset carried by {@code clock}. */
    public static int timezoneOffset(ClassAdTime clock) {
        return clock.getOffset();
    }

    /** Populate {@code localtm} from {@code now} using the default local calendar. */
    public static void getLocalTime(ClassAdTime now, ClassAdTime localtm) {
        localtm.setValue(Calendar.getInstance(), now);
        localtm.isAbsolute(true);
    }

    /**
     * Append an ISO-8601-style rendering of {@code atime} to {@code buffer},
     * e.g. {@code 2004-01-01T00:00:00+11:00}: the GMT date/time followed by
     * the stored offset as {@code +hh:mm} / {@code -hh:mm}.
     */
    public static void absTimeToString(ClassAdTime atime, AMutableCharArrayString buffer) {
        DateFormat formatter = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss");
        formatter.setTimeZone(TimeZone.getTimeZone("GMT"));
        buffer.appendString(formatter.format(atime.getCalendar().getTime()));
        // NOTE(review): hours divide the offset by 3600000 (milliseconds) but
        // minutes use (offset / 60) % 60 -- the units look inconsistent;
        // verify against ClassAdTime.getOffset().
        buffer.appendString(
                (atime.getOffset() >= 0 ? "+" : "-") + String.format("%02d", (Math.abs(atime.getOffset()) / 3600000))
                        + ":" + String.format("%02d", ((Math.abs(atime.getOffset() / 60) % 60))));
    }

    /**
     * Append a human-readable relative time to {@code buffer}.
     *
     * <p>Despite the parameter name, {@code rsecs} is treated as milliseconds:
     * {@code rsecs % 1000} becomes the fractional-second part and
     * {@code rsecs / 1000} the whole seconds. Output is one of
     * {@code d+hh:mm:ss}, {@code hh:mm:ss}, {@code mm:ss} or {@code ss}, with
     * a leading '-' for negative input; when a fractional part exists the
     * seconds field is %g-formatted (NOTE(review): ported from C++ -- the
     * Java %g rendering may differ from the C original; verify expected
     * output).
     */
    public static void relTimeToString(long rsecs, AMutableCharArrayString buffer) {
        double fractional_seconds;
        int days, hrs, mins;
        double secs;
        if (rsecs < 0) {
            buffer.appendChar('-');
            rsecs = -rsecs;
        }
        fractional_seconds = rsecs % 1000;
        days = (int) (rsecs / 1000); // whole seconds, for now
        hrs = days % 86400;          // seconds within the current day
        mins = hrs % 3600;           // seconds within the current hour
        secs = (mins % 60) + (fractional_seconds / 1000.0);
        days = days / 86400;
        hrs = hrs / 3600;
        mins = mins / 60;
        if (days != 0) {
            if (fractional_seconds == 0) {
                buffer.appendString(String.format("%d+%02d:%02d:%02d", days, hrs, mins, (int) secs));
            } else {
                buffer.appendString(String.format("%d+%02d:%02d:%g", days, hrs, mins, secs));
            }
        } else if (hrs != 0) {
            if (fractional_seconds == 0) {
                buffer.appendString(String.format("%02d:%02d:%02d", hrs, mins, (int) secs));
            } else {
                buffer.appendString(String.format("%02d:%02d:%02g", hrs, mins, secs));
            }
        } else if (mins != 0) {
            if (fractional_seconds == 0) {
                buffer.appendString(String.format("%02d:%02d", mins, (int) secs));
            } else {
                buffer.appendString(String.format("%02d:%02g", mins, secs));
            }
            return;
        } else {
            if (fractional_seconds == 0) {
                buffer.appendString(String.format("%02d", (int) secs));
            } else {
                buffer.appendString(String.format("%02g", secs));
            }
        }
    }

    /**
     * Compute day-of-week and day-of-year for a Gregorian date.
     *
     * @param year    Gregorian year
     * @param month   month, 1-12
     * @param day     day of month
     * @param weekday out: fixed day number mod 7 (NOTE(review): presumably
     *                0 = Sunday in this fixed-date scheme -- confirm)
     * @param yearday out: days since January 1 of the same year (Jan 1 == 0)
     */
    public static void dayNumbers(int year, int month, int day, AMutableInt32 weekday, AMutableInt32 yearday) {
        int fixed = fixedFromGregorian(year, month, day);
        int jan1_fixed = fixedFromGregorian(year, 1, 1);
        weekday.setValue(fixed % 7);
        yearday.setValue(fixed - jan1_fixed);
        return;
    }

    /**
     * Fixed day number of a Gregorian date. The formula matches the standard
     * fixed-from-gregorian computation (365 days per prior year, plus leap
     * corrections, plus (367*month - 362)/12 with a February adjustment).
     */
    public static int fixedFromGregorian(int year, int month, int day) {
        int fixed;
        int month_adjustment;
        // Correction for the variable length of February.
        if (month <= 2) {
            month_adjustment = 0;
        } else if (isLeapYear(year)) {
            month_adjustment = -1;
        } else {
            month_adjustment = -2;
        }
        fixed = 365 * (year - 1) + ((year - 1) / 4) - ((year - 1) / 100) + ((year - 1) / 400)
                + ((367 * month - 362) / 12) + month_adjustment + day;
        return fixed;
    }

    /**
     * Gregorian leap-year test: divisible by 4, excluding years divisible by
     * 100 but not by 400 (i.e. mod-400 residues 100, 200 and 300).
     */
    public static boolean isLeapYear(int year) {
        int mod4;
        int mod400;
        boolean leap_year;
        mod4 = year % 4;
        mod400 = year % 400;
        if (mod4 == 0 && mod400 != 100 && mod400 != 200 && mod400 != 300) {
            leap_year = true;
        } else {
            leap_year = false;
        }
        return leap_year;
    }

    /**
     * @return -1 for negative infinity, 1 for positive infinity, 0 for any
     *         other value (finite or NaN)
     */
    public static int isInf(double x) {
        if (Double.isInfinite(x)) {
            return (x < 0.0) ? (-1) : 1;
        }
        return 0;
    }

    /** @return true iff {@code x} is NaN. */
    public static boolean isNan(double x) {
        return Double.isNaN(x);
    }
}
| ty1er/incubator-asterixdb | asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/classad/Util.java | Java | apache-2.0 | 8,750 |
// Copyright 2016 LINE Corporation
//
// LINE Corporation licenses this file to you under the Apache License,
// version 2.0 (the "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at:
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations
// under the License.
package linebot
import (
"context"
"fmt"
)
// IssueLinkToken builds a call object that issues an account-link token for
// the user identified by userID. Invoke Do() on the result to execute it.
// https://developers.line.me/en/reference/messaging-api/#issue-link-token
func (client *Client) IssueLinkToken(userID string) *IssueLinkTokenCall {
	return &IssueLinkTokenCall{
		c:      client,
		userID: userID,
	}
}
// IssueLinkTokenCall carries the parameters of a pending issue-link-token
// request. Construct it with Client.IssueLinkToken.
type IssueLinkTokenCall struct {
	c      *Client         // client that will perform the HTTP request
	ctx    context.Context // optional request context, set via WithContext
	userID string          // target user ID interpolated into the endpoint
}
// WithContext attaches ctx to the call so the underlying HTTP request can be
// cancelled or bounded by a deadline. It returns the same call for chaining.
func (call *IssueLinkTokenCall) WithContext(ctx context.Context) *IssueLinkTokenCall {
	call.ctx = ctx
	return call
}
// Do executes the issue-link-token request (POST to the per-user link-token
// endpoint) and decodes the response into a LinkTokenResponse.
func (call *IssueLinkTokenCall) Do() (*LinkTokenResponse, error) {
	endpoint := fmt.Sprintf(APIEndpointLinkToken, call.userID)
	res, err := call.c.post(call.ctx, endpoint, nil)
	if err != nil {
		return nil, err
	}
	// Ensure the response body is closed even if decoding fails.
	defer closeResponse(res)
	return decodeToLinkTokenResponse(res)
}
| kkdai/LineBotTemplate | vendor/github.com/line/line-bot-sdk-go/v7/linebot/account_link.go | GO | apache-2.0 | 1,468 |
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults
class DummyOperator(BaseOperator):
    """
    Operator that does literally nothing. It can be used to group tasks in a
    DAG.
    """

    # Color used when rendering this operator in the Airflow UI.
    ui_color = '#e8f7e4'

    @apply_defaults
    def __init__(self, *args, **kwargs) -> None:
        # No operator-specific arguments; everything is forwarded to
        # BaseOperator (task_id, dag, etc.).
        super().__init__(*args, **kwargs)

    def execute(self, context):
        # Intentionally a no-op: the operator exists purely for DAG structure.
        pass
| wileeam/airflow | airflow/operators/dummy_operator.py | Python | apache-2.0 | 1,203 |
/*
* Copyright (c) 2016, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wso2.carbon.identity.entitlement.common.dto;
/**
 * Data-transfer object holding element counts collected from a policy
 * document: sub-elements, AttributeDesignator, AttributeValue and
 * AttributeSelector elements (presumably XACML element names, judging by the
 * field names -- confirm against the callers).
 */
public class ElementCountDTO {

    /** Number of nested sub-elements. */
    private int subElementCount;
    /** Number of AttributeDesignator elements. */
    private int attributeDesignatorsElementCount;
    /** Number of AttributeValue elements. */
    private int attributeValueElementCount;
    /** Number of AttributeSelector elements. */
    private int attributeSelectorElementCount;

    public int getSubElementCount() {
        return subElementCount;
    }

    public void setSubElementCount(int subElementCount) {
        this.subElementCount = subElementCount;
    }

    public int getAttributeDesignatorsElementCount() {
        return attributeDesignatorsElementCount;
    }

    public void setAttributeDesignatorsElementCount(int attributeDesignatorsElementCount) {
        this.attributeDesignatorsElementCount = attributeDesignatorsElementCount;
    }

    public int getAttributeValueElementCount() {
        return attributeValueElementCount;
    }

    public void setAttributeValueElementCount(int attributeValueElementCount) {
        this.attributeValueElementCount = attributeValueElementCount;
    }

    public int getAttributeSelectorElementCount() {
        return attributeSelectorElementCount;
    }

    public void setAttributeSelectorElementCount(int attributeSelectorElementCount) {
        this.attributeSelectorElementCount = attributeSelectorElementCount;
    }
}
| dharshanaw/carbon-identity-framework | components/entitlement/org.wso2.carbon.identity.entitlement.common/src/main/java/org/wso2/carbon/identity/entitlement/common/dto/ElementCountDTO.java | Java | apache-2.0 | 1,931 |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.distributedlog.common.concurrent;
import java.util.LinkedList;
import java.util.Optional;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.RejectedExecutionException;
import java.util.function.Supplier;
import javax.annotation.concurrent.GuardedBy;
import org.apache.bookkeeper.common.concurrent.FutureUtils;
import org.apache.distributedlog.common.util.Permit;
/**
 * An {@code AsyncSemaphore} is a traditional semaphore with asynchronous
 * permit acquisition.
 *
 * <p>Grabbing a permit returns a {@code CompletableFuture<Permit>}.
 *
 * <p>Basic usage:
 * <pre>
 * AsyncSemaphore semaphore = new AsyncSemaphore(n, Optional.empty());
 * ...
 * semaphore.acquireAndRun(() -&gt; somethingThatReturnsFuture());
 * </pre>
 *
 * <p>Calls to {@link #acquire()} and {@link #acquireAndRun(Supplier)} are
 * serialized, and permits are handed out fairly (in order of arrival).
 */
public class AsyncSemaphore {

    /** Maximum number of queued waiters; {@code Optional.empty()} means unbounded. */
    private final Optional<Integer> maxWaiters;

    /**
     * The single {@link Permit} instance handed to every successful acquirer;
     * releasing it returns the permit to this semaphore.
     */
    private final Permit semaphorePermit = new Permit() {
        @Override
        public void release() {
            releasePermit(this);
        }
    };

    /** Set once {@link #fail(Throwable)} runs; later acquires fail with this cause. */
    @GuardedBy("this")
    private Optional<Throwable> closed = Optional.empty();
    /** FIFO queue of acquirers waiting for a permit. */
    @GuardedBy("this")
    private final LinkedList<CompletableFuture<Permit>> waitq;
    /** Number of permits currently available for immediate acquisition. */
    @GuardedBy("this")
    private int availablePermits;

    /**
     * Create a semaphore.
     *
     * @param initialPermits number of permits initially available
     * @param maxWaiters maximum number of callers allowed to queue for a
     *                   permit; {@code Optional.empty()} for no limit
     */
    public AsyncSemaphore(int initialPermits,
                          Optional<Integer> maxWaiters) {
        this.availablePermits = initialPermits;
        this.waitq = new LinkedList<>();
        this.maxWaiters = maxWaiters;
    }

    /**
     * Return a permit: hand it to the oldest waiter if any, otherwise put it
     * back in the available pool.
     */
    private synchronized void releasePermit(Permit permit) {
        CompletableFuture<Permit> next = waitq.pollFirst();
        if (null != next) {
            next.complete(permit);
        } else {
            availablePermits += 1;
        }
    }

    private CompletableFuture<Permit> newFuturePermit() {
        return FutureUtils.value(semaphorePermit);
    }

    /**
     * Acquire a {@link Permit}, asynchronously.
     *
     * <p>Be sure to {@code permit.release()} in a
     * - {@code finally} block of your success callback
     * - {@code ensure} stage of your future chain
     *
     * <p>Interrupting this future is only advisory, and will not release the
     * permit if the future has already been satisfied.
     *
     * @note This method always returns the same instance of {@link Permit}.
     * @return a future satisfied when computation can proceed, or failed with
     *         {@link RejectedExecutionException} if the configured maximum
     *         number of waiters would be exceeded (or with the cause passed
     *         to {@link #fail(Throwable)} once the semaphore is failed).
     */
    public synchronized CompletableFuture<Permit> acquire() {
        if (closed.isPresent()) {
            return FutureUtils.exception(closed.get());
        }
        if (availablePermits > 0) {
            availablePermits -= 1;
            return newFuturePermit();
        } else {
            if (maxWaiters.isPresent() && waitq.size() >= maxWaiters.get()) {
                return FutureUtils.exception(new RejectedExecutionException("Max waiters exceeded"));
            } else {
                CompletableFuture<Permit> future = FutureUtils.createFuture();
                // If the waiter completes for any external reason (e.g. it is
                // cancelled by the caller), drop it from the queue so it can
                // never be handed a permit.
                future.whenComplete((value, cause) -> {
                    synchronized (AsyncSemaphore.this) {
                        waitq.remove(future);
                    }
                });
                waitq.addLast(future);
                return future;
            }
        }
    }

    /**
     * Fail the semaphore and stop it from distributing further permits. Subsequent
     * attempts to acquire a permit fail with {@code exc}. This semaphore's queued
     * waiters are also failed with {@code exc}.
     */
    public synchronized void fail(Throwable exc) {
        closed = Optional.of(exc);
        // Snapshot and clear the queue BEFORE completing the futures:
        // completing a waiter fires its whenComplete callback inline, which
        // re-enters this (reentrant) lock and removes the future from waitq.
        // Doing that while iterating waitq directly would throw
        // ConcurrentModificationException once there are two or more waiters.
        LinkedList<CompletableFuture<Permit>> waiters = new LinkedList<>(waitq);
        waitq.clear();
        for (CompletableFuture<Permit> future : waiters) {
            // Fail with the supplied cause, as documented above. The previous
            // cancel(true) surfaced CancellationException to waiters instead
            // of exc.
            future.completeExceptionally(exc);
        }
    }

    /**
     * Execute the function asynchronously when a permit becomes available.
     *
     * <p>If the function throws a non-fatal exception, the exception is returned as part
     * of the future. In all cases the permit is released before returning.
     *
     * @return a future equivalent to the return value of the input function. If the
     *         configured maximum number of waiters is reached, a future failed with
     *         {@link RejectedExecutionException} is returned.
     */
    public <T> CompletableFuture<T> acquireAndRun(Supplier<CompletableFuture<T>> func) {
        return acquire().thenCompose(permit -> {
            CompletableFuture<T> future;
            try {
                future = func.get();
                // Release the permit however the supplied work completes.
                future.whenComplete((value, cause) -> permit.release());
                return future;
            } catch (Throwable cause) {
                // func threw synchronously: release before propagating.
                permit.release();
                throw cause;
            }
        });
    }
}
| sijie/bookkeeper | stream/distributedlog/common/src/main/java/org/apache/distributedlog/common/concurrent/AsyncSemaphore.java | Java | apache-2.0 | 5,678 |
/** @file
A brief file description
@section license License
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
/*
* Interfaces in this header file are experimental, undocumented and
* are subject to change even across minor releases of Traffic Server.
* None of the interfaces in this file are committed to be stable
* unless they are migrated to ts/ts.h If you require stable APIs to
* Traffic Server, DO NOT USE anything in this file.
*/
#ifndef __TS_API_EXPERIMENTAL_H__
#define __TS_API_EXPERIMENTAL_H__
#ifdef __cplusplus
extern "C" {
#endif /* __cplusplus */
/* Events delivered by the extended FetchSM stream interface (see
   TSFetchCreate with TS_FETCH_FLAGS_STREAM): head/body ready and done.
   Values are negative -- presumably to stay clear of the public TSEvent
   value range; confirm against ts.h. */
typedef enum {
  TS_FETCH_EVENT_EXT_HEAD_READY = -1,
  TS_FETCH_EVENT_EXT_HEAD_DONE = -2,
  TS_FETCH_EVENT_EXT_BODY_READY = -3,
  TS_FETCH_EVENT_EXT_BODY_DONE = -4
} TSFetchEventExt;
/* Behavior flags accepted by TSFetchCreate(); combine with bitwise OR. */
typedef enum {
  TS_FETCH_FLAGS_NONE = 0,                     // do nothing
  TS_FETCH_FLAGS_STREAM = 1 << 1,              // enable stream IO
  TS_FETCH_FLAGS_DECHUNK = 1 << 2,             // dechunk body content
  TS_FETCH_FLAGS_NEWLOCK = 1 << 3,             // allocate new lock for fetch sm
  TS_FETCH_FLAGS_NOT_INTERNAL_REQUEST = 1 << 4 // Allow this fetch to be created as a non-internal request.
} TSFetchFlags;
typedef struct tsapi_fetchsm *TSFetchSM;
/* Forward declaration of in_addr, any user of these APIs should probably
include net/netinet.h or whatever is appropriate on the platform. */
struct in_addr;
/* Cache APIs that are not yet fully supported and/or frozen nor complete. */
tsapi TSReturnCode TSCacheBufferInfoGet(TSCacheTxn txnp, uint64_t *length, uint64_t *offset);
tsapi TSCacheHttpInfo TSCacheHttpInfoCreate();
tsapi void TSCacheHttpInfoReqGet(TSCacheHttpInfo infop, TSMBuffer *bufp, TSMLoc *obj);
tsapi void TSCacheHttpInfoRespGet(TSCacheHttpInfo infop, TSMBuffer *bufp, TSMLoc *obj);
tsapi void TSCacheHttpInfoReqSet(TSCacheHttpInfo infop, TSMBuffer bufp, TSMLoc obj);
tsapi void TSCacheHttpInfoRespSet(TSCacheHttpInfo infop, TSMBuffer bufp, TSMLoc obj);
tsapi void TSCacheHttpInfoKeySet(TSCacheHttpInfo infop, TSCacheKey key);
tsapi void TSCacheHttpInfoSizeSet(TSCacheHttpInfo infop, int64_t size);
tsapi int TSCacheHttpInfoVector(TSCacheHttpInfo infop, void *data, int length);
tsapi time_t TSCacheHttpInfoReqSentTimeGet(TSCacheHttpInfo infop);
tsapi time_t TSCacheHttpInfoRespReceivedTimeGet(TSCacheHttpInfo infop);
int64_t TSCacheHttpInfoSizeGet(TSCacheHttpInfo infop);
/* Do not edit these apis, used internally */
tsapi int TSMimeHdrFieldEqual(TSMBuffer bufp, TSMLoc hdr_obj, TSMLoc field1, TSMLoc field2);
tsapi TSReturnCode TSHttpTxnHookRegisteredFor(TSHttpTxn txnp, TSHttpHookID id, TSEventFunc funcp);
/* Various HTTP "control" modes */
/* Operation selector for TSHttpTxnCntl(): get or set the per-transaction
   logging mode and intercept-retry mode. The get/set value is passed through
   the void* data argument (TS_HTTP_CNTL_OFF / TS_HTTP_CNTL_ON). */
typedef enum {
  TS_HTTP_CNTL_GET_LOGGING_MODE,
  TS_HTTP_CNTL_SET_LOGGING_MODE,
  TS_HTTP_CNTL_GET_INTERCEPT_RETRY_MODE,
  TS_HTTP_CNTL_SET_INTERCEPT_RETRY_MODE
} TSHttpCntlType;
#define TS_HTTP_CNTL_OFF (void *)0
#define TS_HTTP_CNTL_ON (void *)1
/* usage:
void *onoff = 0;
TSHttpTxnCntl(.., TS_HTTP_CNTL_GET_LOGGING_MODE, &onoff);
if (onoff == TS_HTTP_CNTL_ON) ....
*/
tsapi TSReturnCode TSHttpTxnCntl(TSHttpTxn txnp, TSHttpCntlType cntl, void *data);
/* Protocols APIs */
tsapi void TSVConnCacheHttpInfoSet(TSVConn connp, TSCacheHttpInfo infop);
/* The rest is from the old "froze" private API include, we should consider
moving some of these over to ts/ts.h as well. TODO */
/****************************************************************************
* Test if cache ready to accept request for a specific type of data
****************************************************************************/
tsapi TSReturnCode TSCacheDataTypeReady(TSCacheDataType type, int *is_ready);
/****************************************************************************
* When reenabling a txn in error, keep the connection open in case
* of keepalive.
****************************************************************************/
tsapi void TSHttpTxnClientKeepaliveSet(TSHttpTxn txnp, int set);
/****************************************************************************
* Allow to set the body of a POST request.
****************************************************************************/
tsapi void TSHttpTxnServerRequestBodySet(TSHttpTxn txnp, char *buf, int64_t buflength);
/* ===== High Resolution Time ===== */
#define TS_HRTIME_FOREVER (10 * TS_HRTIME_DECADE)
#define TS_HRTIME_DECADE (10 * TS_HRTIME_YEAR)
#define TS_HRTIME_YEAR (365 * TS_HRTIME_DAY + TS_HRTIME_DAY / 4)
#define TS_HRTIME_WEEK (7 * TS_HRTIME_DAY)
#define TS_HRTIME_DAY (24 * TS_HRTIME_HOUR)
#define TS_HRTIME_HOUR (60 * TS_HRTIME_MINUTE)
#define TS_HRTIME_MINUTE (60 * TS_HRTIME_SECOND)
#define TS_HRTIME_SECOND (1000 * TS_HRTIME_MSECOND)
#define TS_HRTIME_MSECOND (1000 * TS_HRTIME_USECOND)
#define TS_HRTIME_USECOND (1000 * TS_HRTIME_NSECOND)
#define TS_HRTIME_NSECOND (1LL)
#define TS_HRTIME_APPROX_SECONDS(_x) ((_x) >> 30) /* off by 7.3% */
#define TS_HRTIME_APPROX_FACTOR (((float)(1 << 30)) / (((float)HRTIME_SECOND)))
/*
////////////////////////////////////////////////////////////////////
//
// Map from units to ts_hrtime values
//
////////////////////////////////////////////////////////////////////
*/
#define TS_HRTIME_YEARS(_x) ((_x)*TS_HRTIME_YEAR)
#define TS_HRTIME_WEEKS(_x) ((_x)*TS_HRTIME_WEEK)
#define TS_HRTIME_DAYS(_x) ((_x)*TS_HRTIME_DAY)
#define TS_HRTIME_HOURS(_x) ((_x)*TS_HRTIME_HOUR)
#define TS_HRTIME_MINUTES(_x) ((_x)*TS_HRTIME_MINUTE)
#define TS_HRTIME_SECONDS(_x) ((_x)*TS_HRTIME_SECOND)
#define TS_HRTIME_MSECONDS(_x) ((_x)*TS_HRTIME_MSECOND)
#define TS_HRTIME_USECONDS(_x) ((_x)*TS_HRTIME_USECOND)
#define TS_HRTIME_NSECONDS(_x) ((_x)*TS_HRTIME_NSECOND)
tsapi TSReturnCode TSHttpTxnCachedRespTimeGet(TSHttpTxn txnp, time_t *resp_time);
/* ===== Cache ===== */
tsapi TSReturnCode TSCacheKeyDataTypeSet(TSCacheKey key, TSCacheDataType type);
/* ===== Utility ===== */
/****************************************************************************
* Create a random number
* Return random integer between <X> and <Y>
****************************************************************************/
tsapi unsigned int TSrandom(void);
/****************************************************************************
* Create a random double
* Return random double between <X> and <Y>
****************************************************************************/
tsapi double TSdrandom(void);
/****************************************************************************
* Return Hi-resolution current time. (int64_t)
****************************************************************************/
tsapi TSHRTime TShrtime(void);
/* ===== CacheHttpInfo ===== */
tsapi TSCacheHttpInfo TSCacheHttpInfoCopy(TSCacheHttpInfo infop);
tsapi void TSCacheHttpInfoReqGet(TSCacheHttpInfo infop, TSMBuffer *bufp, TSMLoc *offset);
tsapi void TSCacheHttpInfoRespGet(TSCacheHttpInfo infop, TSMBuffer *bufp, TSMLoc *offset);
tsapi void TSCacheHttpInfoDestroy(TSCacheHttpInfo infop);
/* Get Arbitrary Txn info such as cache lookup details etc as defined in TSHttpTxnInfoKey */
/**
Return the particular txn info requested.
@param txnp the transaction pointer
@param key the requested txn info.
@param TSMgmtInt a pointer to a integer where the return value is stored
@return @c TS_SUCCESS if the requested info is supported, TS_ERROR otherwise
*/
tsapi TSReturnCode TSHttpTxnInfoIntGet(TSHttpTxn txnp, TSHttpTxnInfoKey key, TSMgmtInt *value);
/****************************************************************************
* TSHttpTxnCacheLookupCountGet
 * Return: TS_SUCCESS/TS_ERROR
****************************************************************************/
tsapi TSReturnCode TSHttpTxnCacheLookupCountGet(TSHttpTxn txnp, int *lookup_count);
tsapi TSReturnCode TSHttpTxnRedirectRequest(TSHttpTxn txnp, TSMBuffer bufp, TSMLoc url_loc);
tsapi TSReturnCode TSHttpTxnServerRespIgnore(TSHttpTxn txnp);
tsapi TSReturnCode TSHttpTxnShutDown(TSHttpTxn txnp, TSEvent event);
tsapi TSReturnCode TSHttpTxnCloseAfterResponse(TSHttpTxn txnp, int should_close);
/* TS-1996: These API swill be removed after v3.4.0 is cut. Do not use them! */
tsapi TSReturnCode TSHttpTxnNewCacheLookupDo(TSHttpTxn txnp, TSMBuffer bufp, TSMLoc url_loc);
tsapi TSReturnCode TSHttpTxnSecondUrlTryLock(TSHttpTxn txnp);
/****************************************************************************
* ??
* Return ??
****************************************************************************/
tsapi int TSHttpTxnClientReqIsServerStyle(TSHttpTxn txnp);
/****************************************************************************
* ??
* Return ??
****************************************************************************/
tsapi void TSHttpTxnOverwriteExpireTime(TSHttpTxn txnp, time_t expire_time);
/****************************************************************************
* ??
* Return ??
****************************************************************************/
tsapi TSReturnCode TSHttpTxnUpdateCachedObject(TSHttpTxn txnp);
/****************************************************************************
* ??
* TODO: This returns a LookingUp_t value, we need to SDK'ify it.
****************************************************************************/
tsapi int TSHttpTxnLookingUpTypeGet(TSHttpTxn txnp);
tsapi void TSHttpTxnServerPush(TSHttpTxn txnp, const char *url, int url_len);
/**
Attempt to attach the contp continuation to sockets that have already been
opened by the traffic manager and defined as belonging to plugins (based on
records.config configuration). If a connection is successfully accepted,
the TS_EVENT_NET_ACCEPT is delivered to the continuation. The event
data will be a valid TSVConn bound to the accepted connection.
In order to configure such a socket, add the "plugin" keyword to a port
in proxy.config.http.server_ports like "8082:plugin"
Transparency/IP settings can also be defined, but a port cannot have
both the "ssl" or "plugin" keywords configured.
Need to update records.config comments on proxy.config.http.server_ports
when this option is promoted from experimental.
*/
tsapi TSReturnCode TSPluginDescriptorAccept(TSCont contp);
/**
Opens a network connection to the host specified by the 'to' sockaddr
spoofing the client addr to equal the 'from' sockaddr.
If the connection is successfully opened, contp
is called back with the event TS_EVENT_NET_CONNECT and the new
network vconnection will be passed in the event data parameter.
If the connection is not successful, contp is called back with
the event TS_EVENT_NET_CONNECT_FAILED.
Note: It is possible to receive TS_EVENT_NET_CONNECT
even if the connection failed, because of the implementation of
network sockets in the underlying operating system. There is an
exception: if a plugin tries to open a connection to a port on
its own host machine, then TS_EVENT_NET_CONNECT is sent only
if the connection is successfully opened. In general, however,
your plugin needs to look for an TS_EVENT_VCONN_WRITE_READY to
be sure that the connection is successfully opened.
@return TSAction which allows you to check if the connection is complete,
or cancel the attempt to connect.
*/
tsapi TSAction TSNetConnectTransparent(
TSCont contp, /**< continuation that is called back when the attempted net connection either succeeds or fails. */
struct sockaddr const *from, /**< Address to spoof as connection origin */
struct sockaddr const *to /**< Address to which to connect. */
);
/* ===== Matcher Utils ===== */
#define TS_MATCHER_LINE_INVALID 0
typedef struct tsapi_matcheline *TSMatcherLine;
/****************************************************************************
* ??
* Return
****************************************************************************/
tsapi char *TSMatcherReadIntoBuffer(char *file_name, int *file_len);
/****************************************************************************
* ??
* Return
****************************************************************************/
tsapi char *TSMatcherTokLine(char *buffer, char **last);
/****************************************************************************
* ??
* Return
****************************************************************************/
tsapi char *TSMatcherExtractIPRange(char *match_str, uint32_t *addr1, uint32_t *addr2);
/****************************************************************************
* ??
* Return
****************************************************************************/
tsapi TSMatcherLine TSMatcherLineCreate();
/****************************************************************************
* ??
* Return
****************************************************************************/
tsapi void TSMatcherLineDestroy(TSMatcherLine ml);
/****************************************************************************
* ??
* Return
****************************************************************************/
tsapi const char *TSMatcherParseSrcIPConfigLine(char *line, TSMatcherLine ml);
/****************************************************************************
* ??
* Return
****************************************************************************/
tsapi char *TSMatcherLineName(TSMatcherLine ml, int element);
/****************************************************************************
* ??
* Return
****************************************************************************/
tsapi char *TSMatcherLineValue(TSMatcherLine ml, int element);
/****************************************************************************
* Set a records.config integer variable
****************************************************************************/
tsapi TSReturnCode TSMgmtConfigIntSet(const char *var_name, TSMgmtInt value);
/* ----------------------------------------------------------------------
* Interfaces used by Wireless group
* ---------------------------------------------------------------------- */
#define TS_NET_EVENT_DATAGRAM_READ_COMPLETE TS_EVENT_INTERNAL_206
#define TS_NET_EVENT_DATAGRAM_READ_ERROR TS_EVENT_INTERNAL_207
#define TS_NET_EVENT_DATAGRAM_WRITE_COMPLETE TS_EVENT_INTERNAL_208
#define TS_NET_EVENT_DATAGRAM_WRITE_ERROR TS_EVENT_INTERNAL_209
#define TS_NET_EVENT_DATAGRAM_READ_READY TS_EVENT_INTERNAL_210
#define TS_NET_EVENT_DATAGRAM_OPEN TS_EVENT_INTERNAL_211
#define TS_NET_EVENT_DATAGRAM_ERROR TS_EVENT_INTERNAL_212
/**
 * Extended FetchSM APIs
 */
/*
* Create FetchSM, this API will enable stream IO automatically.
*
* @param contp: continuation to be callbacked.
* @param method: request method.
* @param url: scheme://host[:port]/path.
* @param version: client http version, eg: "HTTP/1.1".
* @param client_addr: client addr sent to log.
* @param flags: can be bitwise OR of several TSFetchFlags.
*
* return TSFetchSM which should be destroyed by TSFetchDestroy().
*/
tsapi TSFetchSM TSFetchCreate(TSCont contp, const char *method, const char *url, const char *version,
struct sockaddr const *client_addr, int flags);
/*
 * Add a request header to a FetchSM created by TSFetchCreate().
 *
 * @param fetch_sm: returned value of TSFetchCreate().
 * @param name: name of header.
 * @param name_len: len of name.
 * @param value: value of header.
 * @param value_len: len of value.
 */
tsapi void TSFetchHeaderAdd(TSFetchSM fetch_sm, const char *name, int name_len, const char *value, int value_len);
/*
* Write data to FetchSM
*
* @param fetch_sm: returned value of TSFetchCreate().
* @param data/len: data to be written to fetch sm.
*/
tsapi void TSFetchWriteData(TSFetchSM fetch_sm, const void *data, size_t len);
/*
* Read up to *len* bytes from FetchSM into *buf*.
*
* @param fetch_sm: returned value of TSFetchCreate().
* @param buf/len: buffer to contain data from fetch sm.
*/
tsapi ssize_t TSFetchReadData(TSFetchSM fetch_sm, void *buf, size_t len);
/*
 * Launch FetchSM to do the http request. Before calling this API,
 * you should append the http request header into the fetch sm through
 * the TSFetchWriteData() API.
 *
 * @param fetch_sm: comes from returned value of TSFetchCreate().
 */
tsapi void TSFetchLaunch(TSFetchSM fetch_sm);
/*
* Destroy FetchSM
*
* @param fetch_sm: returned value of TSFetchCreate().
*/
tsapi void TSFetchDestroy(TSFetchSM fetch_sm);
/*
* Set user-defined data in FetchSM
*/
tsapi void TSFetchUserDataSet(TSFetchSM fetch_sm, void *data);
/*
* Get user-defined data in FetchSM
*/
tsapi void *TSFetchUserDataGet(TSFetchSM fetch_sm);
/*
* Get client response hdr mbuffer
*/
tsapi TSMBuffer TSFetchRespHdrMBufGet(TSFetchSM fetch_sm);
/*
* Get client response hdr mloc
*/
tsapi TSMLoc TSFetchRespHdrMLocGet(TSFetchSM fetch_sm);
#ifdef __cplusplus
}
#endif /* __cplusplus */
#endif /* __TS_API_EXPERIMENTAL_H__ */
| clearswift/trafficserver | proxy/api/ts/experimental.h | C | apache-2.0 | 17,789 |
package org.asteriskjava.pbx.agi;
import static org.junit.Assert.assertTrue;
import org.asteriskjava.pbx.agi.RateLimiter;
import org.junit.Test;
/**
 * Sanity test for {@link RateLimiter}: acquiring 15 permits from a
 * 3-permits-per-second limiter must take roughly five seconds in total.
 */
public class RateLimiterTest
{
    @Test
    public void test() throws InterruptedException
    {
        final long startMillis = System.currentTimeMillis();
        final RateLimiter limiter = new RateLimiter(3);
        int acquired = 0;
        while (acquired < 15)
        {
            limiter.acquire();
            // Trace each acquisition time so throttling is visible in the log.
            System.out.println(System.currentTimeMillis());
            Thread.sleep(100);
            acquired++;
        }
        // 15 permits at 3/sec should need about 5 seconds; allow some slack.
        assertTrue(System.currentTimeMillis() - startMillis > 4000L);
    }
}
| pk1057/asterisk-java | src/test/java/org/asteriskjava/pbx/agi/RateLimiterTest.java | Java | apache-2.0 | 647 |
/*
*
* Copyright 2015, Google Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following disclaimer
* in the documentation and/or other materials provided with the
* distribution.
* * Neither the name of Google Inc. nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/
#include <cassert>
#include <chrono>
#include <memory>
#include <mutex>
#include <sstream>
#include <string>
#include <thread>
#include <vector>
#include <gflags/gflags.h>
#include <grpc++/channel.h>
#include <grpc++/client_context.h>
#include <grpc++/server.h>
#include <grpc++/server_builder.h>
#include <grpc/grpc.h>
#include <grpc/support/alloc.h>
#include <grpc/support/histogram.h>
#include <grpc/support/host_port.h>
#include <grpc/support/log.h>
#include <grpc/support/time.h>
#include <gtest/gtest.h>
#include "src/core/lib/profiling/timers.h"
#include "src/proto/grpc/testing/services.grpc.pb.h"
#include "test/cpp/qps/client.h"
#include "test/cpp/qps/histogram.h"
#include "test/cpp/qps/interarrival.h"
#include "test/cpp/qps/usage_timer.h"
namespace grpc {
namespace testing {
// Stub factory handed to ClientImpl: builds a BenchmarkService stub on the
// given channel.
static std::unique_ptr<BenchmarkService::Stub> BenchmarkStubCreator(
    std::shared_ptr<Channel> ch) {
  return BenchmarkService::NewStub(ch);
}
// Common base for the synchronous benchmark clients. Spawns one worker thread
// per (outstanding RPC x channel) combination; each thread issues RPCs in a
// loop, optionally paced by the configured load (open-loop) schedule.
class SynchronousClient
    : public ClientImpl<BenchmarkService::Stub, SimpleRequest> {
 public:
  SynchronousClient(const ClientConfig& config)
      : ClientImpl<BenchmarkService::Stub, SimpleRequest>(
            config, BenchmarkStubCreator) {
    // One logical worker per outstanding RPC on every channel.
    num_threads_ =
        config.outstanding_rpcs_per_channel() * config.client_channels();
    responses_.resize(num_threads_);
    SetupLoadTest(config, num_threads_);
  }
  virtual ~SynchronousClient(){};
 protected:
  // Sleeps until the next scheduled issue time when running open-loop;
  // no-op in closed-loop mode.
  void WaitToIssue(int thread_idx) {
    if (!closed_loop_) {
      gpr_sleep_until(NextIssueTime(thread_idx));
    }
  }
  size_t num_threads_;
  // One response slot per worker thread (indexed by thread_idx).
  std::vector<SimpleResponse> responses_;
};
// Benchmark client issuing one blocking unary call per ThreadFunc invocation
// and recording its latency (nanoseconds) into the histogram.
class SynchronousUnaryClient GRPC_FINAL : public SynchronousClient {
 public:
  SynchronousUnaryClient(const ClientConfig& config)
      : SynchronousClient(config) {
    StartThreads(num_threads_);
  }
  ~SynchronousUnaryClient() { EndThreads(); }
  // Returns whether the RPC succeeded.
  bool ThreadFunc(Histogram* histogram, size_t thread_idx) GRPC_OVERRIDE {
    WaitToIssue(thread_idx);
    auto* stub = channels_[thread_idx % channels_.size()].get_stub();
    double start = UsageTimer::Now();
    GPR_TIMER_SCOPE("SynchronousUnaryClient::ThreadFunc", 0);
    grpc::ClientContext context;
    grpc::Status s =
        stub->UnaryCall(&context, request_, &responses_[thread_idx]);
    // Latency in nanoseconds (UsageTimer::Now is in seconds).
    histogram->Add((UsageTimer::Now() - start) * 1e9);
    return s.ok();
  }
};
// Benchmark client that keeps one long-lived bidirectional stream per worker
// thread and measures one Write/Read round trip per ThreadFunc invocation.
class SynchronousStreamingClient GRPC_FINAL : public SynchronousClient {
 public:
  SynchronousStreamingClient(const ClientConfig& config)
      : SynchronousClient(config) {
    // Per-thread contexts and streams; see the note on the members below for
    // why these are raw new[] arrays rather than std::vector.
    context_ = new grpc::ClientContext[num_threads_];
    stream_ = new std::unique_ptr<
        grpc::ClientReaderWriter<SimpleRequest, SimpleResponse>>[num_threads_];
    for (size_t thread_idx = 0; thread_idx < num_threads_; thread_idx++) {
      auto* stub = channels_[thread_idx % channels_.size()].get_stub();
      stream_[thread_idx] = stub->StreamingCall(&context_[thread_idx]);
    }
    StartThreads(num_threads_);
  }
  ~SynchronousStreamingClient() {
    // Stop workers first, then drain and finish every open stream.
    EndThreads();
    for (auto stream = &stream_[0]; stream != &stream_[num_threads_];
         stream++) {
      if (*stream) {
        (*stream)->WritesDone();
        EXPECT_TRUE((*stream)->Finish().ok());
      }
    }
    delete[] stream_;
    delete[] context_;
  }
  // Performs one request/response exchange on this thread's stream; returns
  // false if either the write or the read fails.
  bool ThreadFunc(Histogram* histogram, size_t thread_idx) GRPC_OVERRIDE {
    WaitToIssue(thread_idx);
    GPR_TIMER_SCOPE("SynchronousStreamingClient::ThreadFunc", 0);
    double start = UsageTimer::Now();
    if (stream_[thread_idx]->Write(request_) &&
        stream_[thread_idx]->Read(&responses_[thread_idx])) {
      // Round-trip latency in nanoseconds.
      histogram->Add((UsageTimer::Now() - start) * 1e9);
      return true;
    }
    return false;
  }
 private:
  // These are both conceptually std::vector but cannot be for old compilers
  // that expect contained classes to support copy constructors
  grpc::ClientContext* context_;
  std::unique_ptr<grpc::ClientReaderWriter<SimpleRequest, SimpleResponse>>*
      stream_;
};
// Factory for the synchronous unary benchmark client.
std::unique_ptr<Client> CreateSynchronousUnaryClient(
    const ClientConfig& config) {
  return std::unique_ptr<Client>(new SynchronousUnaryClient(config));
}
// Factory for the synchronous streaming benchmark client.
std::unique_ptr<Client> CreateSynchronousStreamingClient(
    const ClientConfig& config) {
  return std::unique_ptr<Client>(new SynchronousStreamingClient(config));
}
} // namespace testing
} // namespace grpc
| shishaochen/TensorFlow-0.8-Win | third_party/grpc/test/cpp/qps/client_sync.cc | C++ | apache-2.0 | 5,893 |
/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.openapi.vfs.encoding;
import com.intellij.AppTopics;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.fileEditor.FileDocumentManager;
import com.intellij.openapi.fileEditor.FileDocumentManagerAdapter;
import com.intellij.openapi.fileEditor.impl.LoadTextUtil;
import com.intellij.openapi.fileTypes.FileType;
import com.intellij.openapi.fileTypes.FileTypes;
import com.intellij.openapi.fileTypes.StdFileTypes;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.project.ProjectLocator;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.ThrowableComputable;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.*;
import com.intellij.refactoring.util.CommonRefactoringUtil;
import com.intellij.util.ArrayUtil;
import com.intellij.util.messages.MessageBusConnection;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.io.IOException;
import java.nio.charset.Charset;
import java.util.Arrays;
/**
 * Helpers for changing a file's encoding in two distinct ways — reloading the
 * bytes on disk under a different charset, or converting (re-saving) the
 * in-memory text in a different charset — plus the checks that decide whether
 * either operation is possible and lossless for a given file.
 */
public class EncodingUtil {
  /** Verdict of a lossless-ness check for reload/convert. */
  enum Magic8 {
    ABSOLUTELY,          // round-trip is byte- and text-identical
    WELL_IF_YOU_INSIST,  // bytes or text differ, but no information is lost
    NO_WAY               // operation would corrupt or lose data
  }
  // check if file can be loaded in the encoding correctly:
  // returns true if bytes on disk, converted to text with the charset, converted back to bytes matched
  static Magic8 isSafeToReloadIn(@NotNull VirtualFile virtualFile, @NotNull String text, @NotNull byte[] bytes, @NotNull Charset charset) {
    // file has BOM but the charset hasn't
    byte[] bom = virtualFile.getBOM();
    if (bom != null && !CharsetToolkit.canHaveBom(charset, bom)) return Magic8.NO_WAY;
    // the charset has mandatory BOM (e.g. UTF-xx) but the file hasn't or has wrong
    byte[] mandatoryBom = CharsetToolkit.getMandatoryBom(charset);
    if (mandatoryBom != null && !ArrayUtil.startsWith(bytes, mandatoryBom)) return Magic8.NO_WAY;
    String loaded = LoadTextUtil.getTextByBinaryPresentation(bytes, charset).toString();
    String separator = FileDocumentManager.getInstance().getLineSeparator(virtualFile, null);
    String toSave = StringUtil.convertLineSeparators(loaded, separator);
    String failReason = LoadTextUtil.wasCharsetDetectedFromBytes(virtualFile);
    if (failReason != null && CharsetToolkit.UTF8_CHARSET.equals(virtualFile.getCharset()) && !CharsetToolkit.UTF8_CHARSET.equals(charset)) {
      return Magic8.NO_WAY; // can't reload utf8-autodetected file in another charset
    }
    byte[] bytesToSave;
    try {
      bytesToSave = toSave.getBytes(charset);
    }
    catch (UnsupportedOperationException e) {
      return Magic8.NO_WAY;
    }
    if (bom != null && !ArrayUtil.startsWith(bytesToSave, bom)) {
      bytesToSave = ArrayUtil.mergeArrays(bom, bytesToSave); // for 2-byte encodings String.getBytes(Charset) adds BOM automatically
    }
    return !Arrays.equals(bytesToSave, bytes) ? Magic8.NO_WAY : loaded.equals(text) ? Magic8.ABSOLUTELY : Magic8.WELL_IF_YOU_INSIST;
  }
  /**
   * Checks whether the in-memory {@code text} can be saved in {@code charset}
   * without data loss: the text is encoded (with the file's line separator)
   * and decoded back, and the result is compared against both the original
   * text and the bytes currently on disk.
   */
  static Magic8 isSafeToConvertTo(@NotNull VirtualFile virtualFile, @NotNull String text, @NotNull byte[] bytesOnDisk, @NotNull Charset charset) {
    try {
      String lineSeparator = FileDocumentManager.getInstance().getLineSeparator(virtualFile, null);
      String textToSave = lineSeparator.equals("\n") ? text : StringUtil.convertLineSeparators(text, lineSeparator);
      Pair<Charset, byte[]> chosen = LoadTextUtil.chooseMostlyHarmlessCharset(virtualFile.getCharset(), charset, textToSave);
      byte[] saved = chosen.second;
      CharSequence textLoadedBack = LoadTextUtil.getTextByBinaryPresentation(saved, charset);
      return !text.equals(textLoadedBack.toString()) ? Magic8.NO_WAY : Arrays.equals(saved, bytesOnDisk) ? Magic8.ABSOLUTELY : Magic8.WELL_IF_YOU_INSIST;
    }
    catch (UnsupportedOperationException e) { // unsupported encoding
      return Magic8.NO_WAY;
    }
  }
  /**
   * Saves the document's text to {@code virtualFile} encoded in {@code charset}
   * and records the charset as the file's encoding. Shows an error hint if the
   * file cannot be made writable, and an error dialog on I/O failure.
   */
  static void saveIn(@NotNull final Document document,
                     final Editor editor,
                     @NotNull final VirtualFile virtualFile,
                     @NotNull final Charset charset) {
    FileDocumentManager documentManager = FileDocumentManager.getInstance();
    documentManager.saveDocument(document);
    final Project project = ProjectLocator.getInstance().guessProjectForFile(virtualFile);
    boolean writable = project == null ? virtualFile.isWritable() : ReadonlyStatusHandler.ensureFilesWritable(project, virtualFile);
    if (!writable) {
      CommonRefactoringUtil.showErrorHint(project, editor, "Cannot save the file " + virtualFile.getPresentableUrl(), "Unable to Save", null);
      return;
    }
    // first, save the file in the new charset and then mark the file as having the correct encoding
    try {
      ApplicationManager.getApplication().runWriteAction(new ThrowableComputable<Object, IOException>() {
        @Override
        public Object compute() throws IOException {
          virtualFile.setCharset(charset);
          LoadTextUtil.write(project, virtualFile, virtualFile, document.getText(), document.getModificationStamp());
          return null;
        }
      });
    }
    catch (IOException io) {
      Messages.showErrorDialog(project, io.getMessage(), "Error Writing File");
    }
    // suppress the automatic reload that the encoding change would trigger
    EncodingProjectManagerImpl.suppressReloadDuring(() -> EncodingManager.getInstance().setEncoding(virtualFile, charset));
  }
  /**
   * Re-reads {@code virtualFile} from disk interpreting its bytes in
   * {@code charset}. If the file has no cached document, only the encoding
   * association is updated; otherwise the content reload is forced and the
   * encoding recorded just before the document is refreshed.
   */
  static void reloadIn(@NotNull final VirtualFile virtualFile, @NotNull final Charset charset) {
    final FileDocumentManager documentManager = FileDocumentManager.getInstance();
    //Project project = ProjectLocator.getInstance().guessProjectForFile(myFile);
    //if (documentManager.isFileModified(myFile)) {
    //  int result = Messages.showDialog(project, "File is modified. Reload file anyway?", "File is Modified", new String[]{"Reload", "Cancel"}, 0, AllIcons.General.WarningDialog);
    //  if (result != 0) return;
    //}
    if (documentManager.getCachedDocument(virtualFile) == null) {
      // no need to reload document
      EncodingManager.getInstance().setEncoding(virtualFile, charset);
      return;
    }
    final Disposable disposable = Disposer.newDisposable();
    MessageBusConnection connection = ApplicationManager.getApplication().getMessageBus().connect(disposable);
    connection.subscribe(AppTopics.FILE_DOCUMENT_SYNC, new FileDocumentManagerAdapter() {
      @Override
      public void beforeFileContentReload(VirtualFile file, @NotNull Document document) {
        if (!file.equals(virtualFile)) return;
        Disposer.dispose(disposable); // disconnect
        EncodingManager.getInstance().setEncoding(file, charset);
        LoadTextUtil.setCharsetWasDetectedFromBytes(file, null);
      }
    });
    // if file was modified, the user will be asked here
    try {
      EncodingProjectManagerImpl.suppressReloadDuring(() -> ((VirtualFileListener)documentManager).contentsChanged(
        new VirtualFileEvent(null, virtualFile, virtualFile.getName(), virtualFile.getParent())));
    }
    finally {
      Disposer.dispose(disposable);
    }
  }
  // returns (hardcoded charset from the file type, explanation) or (null, null) if file type does not restrict encoding
  @NotNull
  private static Pair<Charset, String> checkHardcodedCharsetFileType(@NotNull VirtualFile virtualFile) {
    FileType fileType = virtualFile.getFileType();
    if (fileType.isBinary()) return Pair.create(null, "binary file");
    // in lesser IDEs all special file types are plain text so check for that first
    if (fileType == FileTypes.PLAIN_TEXT) return Pair.create(null, null);
    if (fileType == StdFileTypes.GUI_DESIGNER_FORM) return Pair.create(CharsetToolkit.UTF8_CHARSET, "IDEA GUI Designer form");
    if (fileType == StdFileTypes.IDEA_MODULE) return Pair.create(CharsetToolkit.UTF8_CHARSET, "IDEA module file");
    if (fileType == StdFileTypes.IDEA_PROJECT) return Pair.create(CharsetToolkit.UTF8_CHARSET, "IDEA project file");
    if (fileType == StdFileTypes.IDEA_WORKSPACE) return Pair.create(CharsetToolkit.UTF8_CHARSET, "IDEA workspace file");
    if (fileType == StdFileTypes.PROPERTIES) return Pair.create(virtualFile.getCharset(), ".properties file");
    if (fileType == StdFileTypes.XML || fileType == StdFileTypes.JSPX) {
      return Pair.create(virtualFile.getCharset(), "XML file");
    }
    return Pair.create(null, null);
  }
  @NotNull
  // returns pair (existing charset (null means N/A); failReason: null means enabled, notnull means disabled and contains error message)
  public static Pair<Charset, String> checkCanReload(@NotNull VirtualFile virtualFile) {
    if (virtualFile.isDirectory()) {
      return Pair.create(null, "file is a directory");
    }
    FileDocumentManager documentManager = FileDocumentManager.getInstance();
    Document document = documentManager.getDocument(virtualFile);
    if (document == null) return Pair.create(null, "binary file");
    Charset charsetFromContent = ((EncodingManagerImpl)EncodingManager.getInstance()).computeCharsetFromContent(virtualFile);
    Charset existing = charsetFromContent;
    String failReason = LoadTextUtil.wasCharsetDetectedFromBytes(virtualFile);
    if (failReason != null) {
      // no point changing encoding if it was auto-detected
      existing = virtualFile.getCharset();
    }
    else if (charsetFromContent != null) {
      failReason = "hard coded in text";
    }
    else {
      Pair<Charset, String> fileTypeCheck = checkHardcodedCharsetFileType(virtualFile);
      if (fileTypeCheck.second != null) {
        failReason = fileTypeCheck.second;
        existing = fileTypeCheck.first;
      }
    }
    if (failReason != null) {
      return Pair.create(existing, failReason);
    }
    return Pair.create(virtualFile.getCharset(), null);
  }
  /**
   * Checks whether converting (re-saving) the file in another encoding is
   * allowed; the encoding may be dictated by the file content or its file
   * type, in which case the reason is returned.
   */
  @Nullable("null means enabled, notnull means disabled and contains error message")
  static String checkCanConvert(@NotNull VirtualFile virtualFile) {
    if (virtualFile.isDirectory()) {
      return "file is a directory";
    }
    String failReason = null;
    Charset charsetFromContent = ((EncodingManagerImpl)EncodingManager.getInstance()).computeCharsetFromContent(virtualFile);
    if (charsetFromContent != null) {
      failReason = "Encoding is hard-coded in the text";
    }
    else {
      Pair<Charset, String> check = checkHardcodedCharsetFileType(virtualFile);
      if (check.second != null) {
        failReason = check.second;
      }
    }
    if (failReason != null) {
      return failReason;
    }
    return null;
  }
  // null means enabled, (current charset, error description) otherwise
  @Nullable
  public static Pair<Charset, String> checkSomeActionEnabled(@NotNull VirtualFile selectedFile) {
    String saveError = checkCanConvert(selectedFile);
    if (saveError == null) return null;
    Pair<Charset, String> reloadError = checkCanReload(selectedFile);
    if (reloadError.second == null) return null;
    return Pair.create(reloadError.first, saveError);
  }
}
| idea4bsd/idea4bsd | platform/platform-impl/src/com/intellij/openapi/vfs/encoding/EncodingUtil.java | Java | apache-2.0 | 11,775 |
#!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
from lxml import objectify, etree
from django.contrib.auth.models import Group, User
from useradmin.models import HuePermission, GroupPermission, get_default_user_group
from hadoop import cluster
from desktop.lib import fsmanager
def grant_access(username, groupname, appname):
  """Grant `username` (via membership of `groupname`) the 'access' permission for `appname`."""
  add_permission(username, groupname, 'access', appname)
def add_permission(username, groupname, permname, appname):
  """Give a user a Hue permission through a group.

  Ensures the group and the (app, action) HuePermission exist, links them via
  GroupPermission, and adds the user to the group if not already a member.
  Raises User.DoesNotExist if the user is unknown.
  """
  user = User.objects.get(username=username)
  group, created = Group.objects.get_or_create(name=groupname)
  perm, created = HuePermission.objects.get_or_create(app=appname, action=permname)
  GroupPermission.objects.get_or_create(group=group, hue_permission=perm)
  if not user.groups.filter(name=group.name).exists():
    user.groups.add(group)
    user.save()
def add_to_group(username, groupname=None):
  """Add `username` to `groupname` (creating the group if needed).

  When `groupname` is None, the configured default user group is used; that
  default must exist or the assertion fails.
  """
  if groupname is None:
    group = get_default_user_group()
    assert group is not None
    groupname = group.name
  user = User.objects.get(username=username)
  group, created = Group.objects.get_or_create(name=groupname)
  if not user.groups.filter(name=group.name).exists():
    user.groups.add(group)
    user.save()
def remove_from_group(username, groupname):
  """Remove `username` from `groupname`, creating the group if it is missing."""
  member = User.objects.get(username=username)
  group, _created = Group.objects.get_or_create(name=groupname)
  # Only touch the user record when the membership actually exists.
  if member.groups.filter(name=group.name).exists():
    member.groups.remove(group)
    member.save()
def reformat_json(json_obj):
  """Serialize a JSON value to a canonical string.

  Strings are assumed to already contain serialized JSON and are round-tripped
  through json.loads/json.dumps so that equivalent documents compare equal;
  any other object is serialized directly.
  """
  if not isinstance(json_obj, basestring):
    return json.dumps(json_obj)
  return json.dumps(json.loads(json_obj))
def reformat_xml(xml_obj):
  """Return a canonical XML serialization of a string or lxml element.

  Strings are re-parsed (preserving CDATA sections, stripping blank text)
  and re-serialized so that equivalent XML documents compare equal.
  """
  if isinstance(xml_obj, basestring):
    return etree.tostring(objectify.fromstring(xml_obj, etree.XMLParser(strip_cdata=False, remove_blank_text=True)))
  else:
    return etree.tostring(xml_obj)
def clear_sys_caches():
  """Clear the Hadoop cluster and filesystem-manager caches.

  Returns the previous cache contents so tests can restore them later via
  `restore_sys_caches`.
  """
  return cluster.clear_caches(), fsmanager.clear_cache()
def restore_sys_caches(old_caches):
  """Restore caches previously captured by `clear_sys_caches` (a 2-tuple of cluster cache, fs cache)."""
  cluster.restore_caches(old_caches[0])
  fsmanager.restore_cache(old_caches[1]) | Peddle/hue | desktop/core/src/desktop/lib/test_utils.py | Python | apache-2.0 | 2,861 |
/*
* Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef COMMON_AUDIO_FIR_FILTER_FACTORY_H_
#define COMMON_AUDIO_FIR_FILTER_FACTORY_H_
#include <string.h>
namespace webrtc {
class FIRFilter;
// Creates a filter with the given coefficients. All initial state values will
// be zeros.
// The length of the chunks fed to the filter should never be greater than
// |max_input_length|. This is needed because, when vectorizing it is
// necessary to concatenate the input after the state, and resizing this array
// dynamically is expensive.
FIRFilter* CreateFirFilter(const float* coefficients,
size_t coefficients_length,
size_t max_input_length);
} // namespace webrtc
#endif // COMMON_AUDIO_FIR_FILTER_FACTORY_H_
| wangcy6/storm_app | frame/c++/webrtc-master/common_audio/fir_filter_factory.h | C | apache-2.0 | 1,140 |
/**
 * Parse the query-string parameters of a URL.
 *
 * Keeps the original calling convention: returns an array of parameter names
 * which also carries each name as a property mapped to its decoded value, so
 * both `getUrlVars()['foo']` and iteration over the names work.
 *
 * Fixes over the previous version: a URL without '?' no longer leaks the
 * whole URL in as a "parameter", the '#fragment' is no longer glued onto the
 * last value, values containing '=' are no longer truncated, and keys/values
 * are percent-decoded ('+' is treated as a space per form encoding).
 *
 * @param {string} [url] URL to parse; defaults to window.location.href.
 * @returns {Array} parameter names, with name -> value properties attached.
 */
function getUrlVars(url) {
    var vars = [];
    var href = (url === undefined) ? window.location.href : String(url);
    // Drop any fragment so it is not appended to the last value.
    var hashPos = href.indexOf('#');
    if (hashPos !== -1) {
        href = href.slice(0, hashPos);
    }
    var queryPos = href.indexOf('?');
    if (queryPos === -1) {
        return vars; // no query string present
    }
    var pairs = href.slice(queryPos + 1).split('&');
    for (var i = 0; i < pairs.length; i++) {
        if (pairs[i] === '') {
            continue; // skip empty segments such as a trailing '&'
        }
        // Split only on the first '=' so values containing '=' survive.
        var eq = pairs[i].indexOf('=');
        var rawKey = (eq === -1) ? pairs[i] : pairs[i].slice(0, eq);
        var rawValue = (eq === -1) ? undefined : pairs[i].slice(eq + 1);
        var key = safeDecode(rawKey);
        vars.push(key);
        vars[key] = (rawValue === undefined) ? undefined : safeDecode(rawValue);
    }
    return vars;

    // Decode a URI component, falling back to the raw text on malformed
    // percent-escapes instead of throwing (the original never threw).
    function safeDecode(s) {
        try {
            return decodeURIComponent(s.replace(/\+/g, ' '));
        } catch (e) {
            return s;
        }
    }
}
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8" />
<meta http-equiv="X-UA-Compatible" content="IE=edge" />
<meta
name="viewport"
content="width=device-width, initial-scale=1, maximum-scale=1, minimum-scale=1, user-scalable=no"
/>
<meta name="description" content="Star burst overlapping billboards." />
<meta name="cesium-sandcastle-labels" content="Showcases" />
<title>Cesium Demo</title>
<script type="text/javascript" src="../Sandcastle-header.js"></script>
<script
type="text/javascript"
src="../../../Build/CesiumUnminified/Cesium.js"
nomodule
></script>
<script type="module" src="../load-cesium-es6.js"></script>
</head>
<body
class="sandcastle-loading"
data-sandcastle-bucket="bucket-requirejs.html"
>
<style>
@import url(../templates/bucket.css);
</style>
<div id="cesiumContainer" class="fullSize"></div>
<div id="loadingOverlay"><h1>Loading...</h1></div>
<div id="toolbar">
<div id="zoomButtons"></div>
</div>
<script id="cesium_sandcastle_script">
function startup(Cesium) {
"use strict";
//Sandcastle_Begin
var viewer = new Cesium.Viewer("cesiumContainer", {
selectionIndicator: false,
});
// Add labels clustered at the same location
var numBillboards = 30;
for (var i = 0; i < numBillboards; ++i) {
var position = Cesium.Cartesian3.fromDegrees(-75.59777, 40.03883);
viewer.entities.add({
position: position,
billboard: {
image: "../images/facility.gif",
scale: 2.5,
},
label: {
text: "Label" + i,
show: false,
},
});
}
var scene = viewer.scene;
var camera = scene.camera;
var handler = new Cesium.ScreenSpaceEventHandler(scene.canvas);
handler.setInputAction(function (movement) {
// Star burst on left mouse click.
starBurst(movement.position);
}, Cesium.ScreenSpaceEventType.LEFT_CLICK);
handler.setInputAction(function (movement) {
// Remove the star burst when the mouse exits the circle or show the label of the billboard the mouse is hovering over.
updateStarBurst(movement.endPosition);
}, Cesium.ScreenSpaceEventType.MOUSE_MOVE);
camera.moveStart.addEventListener(function () {
// Reset the star burst on camera move because the lines from the center
// because the line end points rely on the screen space positions of the billboards.
undoStarBurst();
});
// State saved across mouse click and move events
var starBurstState = {
enabled: false,
pickedEntities: undefined,
billboardEyeOffsets: undefined,
labelEyeOffsets: undefined,
linePrimitive: undefined,
radius: undefined,
center: undefined,
pixelPadding: 10.0,
angleStart: 0.0,
angleEnd: Cesium.Math.PI,
maxDimension: undefined,
};
function offsetBillboard(
entity,
entityPosition,
angle,
magnitude,
lines,
billboardEyeOffsets,
labelEyeOffsets
) {
var x = magnitude * Math.cos(angle);
var y = magnitude * Math.sin(angle);
var offset = new Cesium.Cartesian2(x, y);
var drawingBufferWidth = scene.drawingBufferWidth;
var drawingBufferHeight = scene.drawingBufferHeight;
var pixelRatio = scene.pixelRatio;
var diff = Cesium.Cartesian3.subtract(
entityPosition,
camera.positionWC,
new Cesium.Cartesian3()
);
var distance = Cesium.Cartesian3.dot(camera.directionWC, diff);
var dimensions = camera.frustum.getPixelDimensions(
drawingBufferWidth,
drawingBufferHeight,
distance,
pixelRatio,
new Cesium.Cartesian2()
);
Cesium.Cartesian2.multiplyByScalar(
offset,
Cesium.Cartesian2.maximumComponent(dimensions),
offset
);
var labelOffset;
var billboardOffset = entity.billboard.eyeOffset;
var eyeOffset = new Cesium.Cartesian3(offset.x, offset.y, 0.0);
entity.billboard.eyeOffset = eyeOffset;
if (Cesium.defined(entity.label)) {
labelOffset = entity.label.eyeOffset;
entity.label.eyeOffset = new Cesium.Cartesian3(
offset.x,
offset.y,
-10.0
);
}
var endPoint = Cesium.Matrix4.multiplyByPoint(
camera.viewMatrix,
entityPosition,
new Cesium.Cartesian3()
);
Cesium.Cartesian3.add(eyeOffset, endPoint, endPoint);
Cesium.Matrix4.multiplyByPoint(
camera.inverseViewMatrix,
endPoint,
endPoint
);
lines.push(endPoint);
billboardEyeOffsets.push(billboardOffset);
labelEyeOffsets.push(labelOffset);
}
function starBurst(mousePosition) {
if (Cesium.defined(starBurstState.pickedEntities)) {
return;
}
var pickedObjects = scene.drillPick(mousePosition);
if (!Cesium.defined(pickedObjects) || pickedObjects.length < 2) {
return;
}
var billboardEntities = [];
var length = pickedObjects.length;
var i;
for (i = 0; i < length; ++i) {
var pickedObject = pickedObjects[i];
if (pickedObject.primitive instanceof Cesium.Billboard) {
billboardEntities.push(pickedObject);
}
}
if (billboardEntities.length === 0) {
return;
}
var pickedEntities = (starBurstState.pickedEntities = []);
var billboardEyeOffsets = (starBurstState.billboardEyeOffsets = []);
var labelEyeOffsets = (starBurstState.labelEyeOffsets = []);
var lines = [];
starBurstState.maxDimension = Number.NEGATIVE_INFINITY;
var angleStart = starBurstState.angleStart;
var angleEnd = starBurstState.angleEnd;
var angle = angleStart;
var angleIncrease;
var magnitude;
var magIncrease;
var maxDimension;
// Drill pick gets all of the entities under the mouse pointer.
// Find the billboards and set their pixel offsets in a circle pattern.
length = billboardEntities.length;
i = 0;
while (i < length) {
var object = billboardEntities[i];
if (pickedEntities.length === 0) {
starBurstState.center = Cesium.Cartesian3.clone(
object.primitive.position
);
}
if (!Cesium.defined(angleIncrease)) {
var width = object.primitive.width;
var height = object.primitive.height;
maxDimension =
Math.max(width, height) * object.primitive.scale +
starBurstState.pixelPadding;
magnitude = maxDimension + maxDimension * 0.5;
magIncrease = magnitude;
angleIncrease = maxDimension / magnitude;
}
offsetBillboard(
object.id,
object.primitive.position,
angle,
magnitude,
lines,
billboardEyeOffsets,
labelEyeOffsets
);
pickedEntities.push(object);
var reflectedAngle = angleEnd - angle;
if (
i + 1 < length &&
reflectedAngle - angleIncrease * 0.5 > angle + angleIncrease * 0.5
) {
object = billboardEntities[++i];
offsetBillboard(
object.id,
object.primitive.position,
reflectedAngle,
magnitude,
lines,
billboardEyeOffsets,
labelEyeOffsets
);
pickedEntities.push(object);
}
angle += angleIncrease;
if (
reflectedAngle - angleIncrease * 0.5 <
angle + angleIncrease * 0.5
) {
magnitude += magIncrease;
angle = angleStart;
angleIncrease = maxDimension / magnitude;
}
++i;
}
// Add lines from the pick center out to the translated billboard.
var instances = [];
length = lines.length;
for (i = 0; i < length; ++i) {
var pickedEntity = pickedEntities[i];
starBurstState.maxDimension = Math.max(
pickedEntity.primitive.width,
pickedEntity.primitive.height,
starBurstState.maxDimension
);
instances.push(
new Cesium.GeometryInstance({
geometry: new Cesium.SimplePolylineGeometry({
positions: [starBurstState.center, lines[i]],
arcType: Cesium.ArcType.NONE,
granularity: Cesium.Math.PI_OVER_FOUR,
}),
attributes: {
color: Cesium.ColorGeometryInstanceAttribute.fromColor(
Cesium.Color.WHITE
),
},
})
);
}
starBurstState.linePrimitive = scene.primitives.add(
new Cesium.Primitive({
geometryInstances: instances,
appearance: new Cesium.PerInstanceColorAppearance({
flat: true,
translucent: false,
}),
asynchronous: false,
})
);
viewer.selectedEntity = undefined;
starBurstState.radius = magnitude + magIncrease;
}
function updateStarBurst(mousePosition) {
  // Nothing to update unless a star burst is currently displayed.
  if (!Cesium.defined(starBurstState.pickedEntities)) {
    return;
  }
  // A mousemove event fires on the initiating click; swallow that first
  // event so a label is not shown immediately.
  if (!starBurstState.enabled) {
    starBurstState.enabled = true;
    return;
  }
  // Collapse the star burst once the mouse leaves its screen-space circle;
  // otherwise show the label of the billboard under the cursor.
  var centerOnScreen = Cesium.SceneTransforms.wgs84ToWindowCoordinates(
    scene,
    starBurstState.center
  );
  var offsetFromCenter = Cesium.Cartesian2.subtract(
    mousePosition,
    centerOnScreen,
    new Cesium.Cartesian2()
  );
  var radius = starBurstState.radius;
  var leftCircle =
    Cesium.Cartesian2.magnitudeSquared(offsetFromCenter) > radius * radius;
  var belowBand =
    offsetFromCenter.y >
    3.0 * (starBurstState.maxDimension + starBurstState.pixelPadding);
  if (leftCircle || belowBand) {
    undoStarBurst();
  } else {
    showLabels(mousePosition);
  }
}
function undoStarBurst() {
  var picked = starBurstState.pickedEntities;
  if (!Cesium.defined(picked)) {
    return;
  }
  var billboardOffsets = starBurstState.billboardEyeOffsets;
  var labelOffsets = starBurstState.labelEyeOffsets;
  // Restore every entity's original eye offsets and hide its label again.
  picked.forEach(function (pickedObject, index) {
    var entity = pickedObject.id;
    entity.billboard.eyeOffset = billboardOffsets[index];
    if (Cesium.defined(entity.label)) {
      entity.label.eyeOffset = labelOffsets[index];
      entity.label.show = false;
    }
  });
  // Remove the connecting-line primitive and reset all star burst state so
  // a new burst can be started cleanly.
  scene.primitives.remove(starBurstState.linePrimitive);
  starBurstState.linePrimitive = undefined;
  starBurstState.pickedEntities = undefined;
  starBurstState.billboardEyeOffsets = undefined;
  starBurstState.labelEyeOffsets = undefined;
  starBurstState.radius = undefined;
  starBurstState.enabled = false;
}
// Billboard object whose label is currently visible, if any.
var currentObject;
function showLabels(mousePosition) {
  // Drill-pick so billboards stacked under the cursor are all candidates;
  // the first Billboard primitive found wins.
  var drillResults = scene.drillPick(mousePosition);
  var hovered;
  if (Cesium.defined(drillResults)) {
    for (var i = 0; i < drillResults.length; ++i) {
      if (drillResults[i].primitive instanceof Cesium.Billboard) {
        hovered = drillResults[i];
        break;
      }
    }
  }
  // No change of hovered billboard: nothing to do.
  if (hovered === currentObject) {
    return;
  }
  if (Cesium.defined(hovered) && Cesium.defined(hovered.id.label)) {
    // Swap which label is visible.
    if (Cesium.defined(currentObject)) {
      currentObject.id.label.show = false;
    }
    currentObject = hovered;
    hovered.id.label.show = true;
  } else if (Cesium.defined(currentObject)) {
    // Cursor moved off all labeled billboards: hide the previous label.
    currentObject.id.label.show = false;
    currentObject = undefined;
  }
}
//Sandcastle_End
Sandcastle.finishedLoading();
}
if (typeof Cesium !== "undefined") {
window.startupCalled = true;
startup(Cesium);
}
</script>
</body>
</html>
| likangning93/cesium | Apps/Sandcastle/gallery/Star Burst.html | HTML | apache-2.0 | 13,915 |
// Redux action type dispatched when a new course is created.
export const CREATE_COURSE = 'CREATE_COURSE';
| bluSCALE4/react-hello-world | src/actions/actionTypes.js | JavaScript | apache-2.0 | 46 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.opensoc.topology.runner;
import java.lang.reflect.Constructor;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Stack;
import oi.thekraken.grok.api.Grok;
import org.apache.commons.configuration.Configuration;
import org.apache.commons.configuration.PropertiesConfiguration;
import org.apache.commons.lang.StringUtils;
import org.apache.storm.hdfs.bolt.HdfsBolt;
import org.apache.storm.hdfs.bolt.format.DefaultFileNameFormat;
import org.apache.storm.hdfs.bolt.format.DelimitedRecordFormat;
import org.apache.storm.hdfs.bolt.format.FileNameFormat;
import org.apache.storm.hdfs.bolt.format.RecordFormat;
import org.apache.storm.hdfs.bolt.rotation.FileRotationPolicy;
import org.apache.storm.hdfs.bolt.rotation.FileSizeRotationPolicy;
import org.apache.storm.hdfs.bolt.rotation.FileSizeRotationPolicy.Units;
import org.apache.storm.hdfs.bolt.sync.CountSyncPolicy;
import org.apache.storm.hdfs.bolt.sync.SyncPolicy;
import org.apache.storm.hdfs.common.rotation.MoveFileAction;
import org.json.simple.JSONObject;
import storm.kafka.BrokerHosts;
import storm.kafka.KafkaSpout;
import storm.kafka.SpoutConfig;
import storm.kafka.ZkHosts;
import storm.kafka.bolt.KafkaBolt;
import backtype.storm.Config;
import backtype.storm.LocalCluster;
import backtype.storm.StormSubmitter;
import backtype.storm.generated.Grouping;
import backtype.storm.spout.RawScheme;
import backtype.storm.spout.SchemeAsMultiScheme;
import backtype.storm.topology.BoltDeclarer;
import backtype.storm.topology.TopologyBuilder;
import backtype.storm.tuple.Fields;
import com.esotericsoftware.kryo.serializers.FieldSerializer;
import com.esotericsoftware.kryo.serializers.MapSerializer;
import com.opensoc.alerts.TelemetryAlertsBolt;
import com.opensoc.alerts.adapters.HbaseWhiteAndBlacklistAdapter;
import com.opensoc.alerts.interfaces.AlertsAdapter;
import com.opensoc.enrichment.adapters.cif.CIFHbaseAdapter;
import com.opensoc.enrichment.adapters.geo.GeoMysqlAdapter;
import com.opensoc.enrichment.adapters.host.HostFromPropertiesFileAdapter;
import com.opensoc.enrichment.adapters.whois.WhoisHBaseAdapter;
import com.opensoc.enrichment.adapters.threat.ThreatHbaseAdapter;
import com.opensoc.enrichment.common.GenericEnrichmentBolt;
import com.opensoc.enrichment.interfaces.EnrichmentAdapter;
import com.opensoc.hbase.HBaseBolt;
import com.opensoc.hbase.HBaseStreamPartitioner;
import com.opensoc.hbase.TupleTableConfig;
import com.opensoc.helpers.topology.Cli;
import com.opensoc.helpers.topology.SettingsLoader;
import com.opensoc.index.interfaces.IndexAdapter;
import com.opensoc.indexing.TelemetryIndexingBolt;
import com.opensoc.json.serialization.JSONKryoSerializer;
public abstract class TopologyRunner {
// Topology configuration loaded from <config_path>/topologies/<subdir>/topology.conf.
protected Configuration config;
// Storm topology builder populated by the initialize* methods.
protected TopologyBuilder builder;
// Storm runtime configuration submitted with the topology.
protected Config conf;
// True when submitting to an in-process LocalCluster instead of a remote cluster.
protected boolean local_mode = true;
// Enables Storm debug logging on the submitted topology.
protected boolean debug = true;
// Root of the configuration tree; set from the CLI or default_config_path.
protected String config_path = null;
protected String default_config_path = "OpenSOC_Configs";
// Result of the most recent component initialization (NOTE(review): assigned
// but never checked by initTopology — confirm whether failures should abort).
protected boolean success = false;
// Ordered chains of component names; the last element of each stack is the
// most recently wired component, i.e. the upstream of the next one added.
protected Stack<String> messageComponents = new Stack<String>();
protected Stack<String> errorComponents = new Stack<String>();
protected Stack<String> alertComponents = new Stack<String>();
protected Stack<String> dataComponents = new Stack<String>();
protected Stack<String> terminalComponents = new Stack<String>();
/**
 * Parses the command line, loads the environment/topology configuration,
 * wires up every enabled spout and bolt, and finally submits the topology
 * either to an in-process {@link LocalCluster} (local mode) or to a remote
 * Storm cluster.
 *
 * @param args   raw command-line arguments (debug / local / path / generator flags)
 * @param subdir configuration sub-directory of this concrete topology
 * @throws Exception if the configuration cannot be loaded or submission fails
 */
public void initTopology(String args[], String subdir)
        throws Exception {
    Cli command_line = new Cli(args);
    command_line.parse();
    System.out.println("[OpenSOC] Starting topology deployment...");
    debug = command_line.isDebug();
    System.out.println("[OpenSOC] Debug mode set to: " + debug);
    local_mode = command_line.isLocal_mode();
    System.out.println("[OpenSOC] Local mode set to: " + local_mode);
    // Resolve the configuration root: external path from the CLI, or the
    // packaged default.
    if (command_line.getPath() != null) {
        config_path = command_line.getPath();
        System.out
                .println("[OpenSOC] Setting config path to external config path: "
                        + config_path);
    } else {
        config_path = default_config_path;
        System.out
                .println("[OpenSOC] Initializing from default internal config path: "
                        + config_path);
    }
    String topology_conf_path = config_path + "/topologies/" + subdir
            + "/topology.conf";
    String environment_identifier_path = config_path
            + "/topologies/environment_identifier.conf";
    String topology_identifier_path = config_path + "/topologies/" + subdir
            + "/topology_identifier.conf";
    System.out.println("[OpenSOC] Looking for environment identifier: "
            + environment_identifier_path);
    System.out.println("[OpenSOC] Looking for topology identifier: "
            + topology_identifier_path);
    System.out.println("[OpenSOC] Looking for topology config: "
            + topology_conf_path);
    config = new PropertiesConfiguration(topology_conf_path);
    JSONObject environment_identifier = SettingsLoader
            .loadEnvironmentIdnetifier(environment_identifier_path);
    JSONObject topology_identifier = SettingsLoader
            .loadTopologyIdnetifier(topology_identifier_path);
    String topology_name = SettingsLoader.generateTopologyName(
            environment_identifier, topology_identifier);
    System.out.println("[OpenSOC] Initializing Topology: " + topology_name);
    builder = new TopologyBuilder();
    conf = new Config();
    conf.registerSerialization(JSONObject.class, MapSerializer.class);
    conf.setDebug(debug);
    // ---- Spout: synthetic test generator or Kafka, never both ----
    System.out.println("[OpenSOC] Initializing Spout: " + topology_name);
    if (command_line.isGenerator_spout()) {
        String component_name = config.getString("spout.test.name",
                "DefaultTopologySpout");
        success = initializeTestingSpout(component_name);
        messageComponents.add(component_name);
        System.out.println("[OpenSOC] ------Component " + component_name
                + " initialized with the following settings:");
        SettingsLoader.printConfigOptions((PropertiesConfiguration) config,
                "spout.test");
    }
    if (!command_line.isGenerator_spout()) {
        String component_name = config.getString("spout.kafka.name",
                "DefaultTopologyKafkaSpout");
        success = initializeKafkaSpout(component_name);
        messageComponents.add(component_name);
        System.out.println("[OpenSOC] ------Component " + component_name
                + " initialized with the following settings:");
        SettingsLoader.printConfigOptions((PropertiesConfiguration) config,
                "spout.kafka");
    }
    // ---- Optional bolts, each gated by its *.enabled flag ----
    if (config.getBoolean("bolt.parser.enabled", true)) {
        String component_name = config.getString("bolt.parser.name",
                "DefaultTopologyParserBot");
        success = initializeParsingBolt(topology_name, component_name);
        messageComponents.add(component_name);
        errorComponents.add(component_name);
        dataComponents.add(component_name);
        System.out.println("[OpenSOC] ------Component " + component_name
                + " initialized with the following settings:");
        SettingsLoader.printConfigOptions((PropertiesConfiguration) config,
                "bolt.parser");
    }
    if (config.getBoolean("bolt.enrichment.geo.enabled", false)) {
        String component_name = config.getString(
                "bolt.enrichment.geo.name", "DefaultGeoEnrichmentBolt");
        success = initializeGeoEnrichment(topology_name, component_name);
        messageComponents.add(component_name);
        errorComponents.add(component_name);
        System.out.println("[OpenSOC] ------Component " + component_name
                + " initialized with the following settings:");
        SettingsLoader.printConfigOptions((PropertiesConfiguration) config,
                "bolt.enrichment.geo");
        SettingsLoader.printConfigOptions((PropertiesConfiguration) config,
                "mysql");
    }
    if (config.getBoolean("bolt.enrichment.host.enabled", false)) {
        String component_name = config.getString(
                "bolt.enrichment.host.name", "DefaultHostEnrichmentBolt");
        // FIX: resolve the known-hosts whitelist relative to config_path
        // instead of the previously hard-coded "OpenSOC_Configs" root, so an
        // external --config_path is honored.
        success = initializeHostsEnrichment(topology_name, component_name,
                config_path + "/etc/whitelists/known_hosts.conf");
        messageComponents.add(component_name);
        errorComponents.add(component_name);
        System.out.println("[OpenSOC] ------Component " + component_name
                + " initialized with the following settings:");
        SettingsLoader.printConfigOptions((PropertiesConfiguration) config,
                "bolt.enrichment.host");
    }
    if (config.getBoolean("bolt.enrichment.whois.enabled", false)) {
        String component_name = config.getString(
                "bolt.enrichment.whois.name", "DefaultWhoisEnrichmentBolt");
        success = initializeWhoisEnrichment(topology_name, component_name);
        messageComponents.add(component_name);
        errorComponents.add(component_name);
        System.out.println("[OpenSOC] ------Component " + component_name
                + " initialized with the following settings:");
        SettingsLoader.printConfigOptions((PropertiesConfiguration) config,
                "bolt.enrichment.whois");
    }
    if (config.getBoolean("bolt.enrichment.cif.enabled", false)) {
        String component_name = config.getString(
                "bolt.enrichment.cif.name", "DefaultCIFEnrichmentBolt");
        success = initializeCIFEnrichment(topology_name, component_name);
        messageComponents.add(component_name);
        errorComponents.add(component_name);
        System.out.println("[OpenSOC] ------Component " + component_name
                + " initialized with the following settings:");
        SettingsLoader.printConfigOptions((PropertiesConfiguration) config,
                "bolt.enrichment.cif");
    }
    if (config.getBoolean("bolt.enrichment.threat.enabled", false)) {
        String component_name = config.getString(
                "bolt.enrichment.threat.name", "DefaultThreatEnrichmentBolt");
        success = initializeThreatEnrichment(topology_name, component_name);
        messageComponents.add(component_name);
        errorComponents.add(component_name);
        System.out.println("[OpenSOC] ------Component " + component_name
                + " initialized with the following settings:");
        SettingsLoader.printConfigOptions((PropertiesConfiguration) config,
                "bolt.enrichment.threat");
    }
    if (config.getBoolean("bolt.alerts.enabled", false)) {
        String component_name = config.getString("bolt.alerts.name",
                "DefaultAlertsBolt");
        success = initializeAlerts(topology_name, component_name,
                config_path + "/topologies/" + subdir + "/alerts.xml",
                environment_identifier, topology_identifier);
        messageComponents.add(component_name);
        errorComponents.add(component_name);
        alertComponents.add(component_name);
        System.out.println("[OpenSOC] ------Component " + component_name
                + " initialized with the following settings:");
        SettingsLoader.printConfigOptions((PropertiesConfiguration) config,
                "bolt.alerts");
    }
    // FIX: supply defaults so a missing key disables the bolt instead of
    // throwing NoSuchElementException, matching every other guard above.
    if (config.getBoolean("bolt.alerts.indexing.enabled", false)
            && config.getBoolean("bolt.alerts.enabled", false)) {
        String component_name = config.getString(
                "bolt.alerts.indexing.name", "DefaultAlertsBolt");
        success = initializeAlertIndexing(component_name);
        terminalComponents.add(component_name);
        System.out.println("[OpenSOC] ------Component " + component_name
                + " initialized with the following settings:");
        SettingsLoader.printConfigOptions((PropertiesConfiguration) config,
                "bolt.alerts.indexing");
    }
    if (config.getBoolean("bolt.kafka.enabled", false)) {
        String component_name = config.getString("bolt.kafka.name",
                "DefaultKafkaBolt");
        success = initializeKafkaBolt(component_name);
        terminalComponents.add(component_name);
        System.out.println("[OpenSOC] Component " + component_name
                + " initialized");
        System.out.println("[OpenSOC] ------Component " + component_name
                + " initialized with the following settings:");
        SettingsLoader.printConfigOptions((PropertiesConfiguration) config,
                "bolt.kafka");
    }
    if (config.getBoolean("bolt.indexing.enabled", true)) {
        String component_name = config.getString("bolt.indexing.name",
                "DefaultIndexingBolt");
        success = initializeIndexingBolt(component_name);
        errorComponents.add(component_name);
        terminalComponents.add(component_name);
        System.out.println("[OpenSOC] ------Component " + component_name
                + " initialized with the following settings:");
        SettingsLoader.printConfigOptions((PropertiesConfiguration) config,
                "bolt.indexing");
    }
    if (config.getBoolean("bolt.hdfs.enabled", false)) {
        String component_name = config.getString("bolt.hdfs.name",
                "DefaultHDFSBolt");
        success = initializeHDFSBolt(topology_name, component_name);
        terminalComponents.add(component_name);
        System.out.println("[OpenSOC] ------Component " + component_name
                + " initialized with the following settings:");
        SettingsLoader.printConfigOptions((PropertiesConfiguration) config,
                "bolt.hdfs");
    }
    // FIX: default false instead of throwing when the key is absent.
    if (config.getBoolean("bolt.error.indexing.enabled", false)) {
        String component_name = config.getString(
                "bolt.error.indexing.name", "DefaultErrorIndexingBolt");
        success = initializeErrorIndexBolt(component_name);
        terminalComponents.add(component_name);
        System.out.println("[OpenSOC] ------Component " + component_name
                + " initialized with the following settings:");
        SettingsLoader.printConfigOptions((PropertiesConfiguration) config,
                "bolt.error");
    }
    if (config.containsKey("bolt.hbase.enabled")
            && config.getBoolean("bolt.hbase.enabled")) {
        String component_name = config.getString("bolt.hbase.name",
                "DefaultHbaseBolt");
        String shuffleType = config.getString("bolt.hbase.shuffle.type",
                "direct");
        success = initializeHbaseBolt(component_name, shuffleType);
        terminalComponents.add(component_name);
        System.out.println("[OpenSOC] ------Component " + component_name
                + " initialized with the following settings:");
        SettingsLoader.printConfigOptions((PropertiesConfiguration) config,
                "bolt.hbase");
    }
    // ---- Print a summary of the wired streams, then submit ----
    System.out.println("[OpenSOC] Topology Summary: ");
    System.out.println("[OpenSOC] Message Stream: "
            + printComponentStream(messageComponents));
    System.out.println("[OpenSOC] Alerts Stream: "
            + printComponentStream(alertComponents));
    System.out.println("[OpenSOC] Error Stream: "
            + printComponentStream(errorComponents));
    System.out.println("[OpenSOC] Data Stream: "
            + printComponentStream(dataComponents));
    System.out.println("[OpenSOC] Terminal Components: "
            + printComponentStream(terminalComponents));
    if (local_mode) {
        conf.setNumWorkers(config.getInt("num.workers"));
        // Keep local runs single-threaded per component for determinism.
        conf.setMaxTaskParallelism(1);
        LocalCluster cluster = new LocalCluster();
        cluster.submitTopology(topology_name, conf,
                builder.createTopology());
    } else {
        conf.setNumWorkers(config.getInt("num.workers"));
        conf.setNumAckers(config.getInt("num.ackers"));
        StormSubmitter.submitTopology(topology_name, conf,
                builder.createTopology());
    }
}
/**
 * Renders a component chain as "a -> b -> ... -> [TERMINAL COMPONENT]" for
 * the topology summary printed at startup.
 *
 * @param messageComponents ordered component names; may be empty
 * @return the arrow-joined chain ending in the terminal marker
 */
private String printComponentStream(List<String> messageComponents) {
    StringBuilder chain = new StringBuilder();
    for (int i = 0; i < messageComponents.size(); i++) {
        chain.append(messageComponents.get(i)).append(" -> ");
    }
    return chain.append("[TERMINAL COMPONENT]").toString();
}
/**
 * Wires the HBase bolt that persists tuples from the "pcap_data_stream".
 * Column families/qualifiers come from "bolt.hbase.table.fields" in the form
 * "cf1:cq11,cq12|cf2:cq21,...". The grouping is either a custom region-aware
 * partitioner or a fields grouping on "pcap_id", per {@code shuffleType}.
 *
 * @param name        component id to register the bolt under
 * @param shuffleType "custom_object" for the HBase region partitioner,
 *                    otherwise a direct fields grouping is used
 * @return true (errors print a stack trace and terminate the JVM, matching
 *         the other initializers in this class)
 */
public boolean initializeHbaseBolt(String name, String shuffleType) {
    try {
        String messageUpstreamComponent = dataComponents.get(dataComponents
                .size() - 1);
        System.out.println("[OpenSOC] ------" + name
                + " is initializing from " + messageUpstreamComponent);
        String tableName = config.getString("bolt.hbase.table.name")
                .toString();
        TupleTableConfig hbaseBoltConfig = new TupleTableConfig(tableName,
                config.getString("bolt.hbase.table.key.tuple.field.name")
                        .toString(), config.getString(
                        "bolt.hbase.table.timestamp.tuple.field.name")
                        .toString());
        String allColumnFamiliesColumnQualifiers = config.getString(
                "bolt.hbase.table.fields").toString();
        // This is expected in the form
        // "<cf1>:<cq11>,<cq12>,<cq13>|<cf2>:<cq21>,<cq22>|......."
        String[] tokenizedColumnFamiliesWithColumnQualifiers = StringUtils
                .split(allColumnFamiliesColumnQualifiers, "\\|");
        for (String tokenizedColumnFamilyWithColumnQualifiers : tokenizedColumnFamiliesWithColumnQualifiers) {
            String[] cfCqTokens = StringUtils.split(
                    tokenizedColumnFamilyWithColumnQualifiers, ":");
            String columnFamily = cfCqTokens[0];
            String[] columnQualifiers = StringUtils.split(cfCqTokens[1],
                    ",");
            for (String columnQualifier : columnQualifiers) {
                hbaseBoltConfig.addColumn(columnFamily, columnQualifier);
            }
        }
        // BUG FIX: the bolt must be created and registered exactly once,
        // AFTER all column families have been added. Previously this code
        // sat inside the loop above, so a configuration with more than one
        // column family would try to register the same component id twice
        // (which Storm rejects) and the first bolt would miss later columns.
        // hbaseBoltConfig.setDurability(Durability.valueOf(conf.get(
        // "storm.topology.pcap.bolt.hbase.durability").toString()));
        hbaseBoltConfig.setBatch(Boolean.valueOf(config.getString(
                "bolt.hbase.enable.batching").toString()));
        HBaseBolt hbase_bolt = new HBaseBolt(hbaseBoltConfig,
                config.getString("kafka.zk.list"),
                config.getString("kafka.zk.port"));
        hbase_bolt.setAutoAck(true);
        BoltDeclarer declarer = builder.setBolt(name, hbase_bolt,
                config.getInt("bolt.hbase.parallelism.hint"))
                .setNumTasks(config.getInt("bolt.hbase.num.tasks"));
        if (Grouping._Fields.CUSTOM_OBJECT.toString().equalsIgnoreCase(
                shuffleType)) {
            declarer.customGrouping(
                    messageUpstreamComponent,
                    "pcap_data_stream",
                    new HBaseStreamPartitioner(
                            hbaseBoltConfig.getTableName(),
                            0,
                            Integer.parseInt(conf
                                    .get("bolt.hbase.partitioner.region.info.refresh.interval.mins")
                                    .toString())));
        } else if (Grouping._Fields.DIRECT.toString().equalsIgnoreCase(
                shuffleType)) {
            declarer.fieldsGrouping(messageUpstreamComponent,
                    "pcap_data_stream", new Fields("pcap_id"));
        }
    } catch (Exception e) {
        e.printStackTrace();
        System.exit(0);
    }
    return true;
}
/**
 * Wires the Elasticsearch indexing bolt that receives the "error" stream of
 * every component in {@code errorComponents}.
 *
 * @param component_name component id to register the bolt under
 * @return true on success, false if initialization threw
 */
private boolean initializeErrorIndexBolt(String component_name) {
    try {
        Class loaded_class = Class.forName(config.getString("bolt.error.indexing.adapter"));
        IndexAdapter adapter = (IndexAdapter) loaded_class.newInstance();
        // Monthly indices by default.
        String dateFormat = "yyyy.MM";
        // BUG FIX: this previously consulted "bolt.alerts.indexing.timestamp"
        // (a copy-paste from the alerts bolt); the error bolt must read its
        // own key.
        if (config.containsKey("bolt.error.indexing.timestamp")) {
            dateFormat = config.getString("bolt.error.indexing.timestamp");
        }
        TelemetryIndexingBolt indexing_bolt = new TelemetryIndexingBolt()
                .withIndexIP(config.getString("es.ip"))
                .withIndexPort(config.getInt("es.port"))
                .withClusterName(config.getString("es.clustername"))
                .withIndexName(
                        config.getString("bolt.error.indexing.indexname"))
                .withDocumentName(
                        config.getString("bolt.error.indexing.documentname"))
                .withIndexTimestamp(dateFormat)
                .withBulk(config.getInt("bolt.error.indexing.bulk"))
                .withIndexAdapter(adapter)
                .withMetricConfiguration(config);
        BoltDeclarer declarer = builder
                .setBolt(
                        component_name,
                        indexing_bolt,
                        config.getInt("bolt.error.indexing.parallelism.hint"))
                .setNumTasks(config.getInt("bolt.error.indexing.num.tasks"));
        // Subscribe to the "error" stream of every error-producing component.
        for (String component : errorComponents)
            declarer.shuffleGrouping(component, "error");
        return true;
    } catch (Exception e) {
        e.printStackTrace();
        return false;
    }
}
/**
 * Wires the Kafka spout that feeds raw telemetry into the topology.
 * The consumer starts from the earliest available offset.
 *
 * @param name component id to register the spout under
 * @return true (errors print a stack trace and terminate the JVM)
 */
private boolean initializeKafkaSpout(String name) {
    try {
        BrokerHosts zk = new ZkHosts(config.getString("kafka.zk"));
        String input_topic = config.getString("spout.kafka.topic");
        // Consumer group id == topic name; ZK root left empty.
        SpoutConfig kafkaConfig = new SpoutConfig(zk, input_topic, "",
                input_topic);
        kafkaConfig.scheme = new SchemeAsMultiScheme(new RawScheme());
        // Replay from the beginning of the topic (was the convoluted
        // Boolean.valueOf("True") — a plain literal is equivalent).
        kafkaConfig.forceFromStart = true;
        kafkaConfig.startOffsetTime = -1;
        builder.setSpout(name, new KafkaSpout(kafkaConfig),
                config.getInt("spout.kafka.parallelism.hint")).setNumTasks(
                config.getInt("spout.kafka.num.tasks"));
    } catch (Exception e) {
        e.printStackTrace();
        System.exit(0);
    }
    return true;
}
/** Wires the parsing bolt for this concrete topology; returns true on success. */
abstract boolean initializeParsingBolt(String topology_name, String name);
/** Wires the synthetic test spout for this concrete topology; returns true on success. */
abstract boolean initializeTestingSpout(String name);
/**
 * Wires the GeoIP enrichment bolt, backed by a MySQL lookup table, onto the
 * "message" stream of the most recently added message component.
 *
 * @param topology_name used as the bolt's output field name
 * @param name          component id to register the bolt under
 * @return true (errors print a stack trace and terminate the JVM)
 */
private boolean initializeGeoEnrichment(String topology_name, String name) {
    try {
        // The last entry in messageComponents is the upstream of this bolt.
        String upstream = messageComponents.get(messageComponents.size() - 1);
        System.out.println("[OpenSOC] ------" + name
                + " is initializing from " + upstream);
        List<String> geoFields = new ArrayList<String>(
                Arrays.asList(config.getStringArray("bolt.enrichment.geo.fields")));
        GeoMysqlAdapter adapter = new GeoMysqlAdapter(
                config.getString("mysql.ip"), config.getInt("mysql.port"),
                config.getString("mysql.username"),
                config.getString("mysql.password"),
                config.getString("bolt.enrichment.geo.adapter.table"));
        GenericEnrichmentBolt bolt = new GenericEnrichmentBolt();
        bolt = bolt.withEnrichmentTag(
                config.getString("bolt.enrichment.geo.enrichment_tag"));
        bolt = bolt.withOutputFieldName(topology_name);
        bolt = bolt.withAdapter(adapter);
        bolt = bolt.withMaxTimeRetain(
                config.getInt("bolt.enrichment.geo.MAX_TIME_RETAIN_MINUTES"));
        bolt = bolt.withMaxCacheSize(
                config.getInt("bolt.enrichment.geo.MAX_CACHE_SIZE_OBJECTS_NUM"));
        bolt = bolt.withKeys(geoFields).withMetricConfiguration(config);
        // Field-group on "key" so messages for the same key hit one task.
        builder.setBolt(name, bolt,
                config.getInt("bolt.enrichment.geo.parallelism.hint"))
                .fieldsGrouping(upstream, "message", new Fields("key"))
                .setNumTasks(config.getInt("bolt.enrichment.geo.num.tasks"));
    } catch (Exception e) {
        e.printStackTrace();
        System.exit(0);
    }
    return true;
}
/**
 * Wires the known-hosts enrichment bolt, which tags messages whose source or
 * destination IP appears in a whitelist file loaded at startup.
 *
 * @param topology_name used as the bolt's output field name
 * @param name          component id to register the bolt under
 * @param hosts_path    path to the known-hosts whitelist file
 * @return true (errors print a stack trace and terminate the JVM)
 */
private boolean initializeHostsEnrichment(String topology_name,
        String name, String hosts_path) {
    try {
        // The last entry in messageComponents is this bolt's upstream.
        String messageUpstreamComponent = messageComponents
                .get(messageComponents.size() - 1);
        System.out.println("[OpenSOC] ------" + name
                + " is initializing from " + messageUpstreamComponent);
        // Enrich on both endpoint fields; the actual field names come from
        // the "source.ip" / "dest.ip" configuration keys.
        List<String> hosts_keys = new ArrayList<String>();
        hosts_keys.add(config.getString("source.ip"));
        hosts_keys.add(config.getString("dest.ip"));
        Map<String, JSONObject> known_hosts = SettingsLoader
                .loadKnownHosts(hosts_path);
        HostFromPropertiesFileAdapter host_adapter = new HostFromPropertiesFileAdapter(
                known_hosts);
        GenericEnrichmentBolt host_enrichment = new GenericEnrichmentBolt()
                .withEnrichmentTag(
                        config.getString("bolt.enrichment.host.enrichment_tag"))
                .withAdapter(host_adapter)
                .withMaxTimeRetain(
                        config.getInt("bolt.enrichment.host.MAX_TIME_RETAIN_MINUTES"))
                .withMaxCacheSize(
                        config.getInt("bolt.enrichment.host.MAX_CACHE_SIZE_OBJECTS_NUM"))
                .withOutputFieldName(topology_name).withKeys(hosts_keys)
                .withMetricConfiguration(config);
        // Field-group on "key" so messages for the same key hit one task.
        builder.setBolt(name, host_enrichment,
                config.getInt("bolt.enrichment.host.parallelism.hint"))
                .fieldsGrouping(messageUpstreamComponent, "message",
                        new Fields("key"))
                .setNumTasks(
                        config.getInt("bolt.enrichment.host.num.tasks"));
    } catch (Exception e) {
        e.printStackTrace();
        System.exit(0);
    }
    return true;
}
/**
 * Wires the alerts bolt. The concrete {@link AlertsAdapter} class is named by
 * "bolt.alerts.adapter" and constructed reflectively with a Map of settings
 * whose keys are prefixed by that class name in the configuration.
 *
 * @param topology_name          topology name (NOTE(review): currently unused here)
 * @param name                   component id to register the bolt under
 * @param alerts_path            path to alerts.xml (NOTE(review): currently unused here)
 * @param environment_identifier environment portion of the alerts identifier
 * @param topology_identifier    topology portion of the alerts identifier
 * @return true (errors print a stack trace and terminate the JVM)
 */
@SuppressWarnings("rawtypes")
private boolean initializeAlerts(String topology_name, String name,
        String alerts_path, JSONObject environment_identifier,
        JSONObject topology_identifier) {
    try {
        // Load the adapter class and pick the Map-taking constructor.
        Class loaded_class = Class.forName(config.getString("bolt.alerts.adapter"));
        Constructor constructor = loaded_class.getConstructor(new Class[] { Map.class});
        // Adapter settings are config keys prefixed with the adapter class name.
        Map<String, String> settings = SettingsLoader.getConfigOptions((PropertiesConfiguration)config, config.getString("bolt.alerts.adapter") + ".");
        System.out.println("Adapter Settings: ");
        SettingsLoader.printOptionalSettings(settings);
        AlertsAdapter alerts_adapter = (AlertsAdapter) constructor.newInstance(settings);
        // The last entry in messageComponents is this bolt's upstream.
        String messageUpstreamComponent = messageComponents
                .get(messageComponents.size() - 1);
        System.out.println("[OpenSOC] ------" + name
                + " is initializing from " + messageUpstreamComponent);
        JSONObject alerts_identifier = SettingsLoader
                .generateAlertsIdentifier(environment_identifier,
                        topology_identifier);
        // NOTE(review): cache size (1000) and retention (3600) are hard-coded
        // here, unlike the enrichment bolts which read them from config.
        TelemetryAlertsBolt alerts_bolt = new TelemetryAlertsBolt()
                .withIdentifier(alerts_identifier).withMaxCacheSize(1000)
                .withMaxTimeRetain(3600).withAlertsAdapter(alerts_adapter)
                .withOutputFieldName("message")
                .withMetricConfiguration(config);
        builder.setBolt(name, alerts_bolt,
                config.getInt("bolt.alerts.parallelism.hint"))
                .fieldsGrouping(messageUpstreamComponent, "message",
                        new Fields("key"))
                .setNumTasks(config.getInt("bolt.alerts.num.tasks"));
    } catch (Exception e) {
        e.printStackTrace();
        System.exit(0);
    }
    return true;
}
/**
 * Wires the Elasticsearch indexing bolt for the "alert" stream produced by
 * the alerts bolt.
 *
 * @param name component id passed in by initTopology
 * @return true on success, false if initialization threw
 */
private boolean initializeAlertIndexing(String name) {
    try{
        // The last entry in alertComponents is this bolt's upstream.
        String messageUpstreamComponent = alertComponents.get(alertComponents
                .size() - 1);
        System.out.println("[OpenSOC] ------" + name + " is initializing from "
                + messageUpstreamComponent);
        Class loaded_class = Class.forName(config.getString("bolt.alerts.indexing.adapter"));
        IndexAdapter adapter = (IndexAdapter) loaded_class.newInstance();
        // Daily indices by default.
        String dateFormat = "yyyy.MM.dd";
        if (config.containsKey("bolt.alerts.indexing.timestamp")) {
            dateFormat = config.getString("bolt.alerts.indexing.timestamp");
        }
        TelemetryIndexingBolt indexing_bolt = new TelemetryIndexingBolt()
                .withIndexIP(config.getString("es.ip"))
                .withIndexPort(config.getInt("es.port"))
                .withClusterName(config.getString("es.clustername"))
                .withIndexName(
                        config.getString("bolt.alerts.indexing.indexname"))
                .withDocumentName(
                        config.getString("bolt.alerts.indexing.documentname"))
                .withIndexTimestamp(dateFormat)
                .withBulk(config.getInt("bolt.alerts.indexing.bulk"))
                .withIndexAdapter(adapter)
                .withMetricConfiguration(config);
        // NOTE(review): the bolt is registered under the config-derived
        // alerts_name rather than the 'name' parameter, and the parallelism /
        // num.tasks keys below are the generic "bolt.indexing.*" ones, not
        // "bolt.alerts.indexing.*" — looks like a copy-paste; confirm against
        // deployed topology.conf files before changing.
        String alerts_name = config.getString("bolt.alerts.indexing.name");
        builder.setBolt(alerts_name, indexing_bolt,
                config.getInt("bolt.indexing.parallelism.hint"))
                .shuffleGrouping(messageUpstreamComponent, "alert")
                .setNumTasks(config.getInt("bolt.indexing.num.tasks"));
    }
    catch(Exception e)
    {
        e.printStackTrace();
        return false;
    }
    return true;
}
/**
 * Wires the Kafka producer bolt that republishes enriched messages to the
 * output topic named by "bolt.kafka.topic".
 *
 * @param name component id to register the bolt under
 * @return true (errors print a stack trace and terminate the JVM)
 */
private boolean initializeKafkaBolt(String name) {
    try {
        // The last entry in messageComponents is this bolt's upstream.
        String upstream = messageComponents.get(messageComponents.size() - 1);
        System.out.println("[OpenSOC] ------" + name
                + " is initializing from " + upstream);
        // Producer settings are passed to the KafkaBolt through the Storm conf.
        Map<String, String> brokerProps = new HashMap<String, String>();
        brokerProps.put("zk.connect", config.getString("kafka.zk"));
        brokerProps.put("metadata.broker.list", config.getString("kafka.br"));
        brokerProps.put("serializer.class",
                "com.opensoc.json.serialization.JSONKafkaSerializer");
        brokerProps.put("key.serializer.class",
                "kafka.serializer.StringEncoder");
        conf.put("kafka.broker.properties", brokerProps);
        conf.put("topic", config.getString("bolt.kafka.topic"));
        builder.setBolt(name, new KafkaBolt<String, JSONObject>(),
                config.getInt("bolt.kafka.parallelism.hint"))
                .shuffleGrouping(upstream, "message")
                .setNumTasks(config.getInt("bolt.kafka.num.tasks"));
    } catch (Exception e) {
        e.printStackTrace();
        System.exit(0);
    }
    return true;
}
/**
 * Wires the WHOIS enrichment bolt, backed by an HBase lookup table, onto the
 * "message" stream of the most recently added message component.
 *
 * @param topology_name used as the bolt's output field name
 * @param name          component id to register the bolt under
 * @return true (errors print a stack trace and terminate the JVM)
 */
private boolean initializeWhoisEnrichment(String topology_name, String name) {
    try {
        // The last entry in messageComponents is this bolt's upstream.
        String upstream = messageComponents.get(messageComponents.size() - 1);
        System.out.println("[OpenSOC] ------" + name
                + " is initializing from " + upstream);
        List<String> whoisFields = new ArrayList<String>(Arrays.asList(
                config.getString("bolt.enrichment.whois.fields").split(",")));
        EnrichmentAdapter adapter = new WhoisHBaseAdapter(
                config.getString("bolt.enrichment.whois.hbase.table.name"),
                config.getString("kafka.zk.list"),
                config.getString("kafka.zk.port"));
        GenericEnrichmentBolt bolt = new GenericEnrichmentBolt();
        bolt = bolt.withEnrichmentTag(
                config.getString("bolt.enrichment.whois.enrichment_tag"));
        bolt = bolt.withOutputFieldName(topology_name);
        bolt = bolt.withAdapter(adapter);
        bolt = bolt.withMaxTimeRetain(
                config.getInt("bolt.enrichment.whois.MAX_TIME_RETAIN_MINUTES"));
        bolt = bolt.withMaxCacheSize(
                config.getInt("bolt.enrichment.whois.MAX_CACHE_SIZE_OBJECTS_NUM"));
        bolt = bolt.withKeys(whoisFields).withMetricConfiguration(config);
        // Field-group on "key" so messages for the same key hit one task.
        builder.setBolt(name, bolt,
                config.getInt("bolt.enrichment.whois.parallelism.hint"))
                .fieldsGrouping(upstream, "message", new Fields("key"))
                .setNumTasks(config.getInt("bolt.enrichment.whois.num.tasks"));
    } catch (Exception e) {
        e.printStackTrace();
        System.exit(0);
    }
    return true;
}
/**
 * Wires the main Elasticsearch indexing bolt for enriched messages. The
 * adapter class is named by "bolt.indexing.adapter" and may receive optional
 * settings from keys prefixed "optional.settings.bolt.index.search.".
 *
 * @param name component id to register the bolt under
 * @return true (errors print a stack trace and terminate the JVM)
 */
private boolean initializeIndexingBolt(String name) {
    try {
        // The last entry in messageComponents is this bolt's upstream.
        String messageUpstreamComponent = messageComponents
                .get(messageComponents.size() - 1);
        System.out.println("[OpenSOC] ------" + name
                + " is initializing from " + messageUpstreamComponent);
        Class loaded_class = Class.forName(config.getString("bolt.indexing.adapter"));
        IndexAdapter adapter = (IndexAdapter) loaded_class.newInstance();
        Map<String, String> settings = SettingsLoader.getConfigOptions((PropertiesConfiguration)config, "optional.settings.bolt.index.search.");
        if(settings != null && settings.size() > 0)
        {
            adapter.setOptionalSettings(settings);
            // FIX: corrected typo in the log message ("picket" -> "picked").
            System.out.println("[OpenSOC] Index Bolt picked up optional settings:");
            SettingsLoader.printOptionalSettings(settings);
        }
        // dateFormat defaults to hourly if not specified
        String dateFormat = "yyyy.MM.dd.hh";
        if (config.containsKey("bolt.indexing.timestamp")) {
            dateFormat = config.getString("bolt.indexing.timestamp");
        }
        TelemetryIndexingBolt indexing_bolt = new TelemetryIndexingBolt()
                .withIndexIP(config.getString("es.ip"))
                .withIndexPort(config.getInt("es.port"))
                .withClusterName(config.getString("es.clustername"))
                .withIndexName(config.getString("bolt.indexing.indexname"))
                .withIndexTimestamp(dateFormat)
                .withDocumentName(
                        config.getString("bolt.indexing.documentname"))
                .withBulk(config.getInt("bolt.indexing.bulk"))
                .withIndexAdapter(adapter)
                .withMetricConfiguration(config);
        // Field-group on "key" so messages for the same key hit one task.
        builder.setBolt(name, indexing_bolt,
                config.getInt("bolt.indexing.parallelism.hint"))
                .fieldsGrouping(messageUpstreamComponent, "message",
                        new Fields("key"))
                .setNumTasks(config.getInt("bolt.indexing.num.tasks"));
    } catch (Exception e) {
        e.printStackTrace();
        System.exit(0);
    }
    return true;
}
private boolean initializeThreatEnrichment(String topology_name, String name) {
    try {
        // Subscribe to the most recently registered message component.
        String messageUpstreamComponent = messageComponents
                .get(messageComponents.size() - 1);

        System.out.println("[OpenSOC] ------" + name
                + " is initializing from " + messageUpstreamComponent);

        // Message fields to be looked up against the threat-intel HBase table.
        String[] fields = config.getStringArray("bolt.enrichment.threat.fields");
        List<String> threat_keys = new ArrayList<String>(Arrays.asList(fields));

        // Fix: the original chained withEnrichmentTag() twice with the same
        // config value; a single call is sufficient.
        GenericEnrichmentBolt threat_enrichment = new GenericEnrichmentBolt()
                .withEnrichmentTag(
                        config.getString("bolt.enrichment.threat.enrichment_tag"))
                .withAdapter(
                        new ThreatHbaseAdapter(config
                                .getString("kafka.zk.list"), config
                                .getString("kafka.zk.port"), config
                                .getString("bolt.enrichment.threat.tablename")))
                .withOutputFieldName(topology_name)
                .withKeys(threat_keys)
                .withMaxTimeRetain(
                        config.getInt("bolt.enrichment.threat.MAX_TIME_RETAIN_MINUTES"))
                .withMaxCacheSize(
                        config.getInt("bolt.enrichment.threat.MAX_CACHE_SIZE_OBJECTS_NUM"))
                .withMetricConfiguration(config);

        builder.setBolt(name, threat_enrichment,
                config.getInt("bolt.enrichment.threat.parallelism.hint"))
                .fieldsGrouping(messageUpstreamComponent, "message",
                        new Fields("key"))
                .setNumTasks(config.getInt("bolt.enrichment.threat.num.tasks"));
    } catch (Exception e) {
        e.printStackTrace();
        System.exit(0);
    }
    return true;
}
private boolean initializeCIFEnrichment(String topology_name, String name) {
    try {
        // Subscribe to the most recently registered message component.
        String messageUpstreamComponent = messageComponents
                .get(messageComponents.size() - 1);

        System.out.println("[OpenSOC] ------" + name
                + " is initializing from " + messageUpstreamComponent);

        // Collect every message field (IP-, host- and e-mail-based, in that
        // order) that should be checked against the CIF intel table.
        List<String> cif_keys = new ArrayList<String>();
        for (String suffix : new String[] { "ip", "host", "email" }) {
            String[] configured = config
                    .getStringArray("bolt.enrichment.cif.fields." + suffix);
            cif_keys.addAll(Arrays.asList(configured));
        }

        GenericEnrichmentBolt cif_enrichment = new GenericEnrichmentBolt()
                .withEnrichmentTag(
                        config.getString("bolt.enrichment.cif.enrichment_tag"))
                .withAdapter(
                        new CIFHbaseAdapter(config.getString("kafka.zk.list"),
                                config.getString("kafka.zk.port"),
                                config.getString("bolt.enrichment.cif.tablename")))
                .withOutputFieldName(topology_name)
                .withKeys(cif_keys)
                .withMaxTimeRetain(
                        config.getInt("bolt.enrichment.cif.MAX_TIME_RETAIN_MINUTES"))
                .withMaxCacheSize(
                        config.getInt("bolt.enrichment.cif.MAX_CACHE_SIZE_OBJECTS_NUM"))
                .withMetricConfiguration(config);

        builder.setBolt(name, cif_enrichment,
                config.getInt("bolt.enrichment.cif.parallelism.hint"))
                .fieldsGrouping(messageUpstreamComponent, "message",
                        new Fields("key"))
                .setNumTasks(config.getInt("bolt.enrichment.cif.num.tasks"));
    } catch (Exception e) {
        e.printStackTrace();
        System.exit(0);
    }
    return true;
}
private boolean initializeHDFSBolt(String topology_name, String name) {
    try {
        // Subscribe to the most recently registered message component.
        String messageUpstreamComponent = messageComponents
                .get(messageComponents.size() - 1);

        System.out.println("[OpenSOC] ------" + name
                + " is initializing from " + messageUpstreamComponent);

        // Raw message field written with the configured delimiter.
        // (config.getString() already returns a String, so the redundant
        // .toString() calls of the original were dropped throughout.)
        RecordFormat format = new DelimitedRecordFormat()
                .withFieldDelimiter(config.getString("bolt.hdfs.field.delimiter"))
                .withFields(new Fields("message"));

        // Sync the file system after every x number of tuples.
        // parseInt/parseFloat avoid the needless boxing of Integer.valueOf /
        // Float.valueOf; a malformed value still raises NumberFormatException,
        // handled by the catch below exactly as before.
        SyncPolicy syncPolicy = new CountSyncPolicy(
                Integer.parseInt(config.getString("bolt.hdfs.batch.size")));

        // Rotate files once they reach the configured size (in MB).
        FileRotationPolicy rotationPolicy = new FileSizeRotationPolicy(
                Float.parseFloat(config.getString("bolt.hdfs.file.rotation.size.in.mb")),
                Units.MB);

        // Files are written under the work-in-progress path and moved to the
        // finished path after rotation.
        FileNameFormat fileNameFormat = new DefaultFileNameFormat()
                .withPath(config.getString("bolt.hdfs.wip.file.path"));
        MoveFileAction moveFileAction = new MoveFileAction()
                .toDestination(config.getString("bolt.hdfs.finished.file.path"));

        HdfsBolt hdfsBolt = new HdfsBolt()
                .withFsUrl(config.getString("bolt.hdfs.file.system.url"))
                .withFileNameFormat(fileNameFormat)
                .withRecordFormat(format)
                .withRotationPolicy(rotationPolicy)
                .withSyncPolicy(syncPolicy)
                .addRotationAction(moveFileAction);

        // Compression is optional; only configure a codec when one is given.
        if (config.getString("bolt.hdfs.compression.codec.class") != null) {
            hdfsBolt.withCompressionCodec(
                    config.getString("bolt.hdfs.compression.codec.class"));
        }

        builder.setBolt(name, hdfsBolt,
                config.getInt("bolt.hdfs.parallelism.hint"))
                .shuffleGrouping(messageUpstreamComponent, "message")
                .setNumTasks(config.getInt("bolt.hdfs.num.tasks"));
    } catch (Exception e) {
        e.printStackTrace();
        System.exit(0);
    }
    return true;
}
}
| OpenSOC/opensoc-streaming | OpenSOC-Topologies/src/main/java/com/opensoc/topology/runner/TopologyRunner.java | Java | apache-2.0 | 36,860 |
/***
Wrapper/Helper Class for datagrid based on jQuery Datatable Plugin
***/
var Datatable = function () {
var tableOptions; // main options
var dataTable; // datatable object
var table; // actual table jquery object
var tableContainer; // actual table container object
var tableWrapper; // actual table wrapper jquery object
var tableInitialized = false;
var ajaxParams = {}; // set filter mode
// Updates the "_TOTAL_ records selected" label in the table toolbar.
// Counts the checked row checkboxes (first cell of each body row) and
// substitutes the count into the configured sGroupActions template;
// clears the label entirely when nothing is selected.
var countSelectedRecords = function() {
    var selected = $('tbody > tr > td:nth-child(1) input[type="checkbox"]:checked', table).size();
    // Language template, e.g. "_TOTAL_ records selected: ".
    var text = tableOptions.dataTable.oLanguage.sGroupActions;
    if (selected > 0) {
        $('.table-group-actions > span', tableWrapper).text(text.replace("_TOTAL_", selected));
    } else {
        $('.table-group-actions > span', tableWrapper).text("");
    }
}
return {
//main function to initiate the module
init: function (options) {
if (!$().dataTable) {
return;
}
var the = this;
// default settings
options = $.extend(true, {
src: "", // actual table
filterApplyAction: "filter",
filterCancelAction: "filter_cancel",
resetGroupActionInputOnSuccess: true,
dataTable: {
"sDom" : "<'row'<'col-md-8 col-sm-12'pli><'col-md-4 col-sm-12'<'table-group-actions pull-right'>>r><'table-scrollable't><'row'<'col-md-8 col-sm-12'pli><'col-md-4 col-sm-12'>r>>", // datatable layout
"aLengthMenu": [ // set available records per page
[10, 25, 50, 100, -1],
[10, 25, 50, 100, "All"]
],
"iDisplayLength": 10, // default records per page
"oLanguage": { // language settings
"sProcessing": '<img src="' + Metronic.getGlobalImgPath() + 'loading-spinner-grey.gif"/><span> Loading...</span>',
"sLengthMenu": "<span class='seperator'>|</span>View _MENU_ records",
"sInfo": "<span class='seperator'>|</span>Found total _TOTAL_ records",
"sInfoEmpty": "No records found to show",
"sGroupActions": "_TOTAL_ records selected: ",
"sAjaxRequestGeneralError": "Could not complete request. Please check your internet connection",
"sEmptyTable": "No data available in table",
"sZeroRecords": "No matching records found",
"oPaginate": {
"sPrevious": "Prev",
"sNext": "Next",
"sPage": "Page",
"sPageOf": "of"
}
},
"aoColumnDefs" : [{ // define columns sorting options(by default all columns are sortable extept the first checkbox column)
'bSortable' : false,
'aTargets' : [ 0 ]
}],
"bAutoWidth": false, // disable fixed width and enable fluid table
"bSortCellsTop": true, // make sortable only the first row in thead
"sPaginationType": "bootstrap_extended", // pagination type(bootstrap, bootstrap_full_number or bootstrap_extended)
"bProcessing": true, // enable/disable display message box on record load
"bServerSide": true, // enable/disable server side ajax loading
"sAjaxSource": "", // define ajax source URL
"sServerMethod": "POST",
// handle ajax request
"fnServerData": function ( sSource, aoData, fnCallback, oSettings ) {
oSettings.jqXHR = $.ajax( {
"dataType": 'json',
"type": "POST",
"url": sSource,
"data": aoData,
"success": function(res, textStatus, jqXHR) {
if (res.sMessage) {
Metronic.alert({type: (res.sStatus == 'OK' ? 'success' : 'danger'), icon: (res.sStatus == 'OK' ? 'check' : 'warning'), message: res.sMessage, container: tableWrapper, place: 'prepend'});
}
if (res.sStatus) {
if (tableOptions.resetGroupActionInputOnSuccess) {
$('.table-group-action-input', tableWrapper).val("");
}
}
if ($('.group-checkable', table).size() === 1) {
$('.group-checkable', table).attr("checked", false);
$.uniform.update($('.group-checkable', table));
}
if (tableOptions.onSuccess) {
tableOptions.onSuccess.call(undefined, the);
}
fnCallback(res, textStatus, jqXHR);
},
"error": function() {
if (tableOptions.onError) {
tableOptions.onError.call(undefined, the);
}
Metronic.alert({type: 'danger', icon: 'warning', message: tableOptions.dataTable.oLanguage.sAjaxRequestGeneralError, container: tableWrapper, place: 'prepend'});
$('.dataTables_processing', tableWrapper).remove();
}
} );
},
// pass additional parameter
"fnServerParams": function ( aoData ) {
//here can be added an external ajax request parameters.
$.each(ajaxParams, function( key, value ) {
aoData.push({"name" : key, "value": value});
});
},
"fnDrawCallback": function( oSettings ) { // run some code on table redraw
if (tableInitialized === false) { // check if table has been initialized
tableInitialized = true; // set table initialized
table.show(); // display table
}
Metronic.initUniform($('input[type="checkbox"]', table)); // reinitialize uniform checkboxes on each table reload
countSelectedRecords(); // reset selected records indicator
}
}
}, options);
tableOptions = options;
// create table's jquery object
table = $(options.src);
tableContainer = table.parents(".table-container");
// apply the special class that used to restyle the default datatable
$.fn.dataTableExt.oStdClasses.sWrapper = $.fn.dataTableExt.oStdClasses.sWrapper + " dataTables_extended_wrapper";
// initialize a datatable
dataTable = table.dataTable(options.dataTable);
tableWrapper = table.parents('.dataTables_wrapper');
// modify table per page dropdown input by appliying some classes
$('.dataTables_length select', tableWrapper).addClass("form-control input-xsmall input-sm");
// build table group actions panel
if ($('.table-actions-wrapper', tableContainer).size() === 1) {
$('.table-group-actions', tableWrapper).html($('.table-actions-wrapper', tableContainer).html()); // place the panel inside the wrapper
$('.table-actions-wrapper', tableContainer).remove(); // remove the template container
}
// handle group checkboxes check/uncheck
$('.group-checkable', table).change(function () {
var set = $('tbody > tr > td:nth-child(1) input[type="checkbox"]', table);
var checked = $(this).is(":checked");
$(set).each(function () {
$(this).attr("checked", checked);
});
$.uniform.update(set);
countSelectedRecords();
});
// handle row's checkbox click
table.on('change', 'tbody > tr > td:nth-child(1) input[type="checkbox"]', function(){
countSelectedRecords();
});
// handle filter submit button click
table.on('click', '.filter-submit', function(e){
e.preventDefault();
the.setAjaxParam("sAction", tableOptions.filterApplyAction);
// get all typeable inputs
$('textarea.form-filter, select.form-filter, input.form-filter:not([type="radio"],[type="checkbox"])', table).each(function(){
the.setAjaxParam($(this).attr("name"), $(this).val());
});
// get all checkable inputs
$('input.form-filter[type="checkbox"]:checked, input.form-filter[type="radio"]:checked', table).each(function(){
the.setAjaxParam($(this).attr("name"), $(this).val());
});
dataTable.fnDraw();
});
// handle filter cancel button click
table.on('click', '.filter-cancel', function(e){
e.preventDefault();
$('textarea.form-filter, select.form-filter, input.form-filter', table).each(function(){
$(this).val("");
});
$('input.form-filter[type="checkbox"]', table).each(function(){
$(this).attr("checked", false);
});
the.clearAjaxParams();
the.setAjaxParam("sAction", tableOptions.filterCancelAction);
dataTable.fnDraw();
});
},
getSelectedRowsCount: function() {
return $('tbody > tr > td:nth-child(1) input[type="checkbox"]:checked', table).size();
},
getSelectedRows: function() {
var rows = [];
$('tbody > tr > td:nth-child(1) input[type="checkbox"]:checked', table).each(function(){
rows.push({name: $(this).attr("name"), value: $(this).val()});
});
return rows;
},
addAjaxParam: function(name, value) {
ajaxParams[name] = value;
},
setAjaxParam: function(name, value) {
ajaxParams[name] = value;
},
clearAjaxParams: function(name, value) {
ajaxParams = [];
},
getDataTable: function() {
return dataTable;
},
getTableWrapper: function() {
return tableWrapper;
},
gettableContainer: function() {
return tableContainer;
},
getTable: function() {
return table;
}
};
}; | zeickan/Infected-Engine | static/global/scripts/datatable.js | JavaScript | apache-2.0 | 11,607 |
"""Support for the Foobot indoor air quality monitor."""
import asyncio
from datetime import timedelta
import logging
import aiohttp
from foobot_async import FoobotClient
import voluptuous as vol
from homeassistant.const import (
ATTR_TEMPERATURE,
ATTR_TIME,
CONF_TOKEN,
CONF_USERNAME,
TEMP_CELSIUS,
)
from homeassistant.exceptions import PlatformNotReady
from homeassistant.helpers.aiohttp_client import async_get_clientsession
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.config_validation import PLATFORM_SCHEMA
from homeassistant.helpers.entity import Entity
from homeassistant.util import Throttle
_LOGGER = logging.getLogger(__name__)
ATTR_HUMIDITY = "humidity"
ATTR_PM2_5 = "PM2.5"
ATTR_CARBON_DIOXIDE = "CO2"
ATTR_VOLATILE_ORGANIC_COMPOUNDS = "VOC"
ATTR_FOOBOT_INDEX = "index"
SENSOR_TYPES = {
"time": [ATTR_TIME, "s"],
"pm": [ATTR_PM2_5, "µg/m3", "mdi:cloud"],
"tmp": [ATTR_TEMPERATURE, TEMP_CELSIUS, "mdi:thermometer"],
"hum": [ATTR_HUMIDITY, "%", "mdi:water-percent"],
"co2": [ATTR_CARBON_DIOXIDE, "ppm", "mdi:periodic-table-co2"],
"voc": [ATTR_VOLATILE_ORGANIC_COMPOUNDS, "ppb", "mdi:cloud"],
"allpollu": [ATTR_FOOBOT_INDEX, "%", "mdi:percent"],
}
SCAN_INTERVAL = timedelta(minutes=10)
PARALLEL_UPDATES = 1
TIMEOUT = 10
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{vol.Required(CONF_TOKEN): cv.string, vol.Required(CONF_USERNAME): cv.string}
)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
    """Set up the Foobot sensors associated with the account.

    Creates one FoobotSensor per device and per entry of SENSOR_TYPES
    (the synthetic "time" type is skipped).  Transient API failures raise
    PlatformNotReady so Home Assistant retries the setup later; a
    permanent client error aborts the setup silently (after logging).
    """
    token = config.get(CONF_TOKEN)
    username = config.get(CONF_USERNAME)

    client = FoobotClient(
        token, username, async_get_clientsession(hass), timeout=TIMEOUT
    )
    dev = []
    try:
        devices = await client.get_devices()
        _LOGGER.debug("The following devices were found: %s", devices)
        for device in devices:
            # One shared, throttled data fetcher per physical device; all of
            # that device's sensor entities read from it.
            foobot_data = FoobotData(client, device["uuid"])
            for sensor_type in SENSOR_TYPES:
                if sensor_type == "time":
                    continue
                foobot_sensor = FoobotSensor(foobot_data, device, sensor_type)
                dev.append(foobot_sensor)
    except (
        aiohttp.client_exceptions.ClientConnectorError,
        asyncio.TimeoutError,
        FoobotClient.TooManyRequests,
        FoobotClient.InternalError,
    ) as err:
        _LOGGER.exception("Failed to connect to foobot servers.")
        # Chain the original exception so the retry log shows the root cause.
        raise PlatformNotReady from err
    except FoobotClient.ClientError:
        _LOGGER.error("Failed to fetch data from foobot servers.")
        return
    async_add_entities(dev, True)
class FoobotSensor(Entity):
    """Sensor entity exposing a single measurement type of a Foobot device."""

    def __init__(self, data, device, sensor_type):
        """Keep a reference to the shared data fetcher and per-type metadata."""
        self._uuid = device["uuid"]
        self.foobot_data = data
        self.type = sensor_type
        self._name = f"Foobot {device['name']} {SENSOR_TYPES[sensor_type][0]}"
        self._unit_of_measurement = SENSOR_TYPES[sensor_type][1]

    @property
    def name(self):
        """Return the entity name."""
        return self._name

    @property
    def icon(self):
        """Return the frontend icon for this sensor type."""
        return SENSOR_TYPES[self.type][2]

    @property
    def state(self):
        """Return the current reading, or None when no data is available."""
        try:
            return self.foobot_data.data[self.type]
        except (KeyError, TypeError):
            return None

    @property
    def unique_id(self):
        """Return a unique id combining the device uuid and sensor type."""
        return f"{self._uuid}_{self.type}"

    @property
    def unit_of_measurement(self):
        """Return the unit of measurement for this sensor type."""
        return self._unit_of_measurement

    async def async_update(self):
        """Refresh the shared device data (throttled by FoobotData)."""
        await self.foobot_data.async_update()
class FoobotData(Entity):
    """Get data from Foobot API.

    One instance is shared by all sensor entities of a single device;
    ``async_update`` is throttled so the API is hit at most once per
    SCAN_INTERVAL regardless of how many entities request a refresh.
    """

    def __init__(self, client, uuid):
        """Initialize the data object.

        :param client: authenticated FoobotClient used for API calls.
        :param uuid: identifier of the device this object fetches data for.
        """
        self._client = client
        self._uuid = uuid
        # Latest readings keyed by sensor type; empty until first fetch.
        self.data = {}

    @Throttle(SCAN_INTERVAL)
    async def async_update(self):
        """Get the data from Foobot API.

        Returns True on success, False when the fetch failed (the previous
        readings in ``self.data`` are then kept as-is).
        """
        interval = SCAN_INTERVAL.total_seconds()
        try:
            # Ask for the most recent sample within the last scan interval.
            response = await self._client.get_last_data(
                self._uuid, interval, interval + 1
            )
        except (
            aiohttp.client_exceptions.ClientConnectorError,
            asyncio.TimeoutError,
            self._client.TooManyRequests,
            self._client.InternalError,
        ):
            _LOGGER.debug("Couldn't fetch data")
            return False
        _LOGGER.debug("The data response is: %s", response)
        # Keep only the newest sample, rounded to one decimal place.
        self.data = {k: round(v, 1) for k, v in response[0].items()}
        return True
| leppa/home-assistant | homeassistant/components/foobot/sensor.py | Python | apache-2.0 | 4,894 |
/*
Copyright 2016 - 2017 Huawei Technologies Co., Ltd. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
import { Component, OnInit } from '@angular/core';
@Component({
selector: 'app-git',
templateUrl: './git.component.html',
styleUrls: ['./git.component.scss']
})
/**
 * Placeholder component for the "git" repository view.
 * No initialization logic is required yet; ngOnInit is kept to satisfy
 * the OnInit contract and as a hook for future setup code.
 */
export class GitComponent implements OnInit {

  constructor() { }

  ngOnInit() {
  }

}
| victorwangyang/containerops | tenant/angular4/src/app/repository/git/git.component.ts | TypeScript | apache-2.0 | 863 |
import six
from hamcrest.core.base_matcher import Matcher
from hamcrest.core.core.isequal import equal_to
__author__ = "Jon Reid"
__copyright__ = "Copyright 2011 hamcrest.org"
__license__ = "BSD, see License.txt"
import types
def wrap_matcher(x):
    """Wraps argument in a matcher, if necessary.

    :returns: the argument as-is if it is already a matcher, otherwise wrapped
        in an :py:func:`~hamcrest.core.core.isequal.equal_to` matcher.
    """
    return x if isinstance(x, Matcher) else equal_to(x)
def is_matchable_type(expected_type):
    """Return True if expected_type is a class (new- or, under Python 2,
    old-style), or a non-empty tuple whose elements are all themselves
    matchable types (checked recursively)."""
    # ``six.class_types`` covers old-style classes on Python 2; on Python 3
    # it is simply ``(type,)``.
    if isinstance(expected_type, (type,) + tuple(six.class_types)):
        return True
    return (isinstance(expected_type, tuple)
            and bool(expected_type)
            and all(is_matchable_type(item) for item in expected_type))
| axbaretto/beam | sdks/python/.tox/py27gcp/lib/python2.7/site-packages/hamcrest/core/helpers/wrap_matcher.py | Python | apache-2.0 | 880 |
// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/parsing/rewriter.h"
#include "src/ast/ast.h"
#include "src/ast/scopes.h"
#include "src/parsing/parse-info.h"
#include "src/parsing/parser.h"
namespace v8 {
namespace internal {
class Processor final : public AstVisitor<Processor> {
public:
Processor(Isolate* isolate, DeclarationScope* closure_scope, Variable* result,
AstValueFactory* ast_value_factory)
: result_(result),
result_assigned_(false),
replacement_(nullptr),
is_set_(false),
breakable_(false),
zone_(ast_value_factory->zone()),
closure_scope_(closure_scope),
factory_(ast_value_factory) {
DCHECK_EQ(closure_scope, closure_scope->GetClosureScope());
InitializeAstVisitor(isolate);
}
Processor(Parser* parser, DeclarationScope* closure_scope, Variable* result,
AstValueFactory* ast_value_factory)
: result_(result),
result_assigned_(false),
replacement_(nullptr),
is_set_(false),
breakable_(false),
zone_(ast_value_factory->zone()),
closure_scope_(closure_scope),
factory_(ast_value_factory) {
DCHECK_EQ(closure_scope, closure_scope->GetClosureScope());
InitializeAstVisitor(parser->stack_limit());
}
void Process(ZoneList<Statement*>* statements);
bool result_assigned() const { return result_assigned_; }
Zone* zone() { return zone_; }
DeclarationScope* closure_scope() { return closure_scope_; }
AstNodeFactory* factory() { return &factory_; }
// Returns ".result = value"
Expression* SetResult(Expression* value) {
result_assigned_ = true;
VariableProxy* result_proxy = factory()->NewVariableProxy(result_);
return factory()->NewAssignment(Token::ASSIGN, result_proxy, value,
kNoSourcePosition);
}
// Inserts '.result = undefined' in front of the given statement.
Statement* AssignUndefinedBefore(Statement* s);
private:
Variable* result_;
// We are not tracking result usage via the result_'s use
// counts (we leave the accurate computation to the
// usage analyzer). Instead we simple remember if
// there was ever an assignment to result_.
bool result_assigned_;
// When visiting a node, we "return" a replacement for that node in
// [replacement_]. In many cases this will just be the original node.
Statement* replacement_;
// To avoid storing to .result all the time, we eliminate some of
// the stores by keeping track of whether or not we're sure .result
// will be overwritten anyway. This is a bit more tricky than what I
// was hoping for.
bool is_set_;
bool breakable_;
class BreakableScope final {
public:
explicit BreakableScope(Processor* processor, bool breakable = true)
: processor_(processor), previous_(processor->breakable_) {
processor->breakable_ = processor->breakable_ || breakable;
}
~BreakableScope() { processor_->breakable_ = previous_; }
private:
Processor* processor_;
bool previous_;
};
Zone* zone_;
DeclarationScope* closure_scope_;
AstNodeFactory factory_;
// Node visitors.
#define DEF_VISIT(type) void Visit##type(type* node);
AST_NODE_LIST(DEF_VISIT)
#undef DEF_VISIT
void VisitIterationStatement(IterationStatement* stmt);
DEFINE_AST_VISITOR_SUBCLASS_MEMBERS();
};
Statement* Processor::AssignUndefinedBefore(Statement* s) {
Expression* result_proxy = factory()->NewVariableProxy(result_);
Expression* undef = factory()->NewUndefinedLiteral(kNoSourcePosition);
Expression* assignment = factory()->NewAssignment(Token::ASSIGN, result_proxy,
undef, kNoSourcePosition);
Block* b = factory()->NewBlock(NULL, 2, false, kNoSourcePosition);
b->statements()->Add(
factory()->NewExpressionStatement(assignment, kNoSourcePosition), zone());
b->statements()->Add(s, zone());
return b;
}
void Processor::Process(ZoneList<Statement*>* statements) {
// If we're in a breakable scope (named block, iteration, or switch), we walk
// all statements. The last value producing statement before the break needs
// to assign to .result. If we're not in a breakable scope, only the last
// value producing statement in the block assigns to .result, so we can stop
// early.
for (int i = statements->length() - 1; i >= 0 && (breakable_ || !is_set_);
--i) {
Visit(statements->at(i));
statements->Set(i, replacement_);
}
}
void Processor::VisitBlock(Block* node) {
// An initializer block is the rewritten form of a variable declaration
// with initialization expressions. The initializer block contains the
// list of assignments corresponding to the initialization expressions.
// While unclear from the spec (ECMA-262, 3rd., 12.2), the value of
// a variable declaration with initialization expression is 'undefined'
// with some JS VMs: For instance, using smjs, print(eval('var x = 7'))
// returns 'undefined'. To obtain the same behavior with v8, we need
// to prevent rewriting in that case.
if (!node->ignore_completion_value()) {
BreakableScope scope(this, node->labels() != nullptr);
Process(node->statements());
}
replacement_ = node;
}
void Processor::VisitExpressionStatement(ExpressionStatement* node) {
// Rewrite : <x>; -> .result = <x>;
if (!is_set_) {
node->set_expression(SetResult(node->expression()));
is_set_ = true;
}
replacement_ = node;
}
void Processor::VisitIfStatement(IfStatement* node) {
// Rewrite both branches.
bool set_after = is_set_;
Visit(node->then_statement());
node->set_then_statement(replacement_);
bool set_in_then = is_set_;
is_set_ = set_after;
Visit(node->else_statement());
node->set_else_statement(replacement_);
replacement_ = set_in_then && is_set_ ? node : AssignUndefinedBefore(node);
is_set_ = true;
}
void Processor::VisitIterationStatement(IterationStatement* node) {
// The statement may have to produce a value, so always assign undefined
// before.
// TODO(verwaest): Omit it if we know that there's no break/continue leaving
// it early.
DCHECK(breakable_ || !is_set_);
BreakableScope scope(this);
Visit(node->body());
node->set_body(replacement_);
replacement_ = AssignUndefinedBefore(node);
is_set_ = true;
}
void Processor::VisitDoWhileStatement(DoWhileStatement* node) {
VisitIterationStatement(node);
}
void Processor::VisitWhileStatement(WhileStatement* node) {
VisitIterationStatement(node);
}
void Processor::VisitForStatement(ForStatement* node) {
VisitIterationStatement(node);
}
void Processor::VisitForInStatement(ForInStatement* node) {
VisitIterationStatement(node);
}
void Processor::VisitForOfStatement(ForOfStatement* node) {
VisitIterationStatement(node);
}
void Processor::VisitTryCatchStatement(TryCatchStatement* node) {
// Rewrite both try and catch block.
bool set_after = is_set_;
Visit(node->try_block());
node->set_try_block(static_cast<Block*>(replacement_));
bool set_in_try = is_set_;
is_set_ = set_after;
Visit(node->catch_block());
node->set_catch_block(static_cast<Block*>(replacement_));
replacement_ = is_set_ && set_in_try ? node : AssignUndefinedBefore(node);
is_set_ = true;
}
// Rewrites a try-finally: the try block contributes to the completion value,
// while the finally block normally does not (it may only affect completion
// through an abrupt exit such as 'break' or 'continue').
void Processor::VisitTryFinallyStatement(TryFinallyStatement* node) {
  // Only rewrite finally if it could contain 'break' or 'continue'. Always
  // rewrite try.
  if (breakable_) {
    // Only set result before a 'break' or 'continue'.
    is_set_ = true;
    Visit(node->finally_block());
    node->set_finally_block(replacement_->AsBlock());
    // Save .result value at the beginning of the finally block and restore it
    // at the end again: ".backup = .result; ...; .result = .backup"
    // This is necessary because the finally block does not normally contribute
    // to the completion value.
    CHECK_NOT_NULL(closure_scope());
    Variable* backup = closure_scope()->NewTemporary(
        factory()->ast_value_factory()->dot_result_string());
    Expression* backup_proxy = factory()->NewVariableProxy(backup);
    Expression* result_proxy = factory()->NewVariableProxy(result_);
    Expression* save = factory()->NewAssignment(
        Token::ASSIGN, backup_proxy, result_proxy, kNoSourcePosition);
    Expression* restore = factory()->NewAssignment(
        Token::ASSIGN, result_proxy, backup_proxy, kNoSourcePosition);
    node->finally_block()->statements()->InsertAt(
        0, factory()->NewExpressionStatement(save, kNoSourcePosition), zone());
    node->finally_block()->statements()->Add(
        factory()->NewExpressionStatement(restore, kNoSourcePosition), zone());
  }
  // The try block is always rewritten, using the is_set_ state captured
  // before the (optional) finally rewrite above.
  Visit(node->try_block());
  node->set_try_block(replacement_->AsBlock());
  // If nothing in the try block assigned .result, prepend ".result =
  // undefined" so the statement still yields a defined completion value.
  replacement_ = is_set_ ? node : AssignUndefinedBefore(node);
  is_set_ = true;
}
void Processor::VisitSwitchStatement(SwitchStatement* node) {
// The statement may have to produce a value, so always assign undefined
// before.
// TODO(verwaest): Omit it if we know that there's no break/continue leaving
// it early.
DCHECK(breakable_ || !is_set_);
BreakableScope scope(this);
// Rewrite statements in all case clauses.
ZoneList<CaseClause*>* clauses = node->cases();
for (int i = clauses->length() - 1; i >= 0; --i) {
CaseClause* clause = clauses->at(i);
Process(clause->statements());
}
replacement_ = AssignUndefinedBefore(node);
is_set_ = true;
}
void Processor::VisitContinueStatement(ContinueStatement* node) {
is_set_ = false;
replacement_ = node;
}
void Processor::VisitBreakStatement(BreakStatement* node) {
is_set_ = false;
replacement_ = node;
}
void Processor::VisitWithStatement(WithStatement* node) {
Visit(node->statement());
node->set_statement(replacement_);
replacement_ = is_set_ ? node : AssignUndefinedBefore(node);
is_set_ = true;
}
void Processor::VisitSloppyBlockFunctionStatement(
SloppyBlockFunctionStatement* node) {
Visit(node->statement());
node->set_statement(replacement_);
replacement_ = node;
}
void Processor::VisitEmptyStatement(EmptyStatement* node) {
replacement_ = node;
}
void Processor::VisitReturnStatement(ReturnStatement* node) {
is_set_ = true;
replacement_ = node;
}
void Processor::VisitDebuggerStatement(DebuggerStatement* node) {
replacement_ = node;
}
// Expressions are never visited.
#define DEF_VISIT(type) \
void Processor::Visit##type(type* expr) { UNREACHABLE(); }
EXPRESSION_NODE_LIST(DEF_VISIT)
#undef DEF_VISIT
// Declarations are never visited.
#define DEF_VISIT(type) \
void Processor::Visit##type(type* expr) { UNREACHABLE(); }
DECLARATION_NODE_LIST(DEF_VISIT)
#undef DEF_VISIT
// Assumes code has been parsed. Mutates the AST, so the AST should not
// continue to be used in the case of failure.
bool Rewriter::Rewrite(ParseInfo* info) {
  FunctionLiteral* function = info->literal();
  DCHECK_NOT_NULL(function);
  Scope* scope = function->scope();
  DCHECK_NOT_NULL(scope);
  // Nothing to do for ordinary function bodies; only script and eval code
  // has an observable completion value that needs to be captured.
  if (!scope->is_script_scope() && !scope->is_eval_scope()) return true;
  DeclarationScope* closure_scope = scope->GetClosureScope();

  ZoneList<Statement*>* body = function->body();
  if (!body->is_empty()) {
    // Temporary variable ".result" that the Processor makes each
    // value-producing statement assign to.
    Variable* result = closure_scope->NewTemporary(
        info->ast_value_factory()->dot_result_string());
    // The name string must be internalized at this point.
    info->ast_value_factory()->Internalize(info->isolate());
    DCHECK(!result->name().is_null());
    Processor processor(info->isolate(), closure_scope, result,
                        info->ast_value_factory());
    processor.Process(body);
    // Internalize any values created during rewriting.
    info->ast_value_factory()->Internalize(info->isolate());
    // A stack overflow during AST traversal leaves the AST mutated and
    // unusable; the caller must not continue with it (see header comment).
    if (processor.HasStackOverflow()) return false;

    if (processor.result_assigned()) {
      // Append "return .result" so the captured completion value is
      // actually returned from the script/eval body.
      int pos = kNoSourcePosition;
      VariableProxy* result_proxy =
          processor.factory()->NewVariableProxy(result, pos);
      Statement* result_statement =
          processor.factory()->NewReturnStatement(result_proxy, pos);
      body->Add(result_statement, info->zone());
    }
  }

  return true;
}
bool Rewriter::Rewrite(Parser* parser, DeclarationScope* closure_scope,
DoExpression* expr, AstValueFactory* factory) {
Block* block = expr->block();
DCHECK_EQ(closure_scope, closure_scope->GetClosureScope());
DCHECK(block->scope() == nullptr ||
block->scope()->GetClosureScope() == closure_scope);
ZoneList<Statement*>* body = block->statements();
VariableProxy* result = expr->result();
Variable* result_var = result->var();
if (!body->is_empty()) {
Processor processor(parser, closure_scope, result_var, factory);
processor.Process(body);
if (processor.HasStackOverflow()) return false;
if (!processor.result_assigned()) {
AstNodeFactory* node_factory = processor.factory();
Expression* undef = node_factory->NewUndefinedLiteral(kNoSourcePosition);
Statement* completion = node_factory->NewExpressionStatement(
processor.SetResult(undef), expr->position());
body->Add(completion, factory->zone());
}
}
return true;
}
} // namespace internal
} // namespace v8
| zero-rp/miniblink49 | v8_5_7/src/parsing/rewriter.cc | C++ | apache-2.0 | 13,412 |
<!doctype html><html lang=en><head><title>Redirecting…</title><link rel=canonical href=/v1.5/zh/news/releases/1.0.x/announcing-1.0.8/><meta name=robots content="noindex"><meta charset=utf-8><meta http-equiv=refresh content="0; url=/v1.5/zh/news/releases/1.0.x/announcing-1.0.8/"></head><body><h1>Redirecting…</h1><a href=/v1.5/zh/news/releases/1.0.x/announcing-1.0.8/>Click here if you are not redirected.</a></body></html> | istio/istio.io | archive/v1.5/zh/news/announcing-1.0.8/index.html | HTML | apache-2.0 | 433 |
/**
* Copyright (C) 2009-2014 Dell, Inc.
* See annotations for authorship information
*
* ====================================================================
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ====================================================================
*/
package org.dasein.cloud;
/**
 * Defines the scope within which a cloud resource is visible to an account
 * (account-wide, per-region, or per-datacenter).
 * <p>Created by AndyLyall: 02/25/14 13:35 PM</p>
 * @author Andy Lyall
 * @version 2014.03 initial version
 * @since 2014.03
 */
public enum VisibleScope {
    /**
     * Resource is visible across the entire account
     */
    ACCOUNT_GLOBAL,
    /**
     * Resource is visible across one whole region
     */
    ACCOUNT_REGION,
    /**
     * Resource is visible across one whole datacenter
     */
    ACCOUNT_DATACENTER
}
| OSS-TheWeatherCompany/dasein-cloud-core | src/main/java/org/dasein/cloud/VisibleScope.java | Java | apache-2.0 | 1,298 |
/*
* Copyright © 2014 Cask Data, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License
*/
package co.cask.cdap.cli.completer.element;
import co.cask.cdap.api.service.http.ServiceHttpEndpoint;
import co.cask.cdap.cli.CLIConfig;
import co.cask.cdap.cli.ProgramIdArgument;
import co.cask.cdap.cli.util.ArgumentParser;
import co.cask.cdap.client.ServiceClient;
import co.cask.cdap.common.NotFoundException;
import co.cask.cdap.common.UnauthorizedException;
import co.cask.cdap.proto.Id;
import co.cask.common.cli.completers.PrefixCompleter;
import com.google.common.collect.Lists;

import java.io.IOException;
import java.util.Collection;
import java.util.Collections;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
/**
* Prefix completer for Http methods.
*/
public class HttpMethodPrefixCompleter extends PrefixCompleter {

  private static final String PROGRAM_ID = "programId";
  private static final String PATTERN = String.format("call service <%s>", PROGRAM_ID);

  private final ServiceClient serviceClient;
  private final EndpointCompleter completer;
  private final CLIConfig cliConfig;

  /**
   * @param serviceClient client used to fetch the endpoints exposed by a service
   * @param cliConfig CLI configuration, used to resolve the current namespace
   * @param prefix command prefix this completer is anchored to
   * @param completer delegate completer whose endpoint list this class populates
   */
  public HttpMethodPrefixCompleter(final ServiceClient serviceClient, final CLIConfig cliConfig,
                                   String prefix, EndpointCompleter completer) {
    super(prefix, completer);
    this.cliConfig = cliConfig;
    this.serviceClient = serviceClient;
    this.completer = completer;
  }

  /**
   * Parses the service program id out of the current command buffer, loads that service's
   * HTTP methods into the delegate completer, then defers to the standard prefix completion.
   */
  @Override
  public int complete(String buffer, int cursor, List<CharSequence> candidates) {
    Map<String, String> arguments = ArgumentParser.getArguments(buffer, PATTERN);
    ProgramIdArgument programIdArgument = ArgumentParser.parseProgramId(arguments.get(PROGRAM_ID));
    if (programIdArgument != null) {
      Id.Service service = Id.Service.from(cliConfig.getCurrentNamespace(),
                                           programIdArgument.getAppId(), programIdArgument.getProgramId());
      completer.setEndpoints(getMethods(service));
    } else {
      // No parseable program id yet: offer no endpoint suggestions.
      completer.setEndpoints(Collections.<String>emptyList());
    }
    return super.complete(buffer, cursor, candidates);
  }

  /**
   * Returns the distinct HTTP methods exposed by the given service, in the order
   * they are first encountered. Failures are treated as "no suggestions".
   */
  public Collection<String> getMethods(Id.Service serviceId) {
    // LinkedHashSet dedupes in O(1) per element while preserving first-seen order;
    // the previous List + contains() approach was O(n^2).
    Collection<String> httpMethods = new LinkedHashSet<>();
    try {
      for (ServiceHttpEndpoint endpoint : serviceClient.getEndpoints(serviceId)) {
        httpMethods.add(endpoint.getMethod());
      }
    } catch (IOException | UnauthorizedException | NotFoundException ignored) {
      // Completion is best-effort: if the service is unreachable, unauthorized,
      // or missing, silently offer no methods rather than breaking the CLI.
    }
    return httpMethods;
  }
}
| chtyim/cdap | cdap-cli/src/main/java/co/cask/cdap/cli/completer/element/HttpMethodPrefixCompleter.java | Java | apache-2.0 | 3,087 |
/*
* Copyright 2000-2013 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jetbrains.plugins.javaFX.fxml.refs;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Computable;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.*;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.search.LocalSearchScope;
import com.intellij.psi.search.SearchScope;
import com.intellij.psi.search.searches.ReferencesSearch;
import com.intellij.psi.util.PsiUtilCore;
import com.intellij.psi.xml.XmlAttribute;
import com.intellij.psi.xml.XmlAttributeValue;
import com.intellij.util.Processor;
import com.intellij.util.QueryExecutor;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.plugins.javaFX.fxml.FxmlConstants;
import org.jetbrains.plugins.javaFX.indexing.JavaFxControllerClassIndex;
import java.util.List;
/**
* User: anna
* Date: 3/29/13
*/
public class JavaFxControllerFieldSearcher implements QueryExecutor<PsiReference, ReferencesSearch.SearchParameters>{
@Override
public boolean execute(@NotNull final ReferencesSearch.SearchParameters queryParameters, @NotNull final Processor<PsiReference> consumer) {
final PsiElement elementToSearch = queryParameters.getElementToSearch();
if (elementToSearch instanceof PsiField) {
final PsiField field = (PsiField)elementToSearch;
final PsiClass containingClass = ApplicationManager.getApplication().runReadAction(new Computable<PsiClass>() {
@Override
public PsiClass compute() {
return field.getContainingClass();
}
});
if (containingClass != null) {
final String qualifiedName = ApplicationManager.getApplication().runReadAction(new Computable<String>() {
@Override
public String compute() {
return containingClass.getQualifiedName();
}
});
if (qualifiedName != null) {
Project project = PsiUtilCore.getProjectInReadAction(containingClass);
final List<PsiFile> fxmlWithController =
JavaFxControllerClassIndex.findFxmlWithController(project, qualifiedName);
for (final PsiFile file : fxmlWithController) {
ApplicationManager.getApplication().runReadAction(() -> {
final String fieldName = field.getName();
if (fieldName == null) return;
final VirtualFile virtualFile = file.getViewProvider().getVirtualFile();
final SearchScope searchScope = queryParameters.getEffectiveSearchScope();
boolean contains = searchScope instanceof LocalSearchScope ? ((LocalSearchScope)searchScope).isInScope(virtualFile) :
((GlobalSearchScope)searchScope).contains(virtualFile);
if (contains) {
file.accept(new XmlRecursiveElementVisitor() {
@Override
public void visitXmlAttributeValue(final XmlAttributeValue value) {
final PsiReference reference = value.getReference();
if (reference != null) {
final PsiElement resolve = reference.resolve();
if (resolve instanceof XmlAttributeValue) {
final PsiElement parent = resolve.getParent();
if (parent instanceof XmlAttribute) {
final XmlAttribute attribute = (XmlAttribute)parent;
if (FxmlConstants.FX_ID.equals(attribute.getName()) && fieldName.equals(attribute.getValue())) {
consumer.process(reference);
}
}
}
}
}
});
}
});
}
}
}
}
return true;
}
}
| hurricup/intellij-community | plugins/javaFX/src/org/jetbrains/plugins/javaFX/fxml/refs/JavaFxControllerFieldSearcher.java | Java | apache-2.0 | 4,457 |
// RUN: %clang_cc1 -verify -std=c++2a -fsyntax-only -triple x86_64-apple-macosx10.14.0 %s
// RUN: %clang_cc1 -verify -std=c++2a -fsyntax-only -triple x86_64-apple-macosx10.14.0 %s -fno-signed-char
// RUN: %clang_cc1 -verify -std=c++2a -fsyntax-only -triple aarch64_be-linux-gnu %s
#if __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__
# define LITTLE_END 1
#elif __BYTE_ORDER__ == __ORDER_BIG_ENDIAN__
# define LITTLE_END 0
#else
# error "huh?"
#endif
template <class T, class V> struct is_same {
static constexpr bool value = false;
};
template <class T> struct is_same<T, T> {
static constexpr bool value = true;
};
static_assert(sizeof(int) == 4);
static_assert(sizeof(long long) == 8);
// Constexpr wrapper around __builtin_bit_cast. NOTE: the expected-note@+N
// comments below are clang -verify directives anchored to the return line;
// no lines may be inserted between a directive and its target.
template <class To, class From>
constexpr To bit_cast(const From &from) {
  static_assert(sizeof(To) == sizeof(From));
  // expected-note@+9 {{cannot be represented in type 'bool'}}
#ifdef __x86_64
  // expected-note@+7 {{or 'std::byte'; '__int128' is invalid}}
#endif
#ifdef __CHAR_UNSIGNED__
  // expected-note@+4 2 {{indeterminate value can only initialize an object of type 'unsigned char', 'char', or 'std::byte'; 'signed char' is invalid}}
#else
  // expected-note@+2 2 {{indeterminate value can only initialize an object of type 'unsigned char' or 'std::byte'; 'signed char' is invalid}}
#endif
  return __builtin_bit_cast(To, from);
}
// True iff `init` survives a bit_cast to Intermediate and back unchanged.
template <class Intermediate, class Init>
constexpr bool round_trip(const Init &init) {
  return bit_cast<Init>(bit_cast<Intermediate>(init)) == init;
}
// int <-> unsigned round-trips, covering -1 and patterns with the sign bit set.
void test_int() {
  static_assert(round_trip<unsigned>((int)-1));
  static_assert(round_trip<unsigned>((int)0x12345678));
  static_assert(round_trip<unsigned>((int)0x87654321));
  static_assert(round_trip<unsigned>((int)0x0C05FEFE));
}
// A 4-byte array reassembles into an unsigned according to host byte order.
void test_array() {
  constexpr unsigned char input[] = {0xCA, 0xFE, 0xBA, 0xBE};
  constexpr unsigned expected = LITTLE_END ? 0xBEBAFECA : 0xCAFEBABE;
  static_assert(bit_cast<unsigned>(input) == expected);
}
// Struct bit-casting: two adjacent unsigned members splice into one 64-bit
// value (half order depends on endianness), and a hierarchy with an empty
// base flattens to the same layout as a flat 4-field struct.
void test_record() {
  struct int_splicer {
    unsigned x;
    unsigned y;
    constexpr bool operator==(const int_splicer &other) const {
      return other.x == x && other.y == y;
    }
  };
  constexpr int_splicer splice{0x0C05FEFE, 0xCAFEBABE};
  // On little-endian targets `y` lands in the high half of the 64-bit value.
  static_assert(bit_cast<unsigned long long>(splice) == (LITTLE_END
                                                             ? 0xCAFEBABE0C05FEFE
                                                             : 0x0C05FEFECAFEBABE));
  static_assert(bit_cast<int_splicer>(0xCAFEBABE0C05FEFE).x == (LITTLE_END
                                                                    ? 0x0C05FEFE
                                                                    : 0xCAFEBABE));
  static_assert(round_trip<unsigned long long>(splice));
  static_assert(round_trip<long long>(splice));
  struct base2 {
  };
  struct base3 {
    unsigned z;
  };
  // base2 is empty, so `bases` is expected to lay out as {x, y, z, doublez},
  // which the tuple4 comparison below verifies.
  struct bases : int_splicer, base2, base3 {
    unsigned doublez;
  };
  struct tuple4 {
    unsigned x, y, z, doublez;
    constexpr bool operator==(tuple4 const &other) const {
      return x == other.x && y == other.y &&
             z == other.z && doublez == other.doublez;
    }
  };
  constexpr bases b = {{1, 2}, {}, {3}, 4};
  constexpr tuple4 t4 = bit_cast<tuple4>(b);
  static_assert(t4 == tuple4{1, 2, 3, 4});
  static_assert(round_trip<tuple4>(b));
}
void test_partially_initialized() {
struct pad {
signed char x;
int y;
};
struct no_pad {
signed char x;
signed char p1, p2, p3;
int y;
};
static_assert(sizeof(pad) == sizeof(no_pad));
constexpr pad pir{4, 4};
// expected-error@+2 {{constexpr variable 'piw' must be initialized by a constant expression}}
// expected-note@+1 {{in call to 'bit_cast(pir)'}}
constexpr int piw = bit_cast<no_pad>(pir).x;
// expected-error@+2 {{constexpr variable 'bad' must be initialized by a constant expression}}
// expected-note@+1 {{in call to 'bit_cast(pir)'}}
constexpr no_pad bad = bit_cast<no_pad>(pir);
constexpr pad fine = bit_cast<pad>(no_pad{1, 2, 3, 4, 5});
static_assert(fine.x == 1 && fine.y == 5);
}
void no_bitfields() {
// FIXME!
struct S {
unsigned char x : 8;
};
struct G {
unsigned char x : 8;
};
constexpr S s{0};
// expected-error@+2 {{constexpr variable 'g' must be initialized by a constant expression}}
// expected-note@+1 {{constexpr bit_cast involving bit-field is not yet supported}}
constexpr G g = __builtin_bit_cast(G, s);
}
void array_members() {
struct S {
int ar[3];
constexpr bool operator==(const S &rhs) {
return ar[0] == rhs.ar[0] && ar[1] == rhs.ar[1] && ar[2] == rhs.ar[2];
}
};
struct G {
int a, b, c;
constexpr bool operator==(const G &rhs) {
return a == rhs.a && b == rhs.b && c == rhs.c;
}
};
constexpr S s{{1, 2, 3}};
constexpr G g = bit_cast<G>(s);
static_assert(g.a == 1 && g.b == 2 && g.c == 3);
static_assert(round_trip<G>(s));
static_assert(round_trip<S>(g));
}
void bad_types() {
union X {
int x;
};
struct G {
int g;
};
// expected-error@+2 {{constexpr variable 'g' must be initialized by a constant expression}}
// expected-note@+1 {{bit_cast from a union type is not allowed in a constant expression}}
constexpr G g = __builtin_bit_cast(G, X{0});
// expected-error@+2 {{constexpr variable 'x' must be initialized by a constant expression}}
// expected-note@+1 {{bit_cast to a union type is not allowed in a constant expression}}
constexpr X x = __builtin_bit_cast(X, G{0});
struct has_pointer {
// expected-note@+1 2 {{invalid type 'int *' is a member of 'has_pointer'}}
int *ptr;
};
// expected-error@+2 {{constexpr variable 'ptr' must be initialized by a constant expression}}
// expected-note@+1 {{bit_cast from a pointer type is not allowed in a constant expression}}
constexpr unsigned long ptr = __builtin_bit_cast(unsigned long, has_pointer{0});
// expected-error@+2 {{constexpr variable 'hptr' must be initialized by a constant expression}}
// expected-note@+1 {{bit_cast to a pointer type is not allowed in a constant expression}}
constexpr has_pointer hptr = __builtin_bit_cast(has_pointer, 0ul);
}
void backtrace() {
struct A {
// expected-note@+1 {{invalid type 'int *' is a member of 'A'}}
int *ptr;
};
struct B {
// expected-note@+1 {{invalid type 'A [10]' is a member of 'B'}}
A as[10];
};
// expected-note@+1 {{invalid type 'B' is a base of 'C'}}
struct C : B {
};
struct E {
unsigned long ar[10];
};
// expected-error@+2 {{constexpr variable 'e' must be initialized by a constant expression}}
// expected-note@+1 {{bit_cast from a pointer type is not allowed in a constant expression}}
constexpr E e = __builtin_bit_cast(E, C{});
}
// Partially-initialized array: unspecified trailing elements are zero-filled,
// so only the two low-order (in declaration order) bytes are nonzero.
void test_array_fill() {
  constexpr unsigned char a[4] = {1, 2};
  constexpr unsigned int i = bit_cast<unsigned int>(a);
  static_assert(i == (LITTLE_END ? 0x00000201 : 0x01020000));
}
typedef decltype(nullptr) nullptr_t;
#ifdef __CHAR_UNSIGNED__
// expected-note@+5 {{indeterminate value can only initialize an object of type 'unsigned char', 'char', or 'std::byte'; 'unsigned long' is invalid}}
#else
// expected-note@+3 {{indeterminate value can only initialize an object of type 'unsigned char' or 'std::byte'; 'unsigned long' is invalid}}
#endif
// expected-error@+1 {{constexpr variable 'test_from_nullptr' must be initialized by a constant expression}}
constexpr unsigned long test_from_nullptr = __builtin_bit_cast(unsigned long, nullptr);
constexpr int test_from_nullptr_pass = (__builtin_bit_cast(unsigned char[8], nullptr), 0);
constexpr int test_to_nullptr() {
nullptr_t npt = __builtin_bit_cast(nullptr_t, 0ul);
struct indet_mem {
unsigned char data[sizeof(void *)];
};
indet_mem im = __builtin_bit_cast(indet_mem, nullptr);
nullptr_t npt2 = __builtin_bit_cast(nullptr_t, im);
return 0;
}
constexpr int ttn = test_to_nullptr();
// expected-warning@+2 {{returning reference to local temporary object}}
// expected-note@+1 {{temporary created here}}
constexpr const long &returns_local() { return 0L; }
// expected-error@+2 {{constexpr variable 'test_nullptr_bad' must be initialized by a constant expression}}
// expected-note@+1 {{read of temporary whose lifetime has ended}}
constexpr nullptr_t test_nullptr_bad = __builtin_bit_cast(nullptr_t, returns_local());
constexpr int test_indeterminate(bool read_indet) {
struct pad {
char a;
int b;
};
struct no_pad {
char a;
unsigned char p1, p2, p3;
int b;
};
pad p{1, 2};
no_pad np = bit_cast<no_pad>(p);
int tmp = np.a + np.b;
unsigned char& indet_ref = np.p1;
if (read_indet) {
// expected-note@+1 {{read of uninitialized object is not allowed in a constant expression}}
tmp = indet_ref;
}
indet_ref = 0;
return 0;
}
constexpr int run_test_indeterminate = test_indeterminate(false);
// expected-error@+2 {{constexpr variable 'run_test_indeterminate2' must be initialized by a constant expression}}
// expected-note@+1 {{in call to 'test_indeterminate(true)'}}
constexpr int run_test_indeterminate2 = test_indeterminate(true);
struct ref_mem {
const int &rm;
};
constexpr int global_int = 0;
// expected-error@+2 {{constexpr variable 'run_ref_mem' must be initialized by a constant expression}}
// expected-note@+1 {{bit_cast from a type with a reference member is not allowed in a constant expression}}
constexpr unsigned long run_ref_mem = __builtin_bit_cast(
unsigned long, ref_mem{global_int});
union u {
int im;
};
// expected-error@+2 {{constexpr variable 'run_u' must be initialized by a constant expression}}
// expected-note@+1 {{bit_cast from a union type is not allowed in a constant expression}}
constexpr int run_u = __builtin_bit_cast(int, u{32});
struct vol_mem {
volatile int x;
};
// expected-error@+2 {{constexpr variable 'run_vol_mem' must be initialized by a constant expression}}
// expected-note@+1 {{non-literal type 'vol_mem' cannot be used in a constant expression}}
constexpr int run_vol_mem = __builtin_bit_cast(int, vol_mem{43});
struct mem_ptr {
int vol_mem::*x; // expected-note{{invalid type 'int vol_mem::*' is a member of 'mem_ptr'}}
};
// expected-error@+2 {{constexpr variable 'run_mem_ptr' must be initialized by a constant expression}}
// expected-note@+1 {{bit_cast from a member pointer type is not allowed in a constant expression}}
constexpr int run_mem_ptr = __builtin_bit_cast(unsigned long, mem_ptr{nullptr});
struct A { char c; /* char padding : 8; */ short s; };
struct B { unsigned char x[4]; };
constexpr B one() {
A a = {1, 2};
return bit_cast<B>(a);
}
constexpr char good_one = one().x[0] + one().x[2] + one().x[3];
// expected-error@+2 {{constexpr variable 'bad_one' must be initialized by a constant expression}}
// expected-note@+1 {{read of uninitialized object is not allowed in a constant expression}}
constexpr char bad_one = one().x[1];
constexpr A two() {
B b = one(); // b.x[1] is indeterminate.
b.x[0] = 'a';
b.x[2] = 1;
b.x[3] = 2;
return bit_cast<A>(b);
}
constexpr short good_two = two().c + two().s;
namespace std {
enum byte : unsigned char {};
}
enum my_byte : unsigned char {};
struct pad {
char a;
int b;
};
constexpr int ok_byte = (__builtin_bit_cast(std::byte[8], pad{1, 2}), 0);
constexpr int ok_uchar = (__builtin_bit_cast(unsigned char[8], pad{1, 2}), 0);
#ifdef __CHAR_UNSIGNED__
// expected-note@+5 {{indeterminate value can only initialize an object of type 'unsigned char', 'char', or 'std::byte'; 'my_byte' is invalid}}}}
#else
// expected-note@+3 {{indeterminate value can only initialize an object of type 'unsigned char' or 'std::byte'; 'my_byte' is invalid}}
#endif
// expected-error@+1 {{constexpr variable 'bad_my_byte' must be initialized by a constant expression}}
constexpr int bad_my_byte = (__builtin_bit_cast(my_byte[8], pad{1, 2}), 0);
#ifndef __CHAR_UNSIGNED__
// expected-error@+3 {{constexpr variable 'bad_char' must be initialized by a constant expression}}
// expected-note@+2 {{indeterminate value can only initialize an object of type 'unsigned char' or 'std::byte'; 'char' is invalid}}
#endif
constexpr int bad_char = (__builtin_bit_cast(char[8], pad{1, 2}), 0);
struct pad_buffer { unsigned char data[sizeof(pad)]; };
// A padded struct survives a round trip through a raw unsigned-char buffer:
// the padding bytes stay indeterminate but are never read.
constexpr bool test_pad_buffer() {
  pad x = {1, 2};
  pad_buffer y = __builtin_bit_cast(pad_buffer, x);
  pad z = __builtin_bit_cast(pad, y);
  return x.a == z.a && x.b == z.b;
}
constexpr unsigned char identity1a = 42;
constexpr unsigned char identity1b = __builtin_bit_cast(unsigned char, identity1a);
static_assert(identity1b == 42);
struct IdentityInStruct {
unsigned char n;
};
constexpr IdentityInStruct identity2a = {42};
constexpr unsigned char identity2b = __builtin_bit_cast(unsigned char, identity2a.n);
union IdentityInUnion {
unsigned char n;
};
constexpr IdentityInUnion identity3a = {42};
constexpr unsigned char identity3b = __builtin_bit_cast(unsigned char, identity3a.n);
namespace test_bool {
constexpr bool test_bad_bool = bit_cast<bool>('A'); // expected-error {{must be initialized by a constant expression}} expected-note{{in call}}
static_assert(round_trip<signed char>(true), "");
static_assert(round_trip<unsigned char>(false), "");
static_assert(round_trip<bool>(false), "");
static_assert(round_trip<bool>((char)0), "");
static_assert(round_trip<bool>((char)1), "");
}
namespace test_long_double {
#ifdef __x86_64
constexpr __int128_t test_cast_to_int128 = bit_cast<__int128_t>((long double)0); // expected-error{{must be initialized by a constant expression}} expected-note{{in call}}
constexpr long double ld = 3.1425926539;
struct bytes {
unsigned char d[16];
};
static_assert(round_trip<bytes>(ld), "");
static_assert(round_trip<long double>(10.0L));
constexpr bool f(bool read_uninit) {
bytes b = bit_cast<bytes>(ld);
unsigned char ld_bytes[10] = {
0x0, 0x48, 0x9f, 0x49, 0xf0,
0x3c, 0x20, 0xc9, 0x0, 0x40,
};
for (int i = 0; i != 10; ++i)
if (ld_bytes[i] != b.d[i])
return false;
if (read_uninit && b.d[10]) // expected-note{{read of uninitialized object is not allowed in a constant expression}}
return false;
return true;
}
static_assert(f(/*read_uninit=*/false), "");
static_assert(f(/*read_uninit=*/true), ""); // expected-error{{static_assert expression is not an integral constant expression}} expected-note{{in call to 'f(true)'}}
constexpr bytes ld539 = {
0x0, 0x0, 0x0, 0x0,
0x0, 0x0, 0xc0, 0x86,
0x8, 0x40, 0x0, 0x0,
0x0, 0x0, 0x0, 0x0,
};
constexpr long double fivehundredandthirtynine = 539.0;
static_assert(bit_cast<long double>(ld539) == fivehundredandthirtynine, "");
#else
static_assert(round_trip<__int128_t>(34.0L));
#endif
}
| google/llvm-propeller | clang/test/SemaCXX/constexpr-builtin-bit-cast.cpp | C++ | apache-2.0 | 14,744 |
##########################################################################
# Copyright 2015 ThoughtWorks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##########################################################################
module ApiV1
  module Dashboard
    # Serializes one pipeline group (name plus its pipelines) for the v1
    # dashboard API, with HAL-style self/doc links.
    class PipelineGroupRepresenter < ApiV1::BaseRepresenter
      # The represented object is the pipeline-group dashboard model.
      alias_method :pipeline_dashboard, :represented
      link :self do |opts|
        opts[:url_builder].pipeline_group_config_list_api_url
      end
      link :doc do
        'https://api.gocd.org/#pipeline-groups'
      end
      property :getName, as: :name
      # Pipelines are embedded inline, each rendered by PipelineRepresenter.
      collection :pipelines, embedded: true, exec_context: :decorator, decorator: PipelineRepresenter
      def pipelines
        pipeline_dashboard.getPipelineModels()
      end
    end
  end
end
| stephen-murby/gocd | server/webapp/WEB-INF/rails.new/app/presenters/api_v1/dashboard/pipeline_group_representer.rb | Ruby | apache-2.0 | 1,291 |
<!-- HTML header for doxygen 1.8.8-->
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<meta http-equiv="Content-Type" content="text/xhtml;charset=UTF-8"/>
<meta http-equiv="X-UA-Compatible" content="IE=9"/>
<meta name="generator" content="Doxygen 1.8.9.1"/>
<title>DMA Friends: Member List</title>
<!--<link href="../../tabs.css" rel="stylesheet" type="text/css"/>-->
<!--<script type="text/javascript" src="../../jquery.js"></script>-->
<script type="text/javascript" src="../../dynsections.js"></script>
<link href="bootstrap3/css/bootstrap.min.css" rel="stylesheet">
<script src="bootstrap3/js/jquery-2.1.1.min.js"></script>
<script src="bootstrap3/js/bootstrap.min.js"></script>
<script type="text/javascript" src="doxy-boot.js"></script>
<link href="../../search/search.css" rel="stylesheet" type="text/css"/>
<script type="text/javascript" src="../../search/searchdata.js"></script>
<script type="text/javascript" src="../../search/search.js"></script>
<script type="text/javascript">
$(document).ready(function() { init_search(); });
</script>
<link href="../../doxygen.css" rel="stylesheet" type="text/css" />
</head>
<body>
<div id="top"><!-- do not remove this div, it is closed by doxygen! -->
<div id="titlearea">
<table cellspacing="0" cellpadding="0">
<tbody>
<tr style="height: 56px;">
<td style="padding-left: 0.5em;">
<div id="projectname">DMA Friends
 <span id="projectnumber">2.0</span>
</div>
</td>
</tr>
</tbody>
</table>
</div>
<!-- end header part -->
<!-- Generated by Doxygen 1.8.9.1 -->
<script type="text/javascript">
var searchBox = new SearchBox("searchBox", "../../search",false,'Search');
</script>
<div id="navrow1" class="tabs">
<ul class="tablist">
<li><a href="../../index.html"><span>Main Page</span></a></li>
<li><a href="../../pages.html"><span>Related Pages</span></a></li>
<li><a href="../../namespaces.html"><span>Namespaces</span></a></li>
<li class="current"><a href="../../annotated.html"><span>Classes</span></a></li>
<li>
<div id="MSearchBox" class="MSearchBoxInactive">
<span class="left">
<img id="MSearchSelect" src="../../search/mag_sel.png"
onmouseover="return searchBox.OnSearchSelectShow()"
onmouseout="return searchBox.OnSearchSelectHide()"
alt=""/>
<input type="text" id="MSearchField" value="Search" accesskey="S"
onfocus="searchBox.OnSearchFieldFocus(true)"
onblur="searchBox.OnSearchFieldFocus(false)"
onkeyup="searchBox.OnSearchFieldChange(event)"/>
</span><span class="right">
<a id="MSearchClose" href="javascript:searchBox.CloseResultsWindow()"><img id="MSearchCloseImg" border="0" src="../../search/close.png" alt=""/></a>
</span>
</div>
</li>
</ul>
</div>
<div id="navrow2" class="tabs2">
<ul class="tablist">
<li><a href="../../annotated.html"><span>Class List</span></a></li>
<li><a href="../../classes.html"><span>Class Index</span></a></li>
<li><a href="../../hierarchy.html"><span>Class Hierarchy</span></a></li>
<li><a href="../../functions.html"><span>Class Members</span></a></li>
</ul>
</div>
<!-- window showing the filter options -->
<div id="MSearchSelectWindow"
onmouseover="return searchBox.OnSearchSelectShow()"
onmouseout="return searchBox.OnSearchSelectHide()"
onkeydown="return searchBox.OnSearchSelectKey(event)">
</div>
<!-- iframe showing the search results (closed by default) -->
<div id="MSearchResultsWindow">
<iframe src="javascript:void(0)" frameborder="0"
name="MSearchResults" id="MSearchResults">
</iframe>
</div>
<div id="nav-path" class="navpath">
<ul>
<li class="navelem"><a class="el" href="../../d2/d01/namespaceDMA.html">DMA</a></li><li class="navelem"><b>Friends</b></li><li class="navelem"><b>Api</b></li><li class="navelem"><a class="el" href="../../d7/d53/classDMA_1_1Friends_1_1Api_1_1ActivityResource.html">ActivityResource</a></li> </ul>
</div>
</div><!-- top -->
<div class="header">
<div class="headertitle">
<div class="title">DMA\Friends\Api\ActivityResource Member List</div> </div>
</div><!--header-->
<div class="contents">
<p>This is the complete list of members for <a class="el" href="../../d7/d53/classDMA_1_1Friends_1_1Api_1_1ActivityResource.html">DMA\Friends\Api\ActivityResource</a>, including all inherited members.</p>
<table class="directory">
<tr bgcolor="#f0f0f0" class="even"><td class="entry"><b>$model</b> (defined in <a class="el" href="../../d7/d53/classDMA_1_1Friends_1_1Api_1_1ActivityResource.html">DMA\Friends\Api\ActivityResource</a>)</td><td class="entry"><a class="el" href="../../d7/d53/classDMA_1_1Friends_1_1Api_1_1ActivityResource.html">DMA\Friends\Api\ActivityResource</a></td><td class="entry"><span class="mlabel">protected</span></td></tr>
<tr bgcolor="#f0f0f0"><td class="entry"><b>$pageSize</b> (defined in <a class="el" href="../../d9/dda/classDMA_1_1Friends_1_1Api_1_1BaseResource.html">DMA\Friends\Api\BaseResource</a>)</td><td class="entry"><a class="el" href="../../d9/dda/classDMA_1_1Friends_1_1Api_1_1BaseResource.html">DMA\Friends\Api\BaseResource</a></td><td class="entry"><span class="mlabel">protected</span></td></tr>
<tr bgcolor="#f0f0f0" class="even"><td class="entry"><b>$transformer</b> (defined in <a class="el" href="../../d7/d53/classDMA_1_1Friends_1_1Api_1_1ActivityResource.html">DMA\Friends\Api\ActivityResource</a>)</td><td class="entry"><a class="el" href="../../d7/d53/classDMA_1_1Friends_1_1Api_1_1ActivityResource.html">DMA\Friends\Api\ActivityResource</a></td><td class="entry"><span class="mlabel">protected</span></td></tr>
<tr><td class="entry"><a class="el" href="../../d9/dda/classDMA_1_1Friends_1_1Api_1_1BaseResource.html#a2e62ea3d73fab798a5319189b1d134e0">create</a>()</td><td class="entry"><a class="el" href="../../d9/dda/classDMA_1_1Friends_1_1Api_1_1BaseResource.html">DMA\Friends\Api\BaseResource</a></td><td class="entry"></td></tr>
<tr class="even"><td class="entry"><a class="el" href="../../d9/dda/classDMA_1_1Friends_1_1Api_1_1BaseResource.html#a630d3565e6fbec6faff59d8ab79e1e75">destroy</a>($id)</td><td class="entry"><a class="el" href="../../d9/dda/classDMA_1_1Friends_1_1Api_1_1BaseResource.html">DMA\Friends\Api\BaseResource</a></td><td class="entry"></td></tr>
<tr><td class="entry"><a class="el" href="../../d9/dda/classDMA_1_1Friends_1_1Api_1_1BaseResource.html#a9421c137919875aaf69b0192cec0737d">edit</a>($id)</td><td class="entry"><a class="el" href="../../d9/dda/classDMA_1_1Friends_1_1Api_1_1BaseResource.html">DMA\Friends\Api\BaseResource</a></td><td class="entry"></td></tr>
<tr class="even"><td class="entry"><a class="el" href="../../d9/dda/classDMA_1_1Friends_1_1Api_1_1BaseResource.html#add87ebd329590ef27490fe6b8ea6f418">index</a>()</td><td class="entry"><a class="el" href="../../d9/dda/classDMA_1_1Friends_1_1Api_1_1BaseResource.html">DMA\Friends\Api\BaseResource</a></td><td class="entry"></td></tr>
<tr><td class="entry"><a class="el" href="../../d9/dda/classDMA_1_1Friends_1_1Api_1_1BaseResource.html#a5ac5d4fb30ec5434fd685e044c357f97">show</a>($id)</td><td class="entry"><a class="el" href="../../d9/dda/classDMA_1_1Friends_1_1Api_1_1BaseResource.html">DMA\Friends\Api\BaseResource</a></td><td class="entry"></td></tr>
<tr class="even"><td class="entry"><a class="el" href="../../d9/dda/classDMA_1_1Friends_1_1Api_1_1BaseResource.html#a20877c85a440ce26e121a29b8fb91510">store</a>()</td><td class="entry"><a class="el" href="../../d9/dda/classDMA_1_1Friends_1_1Api_1_1BaseResource.html">DMA\Friends\Api\BaseResource</a></td><td class="entry"></td></tr>
<tr><td class="entry"><a class="el" href="../../d9/dda/classDMA_1_1Friends_1_1Api_1_1BaseResource.html#a67477f34c9c2d79ef575695c6c426b24">update</a>($id)</td><td class="entry"><a class="el" href="../../d9/dda/classDMA_1_1Friends_1_1Api_1_1BaseResource.html">DMA\Friends\Api\BaseResource</a></td><td class="entry"></td></tr>
</table></div><!-- contents -->
<!-- HTML footer for doxygen 1.8.8-->
<!-- start footer part -->
<hr class="footer"/><address class="footer"><small>
Generated on Thu Feb 12 2015 13:36:13 for DMA Friends by  <a href="http://www.doxygen.org/index.html">
<img class="footer" src="../../doxygen.png" alt="doxygen"/>
</a> 1.8.9.1
</small></address>
</body>
</html>
| DenverArtMuseum/OctoberFriends | docs/html/db/d76/classDMA_1_1Friends_1_1Api_1_1ActivityResource-members.html | HTML | apache-2.0 | 8,545 |
/*
* Copyright (c) 2008-2018, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.map.impl.querycache.subscriber;
import com.hazelcast.config.Config;
import com.hazelcast.config.MapConfig;
import com.hazelcast.config.QueryCacheConfig;
import com.hazelcast.internal.config.ConfigUtils;
import com.hazelcast.map.impl.querycache.QueryCacheConfigurator;
import com.hazelcast.map.impl.querycache.QueryCacheEventService;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
/**
* Node side implementation of {@link QueryCacheConfigurator}.
*
* @see QueryCacheConfigurator
*/
public class NodeQueryCacheConfigurator extends AbstractQueryCacheConfigurator {

    /** Member configuration used to look up and store per-map query cache configs. */
    private final Config config;

    public NodeQueryCacheConfigurator(Config config, ClassLoader configClassLoader,
                                      QueryCacheEventService eventService) {
        super(configClassLoader, eventService);
        this.config = config;
    }

    /**
     * Returns the {@link QueryCacheConfig} for the given map/cache pair, creating and
     * registering a default one on the map config when none matches.
     */
    @Override
    public QueryCacheConfig getOrCreateConfiguration(String mapName, String cacheName, String cacheId) {
        MapConfig mapConfig = config.getMapConfig(mapName);
        QueryCacheConfig queryCacheConfig = findQueryCacheConfigFromMapConfig(mapConfig, cacheName);
        if (queryCacheConfig != null) {
            setPredicateImpl(queryCacheConfig);
            setEntryListener(mapName, cacheId, queryCacheConfig);
            return queryCacheConfig;
        }
        // No matching configuration: create a default one and attach it to the map config
        // so subsequent lookups find it.
        QueryCacheConfig newConfig = new QueryCacheConfig(cacheName);
        mapConfig.getQueryCacheConfigs().add(newConfig);
        return newConfig;
    }

    /**
     * Returns the matching {@link QueryCacheConfig} or {@code null} when either the map
     * config or the query cache config does not exist. Never creates configuration.
     */
    @Override
    public QueryCacheConfig getOrNull(String mapName, String cacheName, String cacheId) {
        MapConfig mapConfig = config.getMapConfigOrNull(mapName);
        if (mapConfig == null) {
            return null;
        }
        QueryCacheConfig queryCacheConfig = findQueryCacheConfigFromMapConfig(mapConfig, cacheName);
        if (queryCacheConfig != null) {
            setPredicateImpl(queryCacheConfig);
            setEntryListener(mapName, cacheId, queryCacheConfig);
        }
        // Simplified: a single exit covers both the found and not-found cases
        // (the original had an identical duplicated return).
        return queryCacheConfig;
    }

    /**
     * Resolves {@code cacheName} against the map's query cache configs using the
     * configured pattern matcher (supports wildcard config names).
     */
    private QueryCacheConfig findQueryCacheConfigFromMapConfig(MapConfig mapConfig, String cacheName) {
        List<QueryCacheConfig> queryCacheConfigs = mapConfig.getQueryCacheConfigs();
        Map<String, QueryCacheConfig> allQueryCacheConfigs = new HashMap<String, QueryCacheConfig>(queryCacheConfigs.size());
        for (QueryCacheConfig queryCacheConfig : queryCacheConfigs) {
            allQueryCacheConfigs.put(queryCacheConfig.getName(), queryCacheConfig);
        }
        return ConfigUtils.lookupByPattern(config.getConfigPatternMatcher(), allQueryCacheConfigs, cacheName);
    }

    /**
     * Removes every query cache config named {@code cacheName} from the given map's config.
     * No-op when the map has no query cache configs.
     */
    @Override
    public void removeConfiguration(String mapName, String cacheName) {
        MapConfig mapConfig = config.getMapConfig(mapName);
        List<QueryCacheConfig> queryCacheConfigs = mapConfig.getQueryCacheConfigs();
        if (queryCacheConfigs == null || queryCacheConfigs.isEmpty()) {
            return;
        }
        Iterator<QueryCacheConfig> iterator = queryCacheConfigs.iterator();
        while (iterator.hasNext()) {
            // Renamed from 'config' — the original local shadowed the 'config' field.
            QueryCacheConfig candidate = iterator.next();
            if (candidate.getName().equals(cacheName)) {
                iterator.remove();
            }
        }
    }
}
| Donnerbart/hazelcast | hazelcast/src/main/java/com/hazelcast/map/impl/querycache/subscriber/NodeQueryCacheConfigurator.java | Java | apache-2.0 | 3,996 |
/**
* Autogenerated by Thrift
*
* DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
* @generated
*/
#include "thrift/compiler/test/fixtures/fatal/gen-cpp2/service1.h"
#include "thrift/compiler/test/fixtures/fatal/gen-cpp2/service1.tcc"
namespace test_cpp2 { namespace cpp_reflection {
}} // test_cpp2::cpp_reflection
namespace apache { namespace thrift {
}} // apache::thrift
| getyourguide/fbthrift | thrift/compiler/test/fixtures/fatal/gen-cpp2/service1_processmap_compact.cpp | C++ | apache-2.0 | 401 |
#!/usr/bin/env ruby
# This example demonstrates creating a server image with the Rackpace Open Cloud
require 'rubygems' #required for Ruby 1.8.x
require 'fog'
# Prompt the user on stdout and return the entered line without its trailing newline.
def get_user_input(prompt)
  print("#{prompt}: ")
  response = gets
  response.chomp
end
# Display a numbered list of servers and return the one the user selects.
# Aborts with an explanatory message when no servers exist in the region.
def select_server(servers)
  abort "\nThere are not any servers available to image in the Chicago region. Try running create_server.rb\n\n" if servers.empty?
  puts "\nSelect Server To Image:\n\n"
  servers.each.with_index do |server, index|
    puts "\t #{index}. #{server.name} [#{server.public_ip_address}]"
  end
  choice = get_user_input "\nEnter Server Number"
  servers[choice.to_i]
end
# Use username defined in ~/.fog file, if absent prompt for username.
# For more details on ~/.fog refer to http://fog.io/about/getting_started.html
# Return the Rackspace username from ~/.fog credentials, prompting when absent.
def rackspace_username
  configured = Fog.credentials[:rackspace_username]
  configured || get_user_input("Enter Rackspace Username")
end
# Use api key defined in ~/.fog file, if absent prompt for api key
# For more details on ~/.fog refer to http://fog.io/about/getting_started.html
# Return the Rackspace API key from ~/.fog credentials, prompting when absent.
def rackspace_api_key
  configured = Fog.credentials[:rackspace_api_key]
  configured || get_user_input("Enter Rackspace API key")
end
# create Next Generation Cloud Server service
# (authenticates against the Rackspace identity service on construction)
service = Fog::Compute.new({
    :provider => 'rackspace',
    :rackspace_username => rackspace_username,
    :rackspace_api_key => rackspace_api_key,
    :version => :v2, # Use Next Gen Cloud Servers
    :rackspace_region => :ord #Use Chicago Region
})
# retrieve list of servers
servers = service.servers
# prompt user for server
server = select_server(servers)
# prompt user for image name
image_name = get_user_input "Enter Image Name"
# creates image for server
# NOTE: create_image is asynchronous — the image is built in the background.
server.create_image image_name
puts "\nImage #{image_name} is being created for server #{server.name}.\n\n"
puts "To delete the image please execute the delete_image.rb script\n\n"
| luna1x/chef-server | vendor/ruby/1.9.1/gems/fog-1.15.0/lib/fog/rackspace/examples/compute_v2/create_image.rb | Ruby | apache-2.0 | 1,864 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.twitter.springboot;
import org.springframework.boot.context.properties.ConfigurationProperties;
/**
* This component integrates with Twitter to send tweets or search for tweets
* and more.
*
* Generated by camel-package-maven-plugin - do not edit this file!
*/
@ConfigurationProperties(prefix = "camel.component.twitter")
public class TwitterComponentConfiguration {

    // OAuth credentials for the Twitter API. Bound by Spring Boot from
    // camel.component.twitter.* properties (this class is generated — do not edit).
    /**
     * The access token
     */
    private String accessToken;
    /**
     * The access token secret
     */
    private String accessTokenSecret;
    /**
     * The consumer key
     */
    private String consumerKey;
    /**
     * The consumer secret
     */
    private String consumerSecret;
    /**
     * The http proxy host which can be used for the camel-twitter.
     */
    private String httpProxyHost;
    /**
     * The http proxy user which can be used for the camel-twitter.
     */
    private String httpProxyUser;
    /**
     * The http proxy password which can be used for the camel-twitter.
     */
    private String httpProxyPassword;
    /**
     * The http proxy port which can be used for the camel-twitter.
     */
    private int httpProxyPort;

    // Standard accessors required for Spring Boot relaxed-binding property population.
    public String getAccessToken() {
        return accessToken;
    }

    public void setAccessToken(String accessToken) {
        this.accessToken = accessToken;
    }

    public String getAccessTokenSecret() {
        return accessTokenSecret;
    }

    public void setAccessTokenSecret(String accessTokenSecret) {
        this.accessTokenSecret = accessTokenSecret;
    }

    public String getConsumerKey() {
        return consumerKey;
    }

    public void setConsumerKey(String consumerKey) {
        this.consumerKey = consumerKey;
    }

    public String getConsumerSecret() {
        return consumerSecret;
    }

    public void setConsumerSecret(String consumerSecret) {
        this.consumerSecret = consumerSecret;
    }

    public String getHttpProxyHost() {
        return httpProxyHost;
    }

    public void setHttpProxyHost(String httpProxyHost) {
        this.httpProxyHost = httpProxyHost;
    }

    public String getHttpProxyUser() {
        return httpProxyUser;
    }

    public void setHttpProxyUser(String httpProxyUser) {
        this.httpProxyUser = httpProxyUser;
    }

    public String getHttpProxyPassword() {
        return httpProxyPassword;
    }

    public void setHttpProxyPassword(String httpProxyPassword) {
        this.httpProxyPassword = httpProxyPassword;
    }

    public int getHttpProxyPort() {
        return httpProxyPort;
    }

    public void setHttpProxyPort(int httpProxyPort) {
        this.httpProxyPort = httpProxyPort;
    }
} | jmandawg/camel | components/camel-twitter/src/main/java/org/apache/camel/component/twitter/springboot/TwitterComponentConfiguration.java | Java | apache-2.0 | 3,496 |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.hive;
import com.facebook.presto.hive.HdfsEnvironment.HdfsContext;
import com.facebook.presto.hive.metastore.Database;
import com.facebook.presto.hive.metastore.Partition;
import com.facebook.presto.hive.metastore.SemiTransactionalHiveMetastore;
import com.facebook.presto.hive.metastore.Storage;
import com.facebook.presto.hive.metastore.Table;
import com.facebook.presto.hive.s3.PrestoS3FileSystem;
import com.facebook.presto.spi.PrestoException;
import com.facebook.presto.spi.SchemaNotFoundException;
import com.facebook.presto.spi.SchemaTableName;
import com.facebook.presto.spi.StandardErrorCode;
import com.facebook.presto.spi.block.Block;
import com.facebook.presto.spi.type.BigintType;
import com.facebook.presto.spi.type.BooleanType;
import com.facebook.presto.spi.type.CharType;
import com.facebook.presto.spi.type.DateType;
import com.facebook.presto.spi.type.DecimalType;
import com.facebook.presto.spi.type.Decimals;
import com.facebook.presto.spi.type.DoubleType;
import com.facebook.presto.spi.type.IntegerType;
import com.facebook.presto.spi.type.RealType;
import com.facebook.presto.spi.type.SmallintType;
import com.facebook.presto.spi.type.TimestampType;
import com.facebook.presto.spi.type.TinyintType;
import com.facebook.presto.spi.type.Type;
import com.facebook.presto.spi.type.VarbinaryType;
import com.facebook.presto.spi.type.VarcharType;
import com.google.common.base.Throwables;
import com.google.common.collect.ImmutableList;
import com.google.common.primitives.Shorts;
import com.google.common.primitives.SignedBytes;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FilterFileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.common.type.HiveVarchar;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.ProtectMode;
import org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter;
import org.apache.hadoop.hive.ql.io.HiveOutputFormat;
import org.apache.hadoop.hive.serde2.SerDeException;
import org.apache.hadoop.hive.serde2.Serializer;
import org.apache.hadoop.hive.serde2.io.DateWritable;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
import org.apache.hadoop.hive.serde2.io.ShortWritable;
import org.apache.hadoop.hive.serde2.io.TimestampWritable;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
import org.apache.hadoop.hive.serde2.objectinspector.SettableStructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.io.BooleanWritable;
import org.apache.hadoop.io.ByteWritable;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.FloatWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.Reporter;
import org.joda.time.DateTimeZone;
import java.io.IOException;
import java.math.BigInteger;
import java.sql.Date;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Properties;
import java.util.concurrent.TimeUnit;
import static com.facebook.presto.hive.HiveErrorCode.HIVE_DATABASE_LOCATION_ERROR;
import static com.facebook.presto.hive.HiveErrorCode.HIVE_FILESYSTEM_ERROR;
import static com.facebook.presto.hive.HiveErrorCode.HIVE_WRITER_DATA_ERROR;
import static com.facebook.presto.hive.HiveUtil.checkCondition;
import static com.facebook.presto.hive.HiveUtil.isArrayType;
import static com.facebook.presto.hive.HiveUtil.isMapType;
import static com.facebook.presto.hive.HiveUtil.isRowType;
import static com.facebook.presto.hive.metastore.MetastoreUtil.getProtectMode;
import static com.facebook.presto.hive.metastore.MetastoreUtil.verifyOnline;
import static com.facebook.presto.spi.StandardErrorCode.NOT_SUPPORTED;
import static com.facebook.presto.spi.type.Chars.isCharType;
import static com.google.common.base.Strings.padEnd;
import static java.lang.Float.intBitsToFloat;
import static java.lang.Math.toIntExact;
import static java.lang.String.format;
import static java.util.Objects.requireNonNull;
import static java.util.UUID.randomUUID;
import static java.util.stream.Collectors.toList;
import static org.apache.hadoop.hive.conf.HiveConf.ConfVars.COMPRESSRESULT;
import static org.apache.hadoop.hive.metastore.TableType.MANAGED_TABLE;
import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector;
import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector;
import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaBooleanObjectInspector;
import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaByteArrayObjectInspector;
import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaByteObjectInspector;
import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaDateObjectInspector;
import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaDoubleObjectInspector;
import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaFloatObjectInspector;
import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaIntObjectInspector;
import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaLongObjectInspector;
import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaShortObjectInspector;
import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaTimestampObjectInspector;
import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableBinaryObjectInspector;
import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableBooleanObjectInspector;
import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableByteObjectInspector;
import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableDateObjectInspector;
import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableFloatObjectInspector;
import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableHiveCharObjectInspector;
import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableIntObjectInspector;
import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableLongObjectInspector;
import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableShortObjectInspector;
import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableStringObjectInspector;
import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableTimestampObjectInspector;
import static org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.getCharTypeInfo;
import static org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.getVarcharTypeInfo;
import static org.joda.time.DateTimeZone.UTC;
public final class HiveWriteUtils
{
    @SuppressWarnings("OctalInteger")
    // rwxrwxrwx — applied to directories Presto creates so any Hadoop user can write into them.
    private static final FsPermission ALL_PERMISSIONS = new FsPermission((short) 0777);

    // Static utility class; not instantiable.
    private HiveWriteUtils()
    {
    }
public static RecordWriter createRecordWriter(Path target, JobConf conf, Properties properties, String outputFormatName)
{
try {
boolean compress = HiveConf.getBoolVar(conf, COMPRESSRESULT);
Object writer = Class.forName(outputFormatName).getConstructor().newInstance();
return ((HiveOutputFormat<?, ?>) writer).getHiveRecordWriter(conf, target, Text.class, compress, properties, Reporter.NULL);
}
catch (IOException | ReflectiveOperationException e) {
throw new PrestoException(HIVE_WRITER_DATA_ERROR, e);
}
}
@SuppressWarnings("deprecation")
public static Serializer initializeSerializer(Configuration conf, Properties properties, String serializerName)
{
try {
Serializer result = (Serializer) Class.forName(serializerName).getConstructor().newInstance();
result.initialize(conf, properties);
return result;
}
catch (SerDeException | ReflectiveOperationException e) {
throw Throwables.propagate(e);
}
}
    /**
     * Maps a Presto {@link Type} to the Hive {@link ObjectInspector} used when
     * handing Java-native values to a Hive serializer. Array, map, and row types
     * recurse into their element/field types.
     *
     * Note: despite the "Java" name, VARCHAR and CHAR deliberately return the
     * writable inspectors (values for them are produced as {@code Text} by
     * {@link #getField}).
     *
     * @throws IllegalArgumentException for any unsupported Presto type
     */
    public static ObjectInspector getJavaObjectInspector(Type type)
    {
        if (type.equals(BooleanType.BOOLEAN)) {
            return javaBooleanObjectInspector;
        }
        else if (type.equals(BigintType.BIGINT)) {
            return javaLongObjectInspector;
        }
        else if (type.equals(IntegerType.INTEGER)) {
            return javaIntObjectInspector;
        }
        else if (type.equals(SmallintType.SMALLINT)) {
            return javaShortObjectInspector;
        }
        else if (type.equals(TinyintType.TINYINT)) {
            return javaByteObjectInspector;
        }
        else if (type.equals(RealType.REAL)) {
            return javaFloatObjectInspector;
        }
        else if (type.equals(DoubleType.DOUBLE)) {
            return javaDoubleObjectInspector;
        }
        else if (type instanceof VarcharType) {
            return writableStringObjectInspector;
        }
        else if (type instanceof CharType) {
            return writableHiveCharObjectInspector;
        }
        else if (type.equals(VarbinaryType.VARBINARY)) {
            return javaByteArrayObjectInspector;
        }
        else if (type.equals(DateType.DATE)) {
            return javaDateObjectInspector;
        }
        else if (type.equals(TimestampType.TIMESTAMP)) {
            return javaTimestampObjectInspector;
        }
        else if (type instanceof DecimalType) {
            DecimalType decimalType = (DecimalType) type;
            return getPrimitiveJavaObjectInspector(new DecimalTypeInfo(decimalType.getPrecision(), decimalType.getScale()));
        }
        else if (isArrayType(type)) {
            return ObjectInspectorFactory.getStandardListObjectInspector(getJavaObjectInspector(type.getTypeParameters().get(0)));
        }
        else if (isMapType(type)) {
            ObjectInspector keyObjectInspector = getJavaObjectInspector(type.getTypeParameters().get(0));
            ObjectInspector valueObjectInspector = getJavaObjectInspector(type.getTypeParameters().get(1));
            return ObjectInspectorFactory.getStandardMapObjectInspector(keyObjectInspector, valueObjectInspector);
        }
        else if (isRowType(type)) {
            // Struct inspector built from the row's field names and recursively-resolved field inspectors.
            return ObjectInspectorFactory.getStandardStructObjectInspector(
                    type.getTypeSignature().getParameters().stream()
                            .map(parameter -> parameter.getNamedTypeSignature().getName())
                            .collect(toList()),
                    type.getTypeParameters().stream()
                            .map(HiveWriteUtils::getJavaObjectInspector)
                            .collect(toList()));
        }
        throw new IllegalArgumentException("unsupported type: " + type);
    }
    /**
     * Extracts the value at {@code position} from {@code block} and converts it to the
     * Java/Hadoop representation expected by Hive serializers ({@code Text} for strings,
     * {@code java.sql.Date}/{@code Timestamp} for temporal types, unmodifiable
     * collections for arrays/maps/rows). Returns {@code null} for SQL NULL.
     *
     * @throws PrestoException with {@code NOT_SUPPORTED} for an unsupported type
     */
    public static Object getField(Type type, Block block, int position)
    {
        if (block.isNull(position)) {
            return null;
        }
        if (BooleanType.BOOLEAN.equals(type)) {
            return type.getBoolean(block, position);
        }
        if (BigintType.BIGINT.equals(type)) {
            return type.getLong(block, position);
        }
        if (IntegerType.INTEGER.equals(type)) {
            return (int) type.getLong(block, position);
        }
        if (SmallintType.SMALLINT.equals(type)) {
            return (short) type.getLong(block, position);
        }
        if (TinyintType.TINYINT.equals(type)) {
            return (byte) type.getLong(block, position);
        }
        if (RealType.REAL.equals(type)) {
            // REAL is stored as the float's bit pattern in a long.
            return intBitsToFloat((int) type.getLong(block, position));
        }
        if (DoubleType.DOUBLE.equals(type)) {
            return type.getDouble(block, position);
        }
        if (type instanceof VarcharType) {
            return new Text(type.getSlice(block, position).getBytes());
        }
        if (type instanceof CharType) {
            CharType charType = (CharType) type;
            // CHAR(n) values are space-padded to their declared length on write.
            return new Text(padEnd(type.getSlice(block, position).toStringUtf8(), charType.getLength(), ' '));
        }
        if (VarbinaryType.VARBINARY.equals(type)) {
            return type.getSlice(block, position).getBytes();
        }
        if (DateType.DATE.equals(type)) {
            // DATE is days since epoch; shift from UTC to the JVM default zone because
            // java.sql.Date interprets its millis in the default zone.
            long days = type.getLong(block, position);
            return new Date(UTC.getMillisKeepLocal(DateTimeZone.getDefault(), TimeUnit.DAYS.toMillis(days)));
        }
        if (TimestampType.TIMESTAMP.equals(type)) {
            long millisUtc = type.getLong(block, position);
            return new Timestamp(millisUtc);
        }
        if (type instanceof DecimalType) {
            DecimalType decimalType = (DecimalType) type;
            return getHiveDecimal(decimalType, block, position);
        }
        if (isArrayType(type)) {
            Type elementType = type.getTypeParameters().get(0);
            Block arrayBlock = block.getObject(position, Block.class);
            List<Object> list = new ArrayList<>(arrayBlock.getPositionCount());
            for (int i = 0; i < arrayBlock.getPositionCount(); i++) {
                Object element = getField(elementType, arrayBlock, i);
                list.add(element);
            }
            return Collections.unmodifiableList(list);
        }
        if (isMapType(type)) {
            Type keyType = type.getTypeParameters().get(0);
            Type valueType = type.getTypeParameters().get(1);
            Block mapBlock = block.getObject(position, Block.class);
            // Map blocks interleave keys and values: even positions are keys, odd are values.
            Map<Object, Object> map = new HashMap<>();
            for (int i = 0; i < mapBlock.getPositionCount(); i += 2) {
                Object key = getField(keyType, mapBlock, i);
                Object value = getField(valueType, mapBlock, i + 1);
                map.put(key, value);
            }
            return Collections.unmodifiableMap(map);
        }
        if (isRowType(type)) {
            Block rowBlock = block.getObject(position, Block.class);
            List<Type> fieldTypes = type.getTypeParameters();
            checkCondition(fieldTypes.size() == rowBlock.getPositionCount(), StandardErrorCode.GENERIC_INTERNAL_ERROR, "Expected row value field count does not match type field count");
            List<Object> row = new ArrayList<>(rowBlock.getPositionCount());
            for (int i = 0; i < rowBlock.getPositionCount(); i++) {
                Object element = getField(fieldTypes.get(i), rowBlock, i);
                row.add(element);
            }
            return Collections.unmodifiableList(row);
        }
        throw new PrestoException(NOT_SUPPORTED, "unsupported type: " + type);
    }
    /**
     * Verifies that the table accepts inserts: it must be a managed table (unless
     * writes to non-managed tables are enabled), online, not read-only, not sorted,
     * and not skewed.
     *
     * @throws PrestoException or HiveReadOnlyException when any check fails
     */
    public static void checkTableIsWritable(Table table, boolean writesToNonManagedTablesEnabled)
    {
        if (!writesToNonManagedTablesEnabled && !table.getTableType().equals(MANAGED_TABLE.toString())) {
            throw new PrestoException(NOT_SUPPORTED, "Cannot write to non-managed Hive table");
        }

        checkWritable(
                new SchemaTableName(table.getDatabaseName(), table.getTableName()),
                Optional.empty(),
                getProtectMode(table),
                table.getParameters(),
                table.getStorage());
    }
    /**
     * Verifies that the given partition accepts inserts (online, not read-only,
     * not sorted, not skewed). See {@link #checkTableIsWritable} for the table-level check.
     */
    public static void checkPartitionIsWritable(String partitionName, Partition partition)
    {
        checkWritable(
                new SchemaTableName(partition.getDatabaseName(), partition.getTableName()),
                Optional.of(partitionName),
                getProtectMode(partition),
                partition.getParameters(),
                partition.getStorage());
    }
    /**
     * Shared writability checks for tables and partitions. {@code partitionName}
     * is present only for the partition case and is used to enrich error messages.
     */
    private static void checkWritable(
            SchemaTableName tableName,
            Optional<String> partitionName,
            ProtectMode protectMode,
            Map<String, String> parameters,
            Storage storage)
    {
        String tablePartitionDescription = "Table '" + tableName + "'";
        if (partitionName.isPresent()) {
            tablePartitionDescription += " partition '" + partitionName.get() + "'";
        }

        // verify online
        verifyOnline(tableName, partitionName, protectMode, parameters);

        // verify not read only
        if (protectMode.readOnly) {
            throw new HiveReadOnlyException(tableName, partitionName);
        }

        // verify sorting
        if (storage.isSorted()) {
            throw new PrestoException(NOT_SUPPORTED, format("Inserting into bucketed sorted tables is not supported. %s", tablePartitionDescription));
        }

        // verify skew info
        if (storage.isSkewed()) {
            throw new PrestoException(NOT_SUPPORTED, format("Inserting into bucketed tables with skew is not supported. %s", tablePartitionDescription));
        }
    }
    /**
     * Computes the default storage path for a new table: the database location
     * with the table name appended. The database location must be set, and — on
     * non-S3 filesystems — must already exist and be a directory (S3 has no real
     * directories, so those checks are skipped there).
     *
     * @throws PrestoException with {@code HIVE_DATABASE_LOCATION_ERROR} when the
     *         database location is unset, missing, or not a directory
     */
    public static Path getTableDefaultLocation(HdfsContext context, SemiTransactionalHiveMetastore metastore, HdfsEnvironment hdfsEnvironment, String schemaName, String tableName)
    {
        Optional<String> location = getDatabase(metastore, schemaName).getLocation();
        if (!location.isPresent() || location.get().isEmpty()) {
            throw new PrestoException(HIVE_DATABASE_LOCATION_ERROR, format("Database '%s' location is not set", schemaName));
        }

        Path databasePath = new Path(location.get());
        if (!isS3FileSystem(context, hdfsEnvironment, databasePath)) {
            if (!pathExists(context, hdfsEnvironment, databasePath)) {
                throw new PrestoException(HIVE_DATABASE_LOCATION_ERROR, format("Database '%s' location does not exist: %s", schemaName, databasePath));
            }
            if (!isDirectory(context, hdfsEnvironment, databasePath)) {
                throw new PrestoException(HIVE_DATABASE_LOCATION_ERROR, format("Database '%s' location is not a directory: %s", schemaName, databasePath));
            }
        }

        return new Path(databasePath, tableName);
    }
    // Looks up the database in the metastore; fails with SchemaNotFoundException when absent.
    private static Database getDatabase(SemiTransactionalHiveMetastore metastore, String database)
    {
        return metastore.getDatabase(database).orElseThrow(() -> new SchemaNotFoundException(database));
    }
public static boolean pathExists(HdfsContext context, HdfsEnvironment hdfsEnvironment, Path path)
{
try {
return hdfsEnvironment.getFileSystem(context, path).exists(path);
}
catch (IOException e) {
throw new PrestoException(HIVE_FILESYSTEM_ERROR, "Failed checking path: " + path, e);
}
}
    /**
     * Returns whether the filesystem backing {@code path} (after unwrapping any
     * {@link FilterFileSystem} layers) is Presto's S3 filesystem.
     */
    public static boolean isS3FileSystem(HdfsContext context, HdfsEnvironment hdfsEnvironment, Path path)
    {
        try {
            return getRawFileSystem(hdfsEnvironment.getFileSystem(context, path)) instanceof PrestoS3FileSystem;
        }
        catch (IOException e) {
            throw new PrestoException(HIVE_FILESYSTEM_ERROR, "Failed checking path: " + path, e);
        }
    }
    /**
     * Returns whether {@code path} lives on Hadoop's federated ViewFileSystem.
     * The class is matched by name rather than referenced directly because older
     * Hadoop versions do not ship it.
     */
    public static boolean isViewFileSystem(HdfsContext context, HdfsEnvironment hdfsEnvironment, Path path)
    {
        try {
            // Hadoop 1.x does not have the ViewFileSystem class
            return getRawFileSystem(hdfsEnvironment.getFileSystem(context, path))
                    .getClass().getName().equals("org.apache.hadoop.fs.viewfs.ViewFileSystem");
        }
        catch (IOException e) {
            throw new PrestoException(HIVE_FILESYSTEM_ERROR, "Failed checking path: " + path, e);
        }
    }
private static FileSystem getRawFileSystem(FileSystem fileSystem)
{
if (fileSystem instanceof FilterFileSystem) {
return getRawFileSystem(((FilterFileSystem) fileSystem).getRawFileSystem());
}
return fileSystem;
}
    /**
     * Returns whether {@code path} exists and is a directory.
     */
    private static boolean isDirectory(HdfsContext context, HdfsEnvironment hdfsEnvironment, Path path)
    {
        try {
            return hdfsEnvironment.getFileSystem(context, path).isDirectory(path);
        }
        catch (IOException e) {
            throw new PrestoException(HIVE_FILESYSTEM_ERROR, "Failed checking path: " + path, e);
        }
    }
    /**
     * Creates (and returns) a unique staging directory on the same filesystem as
     * {@code targetPath}, so the final commit can be a cheap same-filesystem rename.
     */
    public static Path createTemporaryPath(HdfsContext context, HdfsEnvironment hdfsEnvironment, Path targetPath)
    {
        // use a per-user temporary directory to avoid permission problems
        String temporaryPrefix = "/tmp/presto-" + context.getIdentity().getUser();

        // use relative temporary directory on ViewFS
        // (absolute paths may cross mount points, breaking the same-filesystem rename)
        if (isViewFileSystem(context, hdfsEnvironment, targetPath)) {
            temporaryPrefix = ".hive-staging";
        }

        // create a temporary directory on the same filesystem
        Path temporaryRoot = new Path(targetPath, temporaryPrefix);
        Path temporaryPath = new Path(temporaryRoot, randomUUID().toString());

        createDirectory(context, hdfsEnvironment, temporaryPath);

        return temporaryPath;
    }
    /**
     * Creates {@code path} (including parents) with world-writable permissions,
     * then re-applies the permissions explicitly because mkdirs is subject to the
     * process umask.
     *
     * @throws PrestoException with {@code HIVE_FILESYSTEM_ERROR} on failure
     */
    public static void createDirectory(HdfsContext context, HdfsEnvironment hdfsEnvironment, Path path)
    {
        try {
            if (!hdfsEnvironment.getFileSystem(context, path).mkdirs(path, ALL_PERMISSIONS)) {
                throw new IOException("mkdirs returned false");
            }
        }
        catch (IOException e) {
            throw new PrestoException(HIVE_FILESYSTEM_ERROR, "Failed to create directory: " + path, e);
        }

        // explicitly set permission since the default umask overrides it on creation
        try {
            hdfsEnvironment.getFileSystem(context, path).setPermission(path, ALL_PERMISSIONS);
        }
        catch (IOException e) {
            throw new PrestoException(HIVE_FILESYSTEM_ERROR, "Failed to set permission on directory: " + path, e);
        }
    }
    /**
     * Returns whether Presto can write values of the given Hive type.
     */
    public static boolean isWritableType(HiveType hiveType)
    {
        return isWritableType(hiveType.getTypeInfo());
    }
    /**
     * Recursive writability check: primitives are checked against the supported
     * set; maps, lists, and structs are writable only if all of their component
     * types are. Unknown categories (e.g. UNION) are not writable.
     */
    private static boolean isWritableType(TypeInfo typeInfo)
    {
        switch (typeInfo.getCategory()) {
            case PRIMITIVE:
                PrimitiveCategory primitiveCategory = ((PrimitiveTypeInfo) typeInfo).getPrimitiveCategory();
                return isWritablePrimitiveType(primitiveCategory);
            case MAP:
                MapTypeInfo mapTypeInfo = (MapTypeInfo) typeInfo;
                return isWritableType(mapTypeInfo.getMapKeyTypeInfo()) && isWritableType(mapTypeInfo.getMapValueTypeInfo());
            case LIST:
                ListTypeInfo listTypeInfo = (ListTypeInfo) typeInfo;
                return isWritableType(listTypeInfo.getListElementTypeInfo());
            case STRUCT:
                StructTypeInfo structTypeInfo = (StructTypeInfo) typeInfo;
                return structTypeInfo.getAllStructFieldTypeInfos().stream().allMatch(HiveWriteUtils::isWritableType);
        }
        return false;
    }
    /**
     * Whitelist of Hive primitive categories Presto knows how to write.
     * Anything not listed (e.g. INTERVAL types, UNKNOWN, VOID) falls through to false.
     */
    private static boolean isWritablePrimitiveType(PrimitiveCategory primitiveCategory)
    {
        switch (primitiveCategory) {
            case BOOLEAN:
            case LONG:
            case INT:
            case SHORT:
            case BYTE:
            case FLOAT:
            case DOUBLE:
            case STRING:
            case DATE:
            case TIMESTAMP:
            case BINARY:
            case DECIMAL:
            case VARCHAR:
            case CHAR:
                return true;
        }
        return false;
    }
public static List<ObjectInspector> getRowColumnInspectors(List<Type> types)
{
return types.stream()
.map(HiveWriteUtils::getRowColumnInspector)
.collect(toList());
}
    /**
     * Maps a Presto {@link Type} to the writable Hive {@link ObjectInspector} used
     * for top-level row columns. Complex types (array/map/row) delegate to
     * {@link #getJavaObjectInspector}.
     *
     * @throws IllegalArgumentException for unsupported types, including bounded
     *         VARCHAR longer than Hive's maximum that is not unbounded
     */
    public static ObjectInspector getRowColumnInspector(Type type)
    {
        if (type.equals(BooleanType.BOOLEAN)) {
            return writableBooleanObjectInspector;
        }

        if (type.equals(BigintType.BIGINT)) {
            return writableLongObjectInspector;
        }

        if (type.equals(IntegerType.INTEGER)) {
            return writableIntObjectInspector;
        }

        if (type.equals(SmallintType.SMALLINT)) {
            return writableShortObjectInspector;
        }

        if (type.equals(TinyintType.TINYINT)) {
            return writableByteObjectInspector;
        }

        if (type.equals(RealType.REAL)) {
            return writableFloatObjectInspector;
        }

        if (type.equals(DoubleType.DOUBLE)) {
            return writableDoubleObjectInspector;
        }

        if (type instanceof VarcharType) {
            VarcharType varcharType = (VarcharType) type;
            int varcharLength = varcharType.getLength();
            // VARCHAR columns with the length less than or equal to 65535 are supported natively by Hive
            if (varcharLength <= HiveVarchar.MAX_VARCHAR_LENGTH) {
                return getPrimitiveWritableObjectInspector(getVarcharTypeInfo(varcharLength));
            }
            // Unbounded VARCHAR is not supported by Hive.
            // Values for such columns must be stored as STRING in Hive
            else if (varcharLength == VarcharType.UNBOUNDED_LENGTH) {
                return writableStringObjectInspector;
            }
            // Note: bounded VARCHAR longer than MAX_VARCHAR_LENGTH falls through to
            // the IllegalArgumentException at the bottom.
        }

        if (isCharType(type)) {
            CharType charType = (CharType) type;
            int charLength = charType.getLength();
            return getPrimitiveWritableObjectInspector(getCharTypeInfo(charLength));
        }

        if (type.equals(VarbinaryType.VARBINARY)) {
            return writableBinaryObjectInspector;
        }

        if (type.equals(DateType.DATE)) {
            return writableDateObjectInspector;
        }

        if (type.equals(TimestampType.TIMESTAMP)) {
            return writableTimestampObjectInspector;
        }

        if (type instanceof DecimalType) {
            DecimalType decimalType = (DecimalType) type;
            return getPrimitiveWritableObjectInspector(new DecimalTypeInfo(decimalType.getPrecision(), decimalType.getScale()));
        }

        if (isArrayType(type) || isMapType(type) || isRowType(type)) {
            return getJavaObjectInspector(type);
        }

        throw new IllegalArgumentException("unsupported type: " + type);
    }
    /**
     * Creates a {@link FieldSetter} that copies values of the given Presto {@code type}
     * from a {@code Block} position into {@code field} of the mutable Hive {@code row}.
     *
     * @throws IllegalArgumentException if the type is not supported
     */
    public static FieldSetter createFieldSetter(SettableStructObjectInspector rowInspector, Object row, StructField field, Type type)
    {
        if (type.equals(BooleanType.BOOLEAN)) {
            return new BooleanFieldSetter(rowInspector, row, field);
        }
        if (type.equals(BigintType.BIGINT)) {
            return new BigintFieldBuilder(rowInspector, row, field);
        }
        if (type.equals(IntegerType.INTEGER)) {
            return new IntFieldSetter(rowInspector, row, field);
        }
        if (type.equals(SmallintType.SMALLINT)) {
            return new SmallintFieldSetter(rowInspector, row, field);
        }
        if (type.equals(TinyintType.TINYINT)) {
            return new TinyintFieldSetter(rowInspector, row, field);
        }
        if (type.equals(RealType.REAL)) {
            return new FloatFieldSetter(rowInspector, row, field);
        }
        if (type.equals(DoubleType.DOUBLE)) {
            return new DoubleFieldSetter(rowInspector, row, field);
        }
        if (type instanceof VarcharType) {
            return new VarcharFieldSetter(rowInspector, row, field, type);
        }
        if (type instanceof CharType) {
            return new CharFieldSetter(rowInspector, row, field, type);
        }
        if (type.equals(VarbinaryType.VARBINARY)) {
            return new BinaryFieldSetter(rowInspector, row, field);
        }
        if (type.equals(DateType.DATE)) {
            return new DateFieldSetter(rowInspector, row, field);
        }
        if (type.equals(TimestampType.TIMESTAMP)) {
            return new TimestampFieldSetter(rowInspector, row, field);
        }
        if (type instanceof DecimalType) {
            DecimalType decimalType = (DecimalType) type;
            return new DecimalFieldSetter(rowInspector, row, field, decimalType);
        }
        if (isArrayType(type)) {
            // type parameter 0 is the element type
            return new ArrayFieldSetter(rowInspector, row, field, type.getTypeParameters().get(0));
        }
        if (isMapType(type)) {
            // type parameters are (key, value)
            return new MapFieldSetter(rowInspector, row, field, type.getTypeParameters().get(0), type.getTypeParameters().get(1));
        }
        if (isRowType(type)) {
            return new RowFieldSetter(rowInspector, row, field, type.getTypeParameters());
        }
        throw new IllegalArgumentException("unsupported type: " + type);
    }
    /**
     * Copies a single value from a Presto {@code Block} position into one field of a
     * mutable Hive row object. Concrete subclasses reuse a single Writable instance
     * across calls, so the row must be consumed before the next {@link #setField} call.
     */
    public abstract static class FieldSetter
    {
        protected final SettableStructObjectInspector rowInspector;
        protected final Object row;
        protected final StructField field;
        protected FieldSetter(SettableStructObjectInspector rowInspector, Object row, StructField field)
        {
            this.rowInspector = requireNonNull(rowInspector, "rowInspector is null");
            this.row = requireNonNull(row, "row is null");
            this.field = requireNonNull(field, "field is null");
        }
        /** Writes the value at {@code position} of {@code block} into the target field. */
        public abstract void setField(Block block, int position);
    }
    /** Writes BOOLEAN values into the target field, reusing one {@link BooleanWritable}. */
    private static class BooleanFieldSetter
            extends FieldSetter
    {
        private final BooleanWritable value = new BooleanWritable();
        public BooleanFieldSetter(SettableStructObjectInspector rowInspector, Object row, StructField field)
        {
            super(rowInspector, row, field);
        }
        @Override
        public void setField(Block block, int position)
        {
            value.set(BooleanType.BOOLEAN.getBoolean(block, position));
            rowInspector.setStructFieldData(row, field, value);
        }
    }
    /**
     * Writes BIGINT values into the target field, reusing one {@link LongWritable}.
     * NOTE(review): named "Builder" unlike the sibling "*FieldSetter" classes; kept
     * as-is since {@code createFieldSetter} references this name.
     */
    private static class BigintFieldBuilder
            extends FieldSetter
    {
        private final LongWritable value = new LongWritable();
        public BigintFieldBuilder(SettableStructObjectInspector rowInspector, Object row, StructField field)
        {
            super(rowInspector, row, field);
        }
        @Override
        public void setField(Block block, int position)
        {
            value.set(BigintType.BIGINT.getLong(block, position));
            rowInspector.setStructFieldData(row, field, value);
        }
    }
    /** Writes INTEGER values; {@code toIntExact} guards the long-to-int narrowing. */
    private static class IntFieldSetter
            extends FieldSetter
    {
        private final IntWritable value = new IntWritable();
        public IntFieldSetter(SettableStructObjectInspector rowInspector, Object row, StructField field)
        {
            super(rowInspector, row, field);
        }
        @Override
        public void setField(Block block, int position)
        {
            value.set(toIntExact(IntegerType.INTEGER.getLong(block, position)));
            rowInspector.setStructFieldData(row, field, value);
        }
    }
    /** Writes SMALLINT values; {@code Shorts.checkedCast} rejects out-of-range longs. */
    private static class SmallintFieldSetter
            extends FieldSetter
    {
        private final ShortWritable value = new ShortWritable();
        public SmallintFieldSetter(SettableStructObjectInspector rowInspector, Object row, StructField field)
        {
            super(rowInspector, row, field);
        }
        @Override
        public void setField(Block block, int position)
        {
            value.set(Shorts.checkedCast(SmallintType.SMALLINT.getLong(block, position)));
            rowInspector.setStructFieldData(row, field, value);
        }
    }
    /** Writes TINYINT values; {@code SignedBytes.checkedCast} rejects out-of-range longs. */
    private static class TinyintFieldSetter
            extends FieldSetter
    {
        private final ByteWritable value = new ByteWritable();
        public TinyintFieldSetter(SettableStructObjectInspector rowInspector, Object row, StructField field)
        {
            super(rowInspector, row, field);
        }
        @Override
        public void setField(Block block, int position)
        {
            value.set(SignedBytes.checkedCast(TinyintType.TINYINT.getLong(block, position)));
            rowInspector.setStructFieldData(row, field, value);
        }
    }
    /** Writes DOUBLE values into the target field, reusing one {@link DoubleWritable}. */
    private static class DoubleFieldSetter
            extends FieldSetter
    {
        private final DoubleWritable value = new DoubleWritable();
        public DoubleFieldSetter(SettableStructObjectInspector rowInspector, Object row, StructField field)
        {
            super(rowInspector, row, field);
        }
        @Override
        public void setField(Block block, int position)
        {
            value.set(DoubleType.DOUBLE.getDouble(block, position));
            rowInspector.setStructFieldData(row, field, value);
        }
    }
    /**
     * Writes REAL values. Presto stores REAL as the float's IEEE bit pattern inside a
     * long, hence the cast-and-{@code intBitsToFloat} decode.
     */
    private static class FloatFieldSetter
            extends FieldSetter
    {
        private final FloatWritable value = new FloatWritable();
        public FloatFieldSetter(SettableStructObjectInspector rowInspector, Object row, StructField field)
        {
            super(rowInspector, row, field);
        }
        @Override
        public void setField(Block block, int position)
        {
            value.set(intBitsToFloat((int) RealType.REAL.getLong(block, position)));
            rowInspector.setStructFieldData(row, field, value);
        }
    }
    /** Writes VARCHAR values as UTF-8 {@link Text}, reusing one Text instance. */
    private static class VarcharFieldSetter
            extends FieldSetter
    {
        private final Text value = new Text();
        // the concrete VarcharType; used to read the slice from the block
        private final Type type;
        public VarcharFieldSetter(SettableStructObjectInspector rowInspector, Object row, StructField field, Type type)
        {
            super(rowInspector, row, field);
            this.type = type;
        }
        @Override
        public void setField(Block block, int position)
        {
            value.set(type.getSlice(block, position).getBytes());
            rowInspector.setStructFieldData(row, field, value);
        }
    }
    /**
     * Writes CHAR values as {@link Text}. The slice bytes are written as-is;
     * CHAR padding semantics are presumably handled by the reader/type — confirm.
     */
    private static class CharFieldSetter
            extends FieldSetter
    {
        private final Text value = new Text();
        // the concrete CharType; used to read the slice from the block
        private final Type type;
        public CharFieldSetter(SettableStructObjectInspector rowInspector, Object row, StructField field, Type type)
        {
            super(rowInspector, row, field);
            this.type = type;
        }
        @Override
        public void setField(Block block, int position)
        {
            value.set(type.getSlice(block, position).getBytes());
            rowInspector.setStructFieldData(row, field, value);
        }
    }
    /** Writes VARBINARY values into the target field, reusing one {@link BytesWritable}. */
    private static class BinaryFieldSetter
            extends FieldSetter
    {
        private final BytesWritable value = new BytesWritable();
        public BinaryFieldSetter(SettableStructObjectInspector rowInspector, Object row, StructField field)
        {
            super(rowInspector, row, field);
        }
        @Override
        public void setField(Block block, int position)
        {
            byte[] bytes = VarbinaryType.VARBINARY.getSlice(block, position).getBytes();
            value.set(bytes, 0, bytes.length);
            rowInspector.setStructFieldData(row, field, value);
        }
    }
    /**
     * Writes DATE values, reusing one {@link DateWritable}. The long from the block is
     * passed to {@code DateWritable.set(int)} — presumably days since the epoch; confirm.
     */
    private static class DateFieldSetter
            extends FieldSetter
    {
        private final DateWritable value = new DateWritable();
        public DateFieldSetter(SettableStructObjectInspector rowInspector, Object row, StructField field)
        {
            super(rowInspector, row, field);
        }
        @Override
        public void setField(Block block, int position)
        {
            value.set(toIntExact(DateType.DATE.getLong(block, position)));
            rowInspector.setStructFieldData(row, field, value);
        }
    }
    /**
     * Writes TIMESTAMP values, reusing one {@link TimestampWritable}. The block encodes
     * the value as epoch milliseconds (UTC), per the local variable name.
     */
    private static class TimestampFieldSetter
            extends FieldSetter
    {
        private final TimestampWritable value = new TimestampWritable();
        public TimestampFieldSetter(SettableStructObjectInspector rowInspector, Object row, StructField field)
        {
            super(rowInspector, row, field);
        }
        @Override
        public void setField(Block block, int position)
        {
            long millisUtc = TimestampType.TIMESTAMP.getLong(block, position);
            value.setTime(millisUtc);
            rowInspector.setStructFieldData(row, field, value);
        }
    }
    /** Writes DECIMAL values via {@link #getHiveDecimal}, reusing one {@link HiveDecimalWritable}. */
    private static class DecimalFieldSetter
            extends FieldSetter
    {
        private final HiveDecimalWritable value = new HiveDecimalWritable();
        // carries precision/scale and short/long decimal encoding information
        private final DecimalType decimalType;
        public DecimalFieldSetter(SettableStructObjectInspector rowInspector, Object row, StructField field, DecimalType decimalType)
        {
            super(rowInspector, row, field);
            this.decimalType = decimalType;
        }
        @Override
        public void setField(Block block, int position)
        {
            value.set(getHiveDecimal(decimalType, block, position));
            rowInspector.setStructFieldData(row, field, value);
        }
    }
private static HiveDecimal getHiveDecimal(DecimalType decimalType, Block block, int position)
{
BigInteger unscaledValue;
if (decimalType.isShort()) {
unscaledValue = BigInteger.valueOf(decimalType.getLong(block, position));
}
else {
unscaledValue = Decimals.decodeUnscaledValue(decimalType.getSlice(block, position));
}
return HiveDecimal.create(unscaledValue, decimalType.getScale());
}
    /** Writes ARRAY values by materializing each element into a fresh {@link List}. */
    private static class ArrayFieldSetter
            extends FieldSetter
    {
        private final Type elementType;
        public ArrayFieldSetter(SettableStructObjectInspector rowInspector, Object row, StructField field, Type elementType)
        {
            super(rowInspector, row, field);
            this.elementType = requireNonNull(elementType, "elementType is null");
        }
        @Override
        public void setField(Block block, int position)
        {
            // the array is stored as a nested block; each position is one element
            Block arrayBlock = block.getObject(position, Block.class);
            List<Object> list = new ArrayList<>(arrayBlock.getPositionCount());
            for (int i = 0; i < arrayBlock.getPositionCount(); i++) {
                Object element = getField(elementType, arrayBlock, i);
                list.add(element);
            }
            rowInspector.setStructFieldData(row, field, list);
        }
    }
    /**
     * Writes MAP values. The nested block stores keys and values interleaved:
     * even positions are keys, odd positions are the corresponding values.
     */
    private static class MapFieldSetter
            extends FieldSetter
    {
        private final Type keyType;
        private final Type valueType;
        public MapFieldSetter(SettableStructObjectInspector rowInspector, Object row, StructField field, Type keyType, Type valueType)
        {
            super(rowInspector, row, field);
            this.keyType = requireNonNull(keyType, "keyType is null");
            this.valueType = requireNonNull(valueType, "valueType is null");
        }
        @Override
        public void setField(Block block, int position)
        {
            Block mapBlock = block.getObject(position, Block.class);
            // positionCount counts keys and values, so entry count is half of it
            Map<Object, Object> map = new HashMap<>(mapBlock.getPositionCount() * 2);
            for (int i = 0; i < mapBlock.getPositionCount(); i += 2) {
                Object key = getField(keyType, mapBlock, i);
                Object value = getField(valueType, mapBlock, i + 1);
                map.put(key, value);
            }
            rowInspector.setStructFieldData(row, field, map);
        }
    }
    /** Writes ROW values by materializing each struct field into a fresh {@link List}. */
    private static class RowFieldSetter
            extends FieldSetter
    {
        private final List<Type> fieldTypes;
        public RowFieldSetter(SettableStructObjectInspector rowInspector, Object row, StructField field, List<Type> fieldTypes)
        {
            super(rowInspector, row, field);
            this.fieldTypes = ImmutableList.copyOf(fieldTypes);
        }
        @Override
        public void setField(Block block, int position)
        {
            Block rowBlock = block.getObject(position, Block.class);
            // TODO reuse row object and use FieldSetters, like we do at the top level
            // Ideally, we'd use the same recursive structure starting from the top, but
            // this requires modeling row types in the same way we model table rows
            // (multiple blocks vs all fields packed in a single block)
            List<Object> value = new ArrayList<>(fieldTypes.size());
            for (int i = 0; i < fieldTypes.size(); i++) {
                Object element = getField(fieldTypes.get(i), rowBlock, i);
                value.add(element);
            }
            rowInspector.setStructFieldData(row, field, value);
        }
    }
}
| gh351135612/presto | presto-hive/src/main/java/com/facebook/presto/hive/HiveWriteUtils.java | Java | apache-2.0 | 42,741 |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!--NewPage-->
<HTML>
<HEAD>
<!-- Generated by javadoc (build 1.6.0_31) on Thu Jan 31 02:04:59 UTC 2013 -->
<TITLE>
org.apache.hadoop.streaming.io (Hadoop 1.1.2 API)
</TITLE>
<META NAME="date" CONTENT="2013-01-31">
<LINK REL ="stylesheet" TYPE="text/css" HREF="../../../../../stylesheet.css" TITLE="Style">
</HEAD>
<BODY BGCOLOR="white">
<FONT size="+1" CLASS="FrameTitleFont">
<A HREF="../../../../../org/apache/hadoop/streaming/io/package-summary.html" target="classFrame">org.apache.hadoop.streaming.io</A></FONT>
<TABLE BORDER="0" WIDTH="100%" SUMMARY="">
<TR>
<TD NOWRAP><FONT size="+1" CLASS="FrameHeadingFont">
Classes</FONT>
<FONT CLASS="FrameItemFont">
<BR>
<A HREF="IdentifierResolver.html" title="class in org.apache.hadoop.streaming.io" target="classFrame">IdentifierResolver</A>
<BR>
<A HREF="InputWriter.html" title="class in org.apache.hadoop.streaming.io" target="classFrame">InputWriter</A>
<BR>
<A HREF="OutputReader.html" title="class in org.apache.hadoop.streaming.io" target="classFrame">OutputReader</A>
<BR>
<A HREF="RawBytesInputWriter.html" title="class in org.apache.hadoop.streaming.io" target="classFrame">RawBytesInputWriter</A>
<BR>
<A HREF="RawBytesOutputReader.html" title="class in org.apache.hadoop.streaming.io" target="classFrame">RawBytesOutputReader</A>
<BR>
<A HREF="TextInputWriter.html" title="class in org.apache.hadoop.streaming.io" target="classFrame">TextInputWriter</A>
<BR>
<A HREF="TextOutputReader.html" title="class in org.apache.hadoop.streaming.io" target="classFrame">TextOutputReader</A>
<BR>
<A HREF="TypedBytesInputWriter.html" title="class in org.apache.hadoop.streaming.io" target="classFrame">TypedBytesInputWriter</A>
<BR>
<A HREF="TypedBytesOutputReader.html" title="class in org.apache.hadoop.streaming.io" target="classFrame">TypedBytesOutputReader</A></FONT></TD>
</TR>
</TABLE>
</BODY>
</HTML>
| markkerzner/nn_kove | hadoop/docs/api/org/apache/hadoop/streaming/io/package-frame.html | HTML | apache-2.0 | 1,975 |
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.siyeh.ig.logging;
import com.intellij.codeInspection.CommonQuickFixBundle;
import com.intellij.codeInspection.ProblemDescriptor;
import com.intellij.codeInspection.ui.ListTable;
import com.intellij.codeInspection.ui.ListWrappingTableModel;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.InvalidDataException;
import com.intellij.openapi.util.WriteExternalException;
import com.intellij.psi.*;
import com.intellij.psi.util.PsiUtil;
import com.intellij.util.xmlb.Accessor;
import com.intellij.util.xmlb.SerializationFilterBase;
import com.intellij.util.xmlb.XmlSerializer;
import com.siyeh.InspectionGadgetsBundle;
import com.siyeh.ig.BaseInspection;
import com.siyeh.ig.BaseInspectionVisitor;
import com.siyeh.ig.InspectionGadgetsFix;
import com.siyeh.ig.PsiReplacementUtil;
import com.siyeh.ig.psiutils.ClassUtils;
import com.siyeh.ig.psiutils.CommentTracker;
import com.siyeh.ig.ui.UiUtils;
import org.jdom.Element;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
/**
 * Reports logger fields initialized with a class literal (or {@code X.class.getName()})
 * that does not match the class containing the field, e.g.
 * {@code Logger.getLogger(OtherClass.class)} inside {@code MyClass}.
 * The recognized logger-factory classes and factory-method names are configurable as
 * two parallel, index-aligned lists.
 */
public class LoggerInitializedWithForeignClassInspection extends BaseInspection {
  // Parallel defaults: the i-th method name belongs to the i-th factory class.
  @NonNls private static final String DEFAULT_FACTORY_CLASS_NAMES =
    // Log4J 1
    "org.apache.log4j.Logger," +
    // SLF4J
    "org.slf4j.LoggerFactory," +
    // Apache Commons Logging
    "org.apache.commons.logging.LogFactory," +
    // Java Util Logging
    "java.util.logging.Logger," +
    // Log4J 2
    "org.apache.logging.log4j.LogManager";
  @NonNls private static final String DEFAULT_FACTORY_METHOD_NAMES =
    //Log4J 1
    "getLogger," +
    // SLF4J
    "getLogger," +
    // Apache Commons Logging
    "getLog," +
    // Java Util Logging
    "getLogger," +
    // Log4J 2
    "getLogger";
  // Parsed, index-aligned views of the two comma-separated strings below.
  protected final List<String> loggerFactoryClassNames = new ArrayList<>();
  protected final List<String> loggerFactoryMethodNames = new ArrayList<>();
  @SuppressWarnings("PublicField")
  public String loggerClassName = DEFAULT_FACTORY_CLASS_NAMES;
  @SuppressWarnings("PublicField")
  public @NonNls String loggerFactoryMethodName = DEFAULT_FACTORY_METHOD_NAMES;
  // Instance initializer: populate the lists from the serialized string defaults.
  {
    parseString(loggerClassName, loggerFactoryClassNames);
    parseString(loggerFactoryMethodName, loggerFactoryMethodNames);
  }
  @Override
  public JComponent createOptionsPanel() {
    // Two-column table editing the factory class / factory method pairs.
    final ListTable table = new ListTable(
      new ListWrappingTableModel(Arrays.asList(loggerFactoryClassNames, loggerFactoryMethodNames),
                                 InspectionGadgetsBundle.message("logger.factory.class.name"),
                                 InspectionGadgetsBundle.message("logger.factory.method.name")));
    final String title = InspectionGadgetsBundle.message("logger.initialized.with.foreign.options.title");
    return UiUtils.createAddRemoveTreeClassChooserPanel(table, title);
  }
  @Override
  @NotNull
  protected String buildErrorString(Object... infos) {
    return InspectionGadgetsBundle.message("logger.initialized.with.foreign.class.problem.descriptor");
  }
  @Override
  @Nullable
  protected InspectionGadgetsFix buildFix(Object... infos) {
    // infos[0] is the name of the class containing the logger (set in registerError).
    return new LoggerInitializedWithForeignClassFix((String)infos[0]);
  }
  @Override
  public BaseInspectionVisitor buildVisitor() {
    return new LoggerInitializedWithForeignClassVisitor();
  }
  @Override
  public void readSettings(@NotNull Element element) throws InvalidDataException {
    super.readSettings(element);
    parseString(loggerClassName, loggerFactoryClassNames);
    parseString(loggerFactoryMethodName, loggerFactoryMethodNames);
    // Self-heal broken/legacy settings: the lists must stay index-aligned.
    if (loggerFactoryClassNames.size() != loggerFactoryMethodNames.size() || loggerFactoryClassNames.isEmpty()) {
      parseString(DEFAULT_FACTORY_CLASS_NAMES, loggerFactoryClassNames);
      parseString(DEFAULT_FACTORY_METHOD_NAMES, loggerFactoryMethodNames);
    }
  }
  @Override
  public void writeSettings(@NotNull Element element) throws WriteExternalException {
    loggerClassName = formatString(loggerFactoryClassNames);
    loggerFactoryMethodName = formatString(loggerFactoryMethodNames);
    if (loggerFactoryMethodName.equals(DEFAULT_FACTORY_METHOD_NAMES) && loggerClassName.equals(DEFAULT_FACTORY_CLASS_NAMES)) {
      // to prevent changing inspection profile with new default, which is mistakenly always written because of bug in serialization below.
      loggerFactoryMethodName = "getLogger," +
                                "getLogger," +
                                "getLog," +
                                "getLogger";
      // these broken settings are restored correctly in readSettings()
    }
    XmlSerializer.serializeInto(this, element, new SerializationFilterBase() {
      @Override
      protected boolean accepts(@NotNull Accessor accessor, @NotNull Object bean, @Nullable Object beanValue) {
        final @NonNls String factoryName = accessor.getName();
        if ("loggerClassName".equals(factoryName) && DEFAULT_FACTORY_CLASS_NAMES.equals(beanValue)) {
          return false;
        }
        // NOTE(review): the serialized field is named "loggerFactoryMethodName"
        // (singular), so this plural check presumably never matches and the
        // method-name list is always written — apparently the "bug in
        // serialization" the workaround above compensates for. Fixing the typo
        // would change serialized profiles; confirm compatibility before changing.
        if ("loggerFactoryMethodNames".equals(factoryName) && DEFAULT_FACTORY_METHOD_NAMES.equals(beanValue)) {
          return false;
        }
        return true;
      }
    });
  }
  /** Quick fix replacing the foreign class literal with {@code <containingClass>.class}. */
  private static final class LoggerInitializedWithForeignClassFix extends InspectionGadgetsFix {
    private final String newClassName;
    private LoggerInitializedWithForeignClassFix(String newClassName) {
      this.newClassName = newClassName;
    }
    @Override
    @NotNull
    public String getName() {
      return CommonQuickFixBundle.message("fix.replace.with.x", newClassName+".class");
    }
    @NotNull
    @Override
    public String getFamilyName() {
      return InspectionGadgetsBundle.message("logger.initialized.with.foreign.class.fix.family.name");
    }
    @Override
    protected void doFix(Project project, ProblemDescriptor descriptor) {
      final PsiElement element = descriptor.getPsiElement();
      if (!(element instanceof PsiClassObjectAccessExpression)) {
        return;
      }
      final PsiClassObjectAccessExpression classObjectAccessExpression = (PsiClassObjectAccessExpression)element;
      PsiReplacementUtil.replaceExpression(classObjectAccessExpression, newClassName + ".class", new CommentTracker());
    }
  }
  private class LoggerInitializedWithForeignClassVisitor extends BaseInspectionVisitor {
    @Override
    public void visitClassObjectAccessExpression(PsiClassObjectAccessExpression expression) {
      super.visitClassObjectAccessExpression(expression);
      PsiElement parent = expression.getParent();
      // Accept both Foo.class and Foo.class.getName() as the factory argument.
      if (parent instanceof PsiReferenceExpression) {
        final PsiReferenceExpression referenceExpression = (PsiReferenceExpression)parent;
        if (!expression.equals(referenceExpression.getQualifierExpression())) {
          return;
        }
        @NonNls final String name = referenceExpression.getReferenceName();
        if (!"getName".equals(name)) {
          return;
        }
        final PsiElement grandParent = referenceExpression.getParent();
        if (!(grandParent instanceof PsiMethodCallExpression)) {
          return;
        }
        final PsiMethodCallExpression methodCallExpression = (PsiMethodCallExpression)grandParent;
        final PsiExpressionList list = methodCallExpression.getArgumentList();
        if (!list.isEmpty()) {
          return;
        }
        parent = methodCallExpression.getParent();
      }
      // The class literal (or getName() call) must be the sole argument of a call.
      if (!(parent instanceof PsiExpressionList)) {
        return;
      }
      final PsiElement grandParent = parent.getParent();
      if (!(grandParent instanceof PsiMethodCallExpression)) {
        return;
      }
      final PsiMethodCallExpression methodCallExpression = (PsiMethodCallExpression)grandParent;
      final PsiExpressionList argumentList = methodCallExpression.getArgumentList();
      final PsiExpression[] expressions = argumentList.getExpressions();
      if (expressions.length != 1) {
        return;
      }
      // Compare against the outermost named class; anonymous classes delegate to their owner.
      PsiClass containingClass = ClassUtils.getContainingClass(expression);
      while (containingClass instanceof PsiAnonymousClass) {
        containingClass = ClassUtils.getContainingClass(containingClass);
      }
      if (containingClass == null) {
        return;
      }
      final String containingClassName = containingClass.getName();
      if (containingClassName == null) {
        return;
      }
      final PsiMethod method = methodCallExpression.resolveMethod();
      if (method == null) {
        return;
      }
      final PsiClass aClass = method.getContainingClass();
      if (aClass == null) {
        return;
      }
      // The called method must be one of the configured factory class/method pairs.
      final String className = aClass.getQualifiedName();
      final int index = loggerFactoryClassNames.indexOf(className);
      if (index < 0) {
        return;
      }
      final PsiReferenceExpression methodExpression = methodCallExpression.getMethodExpression();
      final String referenceName = methodExpression.getReferenceName();
      final String loggerFactoryMethodName = loggerFactoryMethodNames.get(index);
      if (!loggerFactoryMethodName.equals(referenceName)) {
        return;
      }
      final PsiTypeElement operand = expression.getOperand();
      final PsiClass initializerClass = PsiUtil.resolveClassInClassTypeOnly(operand.getType());
      if (initializerClass == null) {
        return;
      }
      // Same class: this is the expected pattern, nothing to report.
      if (containingClass.equals(initializerClass)) {
        return;
      }
      registerError(expression, containingClassName);
    }
  }
}
| dahlstrom-g/intellij-community | plugins/InspectionGadgets/src/com/siyeh/ig/logging/LoggerInitializedWithForeignClassInspection.java | Java | apache-2.0 | 9,735 |
Ext.data.ArrayReader=Ext.extend(Ext.data.JsonReader,{readRecords:function(c){var b=this.meta?this.meta.id:null;var h=this.recordType,q=h.prototype.fields;var e=[];var s=c;for(var m=0;m<s.length;m++){var d=s[m];var u={};var a=((b||b===0)&&d[b]!==undefined&&d[b]!==""?d[b]:null);for(var l=0,w=q.length;l<w;l++){var r=q.items[l];var g=r.mapping!==undefined&&r.mapping!==null?r.mapping:l;var t=d[g]!==undefined?d[g]:r.defaultValue;t=r.convert(t,d);u[r.name]=t}var p=new h(u,a);p.json=d;e[e.length]=p}return{records:e,totalRecords:e.length}}}); | Ariah-Group/Continuity | src/main/webapp/javascripts/ext/build/data/ArrayReader-min.js | JavaScript | apache-2.0 | 539 |
package com.cedarsoftware.util;
import org.junit.Assert;
import org.junit.Test;
import java.lang.reflect.Constructor;
import java.lang.reflect.Modifier;
import java.net.InetAddress;
/**
 * Tests for InetAddressUtilities.
*
* @author Kenneth Partlow
* <br>
* Copyright (c) Cedar Software LLC
* <br><br>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <br><br>
* http://www.apache.org/licenses/LICENSE-2.0
* <br><br>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
public class TestInetAddressUtilities
{
    // NOTE(review): method name says "MapUtilities" but it tests the
    // InetAddressUtilities constructor; kept as-is to preserve the public API.
    @Test
    public void testMapUtilitiesConstructor() throws Exception
    {
        // The utility class should have a private constructor that is still invocable via reflection.
        Constructor<InetAddressUtilities> con = InetAddressUtilities.class.getDeclaredConstructor();
        Assert.assertEquals(Modifier.PRIVATE, con.getModifiers() & Modifier.PRIVATE);
        con.setAccessible(true);
        Assert.assertNotNull(con.newInstance());
    }
    @Test
    public void testGetIpAddress() throws Exception {
        // Should match the raw address bytes of the local host.
        byte[] bytes = InetAddress.getLocalHost().getAddress();
        Assert.assertArrayEquals(bytes, InetAddressUtilities.getIpAddress());
    }
    @Test
    public void testGetLocalHost() throws Exception {
        // Should match the host name of the local host.
        String name = InetAddress.getLocalHost().getHostName();
        Assert.assertEquals(name, InetAddressUtilities.getHostName());
    }
}
| pluto-build/java-util | src/test/java/com/cedarsoftware/util/TestInetAddressUtilities.java | Java | apache-2.0 | 1,810 |
// Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.java.psi.formatter.java;
import com.intellij.ide.highlighter.JavaFileType;
import com.intellij.openapi.util.TextRange;
import com.intellij.psi.codeStyle.CommonCodeStyleSettings;
import com.intellij.util.IncorrectOperationException;
import static com.intellij.formatting.FormatterTestUtils.Action.REFORMAT_WITH_CONTEXT;
/**
* Is intended to hold specific java formatting tests for alignment settings (
* {@code Project Settings - Code Style - Alignment and Braces}).
*
* @author Denis Zhdanov
*/
public class JavaFormatterAlignmentTest extends AbstractJavaFormatterTest {
  /** Chained calls wrap as needed and align under the first call (IDEA-30369). */
  public void testChainedMethodsAlignment() {
    // Inspired by IDEA-30369
    getSettings().ALIGN_MULTILINE_CHAINED_METHODS = true;
    getSettings().METHOD_CALL_CHAIN_WRAP = CommonCodeStyleSettings.WRAP_AS_NEEDED;
    getSettings().getRootSettings().getIndentOptions(JavaFileType.INSTANCE).CONTINUATION_INDENT_SIZE = 8;
    doTest();
  }
  /** A call chained off a field access keeps its existing alignment (IDEA-79806). */
  public void testMethodAndChainedField() {
    // Inspired by IDEA-79806
    getSettings().ALIGN_MULTILINE_CHAINED_METHODS = true;
    doMethodTest(
      "Holder.INSTANCE\n" +
      "                .foo();",
      "Holder.INSTANCE\n" +
      "        .foo();"
    );
  }
  /** End-of-line comments inside a chain do not break chained-call alignment. */
  public void testChainedMethodWithComments() {
    getSettings().ALIGN_MULTILINE_CHAINED_METHODS = true;
    doMethodTest("AAAAA.b()\n" +
                 ".c() // comment after line\n" +
                 ".d()\n" +
                 ".e();",
                 "AAAAA.b()\n" +
                 "     .c() // comment after line\n" +
                 "     .d()\n" +
                 "     .e();");
  }
  /** A block comment between chained calls is aligned with the chain. */
  public void testChainedMethodWithBlockComment() {
    getSettings().ALIGN_MULTILINE_CHAINED_METHODS = true;
    doTextTest("class X {\n" +
               "    public void test() {\n" +
               "        AAAAAA.b()\n" +
               ".c()\n" +
               ".d()\n" +
               "          /* simple block comment */\n" +
               ".e();\n" +
               "    }\n" +
               "}",
               "class X {\n" +
               "    public void test() {\n" +
               "        AAAAAA.b()\n" +
               "              .c()\n" +
               "              .d()\n" +
               "              /* simple block comment */\n" +
               "              .e();\n" +
               "    }\n" +
               "}");
  }
  /** A commented-out annotation between live annotations keeps its column (IDEA-53942). */
  public void testMultipleMethodAnnotationsCommentedInTheMiddle() {
    getSettings().BLANK_LINES_AFTER_CLASS_HEADER = 1;
    getSettings().getRootSettings().getIndentOptions(JavaFileType.INSTANCE).INDENT_SIZE = 4;
    // Inspired by IDEA-53942
    doTextTest(
      "public class Test {\n" +
      "          @Override\n" +
      "//       @XmlElement(name = \"Document\", required = true, type = DocumentType.class)\n" +
      "          @XmlTransient\n" +
      "          void foo() {\n" +
      "}\n" +
      "}",
      "public class Test {\n" +
      "\n" +
      "    @Override\n" +
      "//       @XmlElement(name = \"Document\", required = true, type = DocumentType.class)\n" +
      "    @XmlTransient\n" +
      "    void foo() {\n" +
      "    }\n" +
      "}"
    );
  }
  /** The ':' of a wrapped ternary aligns under the '?' (IDEADEV-13018). */
  public void testTernaryOperator() {
    // Inspired by IDEADEV-13018
    getSettings().ALIGN_MULTILINE_TERNARY_OPERATION = true;
    doMethodTest("int i = a ? x\n" + ": y;", "int i = a ? x\n" + "          : y;");
  }
  /** Smart tabs: tabs for indent, spaces for alignment inside call arguments (IDEADEV-20144). */
  public void testMethodCallArgumentsAndSmartTabs() throws IncorrectOperationException {
    // Inspired by IDEADEV-20144.
    getSettings().ALIGN_MULTILINE_PARAMETERS_IN_CALLS = true;
    getSettings().getRootSettings().getIndentOptions(JavaFileType.INSTANCE).SMART_TABS = true;
    getSettings().getRootSettings().getIndentOptions(JavaFileType.INSTANCE).USE_TAB_CHARACTER = true;
    doTextTest("class Foo {\n" +
               "    void foo() {\n" +
               "        bar(new Object[] {\n" +
               "            \"hello1\",\n" +
               "            \"hello2\", add(\"hello3\",\n" +
               "                           \"world\")\n" +
               "});" +
               "    }}", "class Foo {\n" +
                         "\tvoid foo() {\n" +
                         "\t\tbar(new Object[]{\n" +
                         "\t\t\t\t\"hello1\",\n" +
                         "\t\t\t\t\"hello2\", add(\"hello3\",\n" +
                         "\t\t\t\t              \"world\")\n" +
                         "\t\t});\n" +
                         "\t}\n" +
                         "}");
  }
  /** Annotation array initializer wraps every element and aligns them (IDEADEV-16136). */
  public void testArrayInitializer() throws IncorrectOperationException {
    // Inspired by IDEADEV-16136
    getSettings().ARRAY_INITIALIZER_WRAP = CommonCodeStyleSettings.WRAP_ALWAYS;
    getSettings().ALIGN_MULTILINE_ARRAY_INITIALIZER_EXPRESSION = true;
    doTextTest(
      "@SuppressWarnings({\"UseOfSystemOutOrSystemErr\", \"AssignmentToCollectionOrArrayFieldFromParameter\", \"ReturnOfCollectionOrArrayField\"})\n" +
      "public class Some {\n" +
      "}",
      "@SuppressWarnings({\"UseOfSystemOutOrSystemErr\",\n" +
      "        \"AssignmentToCollectionOrArrayFieldFromParameter\",\n" +
      "        \"ReturnOfCollectionOrArrayField\"})\n" +
      "public class Some {\n" +
      "}");
  }
  /** Closing parenthesis on its own line aligns with parameters/arguments (IDEA-53013, IDEA-55306). */
  public void testMethodBrackets() {
    // Inspired by IDEA-53013
    getSettings().ALIGN_MULTILINE_METHOD_BRACKETS = true;
    getSettings().ALIGN_MULTILINE_PARENTHESIZED_EXPRESSION = false;
    getSettings().ALIGN_MULTILINE_PARAMETERS = true;
    getSettings().ALIGN_MULTILINE_PARAMETERS_IN_CALLS = true;
    getSettings().CALL_PARAMETERS_RPAREN_ON_NEXT_LINE = true;
    getSettings().METHOD_PARAMETERS_RPAREN_ON_NEXT_LINE = true;
    doClassTest(
      "public void foo(int i,\n" +
      "                  int j) {\n" +
      "}\n" +
      "\n" +
      "  public void bar() {\n" +
      "    foo(1,\n" +
      "        2);\n" +
      "  }",
      "public void foo(int i,\n" +
      "                int j\n" +
      ") {\n" +
      "}\n" +
      "\n" +
      "public void bar() {\n" +
      "    foo(1,\n" +
      "        2\n" +
      "    );\n" +
      "}"
    );
    // Inspired by IDEA-55306
    getSettings().ALIGN_MULTILINE_METHOD_BRACKETS = false;
    getSettings().CALL_PARAMETERS_RPAREN_ON_NEXT_LINE = false;
    String method =
      "executeCommand(new Command<Boolean>() {\n" +
      "    public Boolean run() throws ExecutionException {\n" +
      "        return doInterrupt();\n" +
      "    }\n" +
      "});";
    doMethodTest(method, method);
  }
  /** Grouped field declarations align types, names and '=' in columns (IDEA-55147). */
  public void testFieldInColumnsAlignment() {
    // Inspired by IDEA-55147
    getSettings().ALIGN_GROUP_FIELD_DECLARATIONS = true;
    getSettings().FIELD_ANNOTATION_WRAP = CommonCodeStyleSettings.DO_NOT_WRAP;
    getSettings().VARIABLE_ANNOTATION_WRAP = CommonCodeStyleSettings.DO_NOT_WRAP;
    doTextTest(
      "public class FormattingTest {\n" +
      "\n" +
      "    int start = 1;\n" +
      "    double end = 2;\n" +
      "\n" +
      "    int i2 = 1;\n" +
      "    double dd2,\n" +
      "        dd3 = 2;\n" +
      "\n" +
      "    // asd\n" +
      "    char ccc3 = 'a';\n" +
      "    double ddd31, ddd32 = 1;\n" +
      "\n" +
      "    private\n" +
      "    final String s4 = \"\";\n" +
      "    private\n" +
      "    transient int i4 = 1;\n" +
      "\n" +
      "    private final String s5 = \"xxx\";\n" +
      "    private transient int iiii5 = 1;\n" +
      "    /*sdf*/\n" +
      "    @MyAnnotation(value = 1, text = 2) float f5 = 1;\n" +
      "}",
      "public class FormattingTest {\n" +
      "\n" +
      "    int    start = 1;\n" +
      "    double end   = 2;\n" +
      "\n" +
      "    int    i2 = 1;\n" +
      "    double dd2,\n" +
      "            dd3 = 2;\n" +
      "\n" +
      "    // asd\n" +
      "    char   ccc3 = 'a';\n" +
      "    double ddd31, ddd32 = 1;\n" +
      "\n" +
      "    private\n" +
      "    final String s4 = \"\";\n" +
      "    private\n" +
      "    transient int i4 = 1;\n" +
      "\n" +
      "    private final     String s5    = \"xxx\";\n" +
      "    private transient int    iiii5 = 1;\n" +
      "    /*sdf*/\n" +
      "    @MyAnnotation(value = 1, text = 2) float f5 = 1;\n" +
      "}"
    );
  }
  /** Columnar field alignment also works with tab indentation (IDEA-56242). */
  public void testTabsAndFieldsInColumnsAlignment() {
    // Inspired by IDEA-56242
    getSettings().ALIGN_GROUP_FIELD_DECLARATIONS = true;
    getIndentOptions().USE_TAB_CHARACTER = true;
    doTextTest(
      "public class Test {\n" +
      "\tprivate Long field2 = null;\n" +
      "\tprivate final Object field1 = null;\n" +
      "\tprivate int i = 1;\n" +
      "}",
      "public class Test {\n" +
      "\tprivate       Long   field2 = null;\n" +
      "\tprivate final Object field1 = null;\n" +
      "\tprivate       int    i      = 1;\n" +
      "}"
    );
  }
public void testDoNotAlignIfNotEnabled() {
getSettings().ALIGN_GROUP_FIELD_DECLARATIONS = false;
doTextTest(
"public class Test {\n" +
"private Long field2 = null;\n" +
"private final Object field1 = null;\n" +
"private int i = 1;\n" +
"}",
"public class Test {\n" +
" private Long field2 = null;\n" +
" private final Object field1 = null;\n" +
" private int i = 1;\n" +
"}"
);
}
  public void testAnnotatedAndNonAnnotatedFieldsInColumnsAlignment() {
    // Inspired by IDEA-60237
    // Fields with annotations on their own line must not be column-aligned
    // with the surrounding plain fields; expected output equals the input.
    getSettings().ALIGN_GROUP_FIELD_DECLARATIONS = true;
    doTextTest(
      "public class Test {\n" +
      "    @Id\n" +
      "    private final String name;\n" +
      "    @Column(length = 2 * 1024 * 1024 /* 2 MB */)\n" +
      "    private String value;\n" +
      "    private boolean required;\n" +
      "    private String unsetValue;\n" +
      "}",

      "public class Test {\n" +
      "    @Id\n" +
      "    private final String name;\n" +
      "    @Column(length = 2 * 1024 * 1024 /* 2 MB */)\n" +
      "    private String value;\n" +
      "    private boolean required;\n" +
      "    private String unsetValue;\n" +
      "}"
    );
  }
  public void testAlignThrowsKeyword() {
    // Inspired by IDEA-63820
    // Exercises both values of ALIGN_THROWS_KEYWORD: aligned puts 'throws'
    // at the method column, otherwise it gets continuation indent.
    getSettings().ALIGN_THROWS_KEYWORD = true;
    doClassTest(
      "public void test()\n" +
      "                 throws Exception {}",
      "public void test()\n" +
      "throws Exception {\n" +
      "}"
    );

    getSettings().ALIGN_THROWS_KEYWORD = false;
    doClassTest(
      "public void test()\n" +
      "                 throws Exception {}",
      "public void test()\n" +
      "        throws Exception {\n" +
      "}"
    );
  }
  public void testAlignResourceList() {
    // try-with-resources: with alignment the second resource lines up after
    // "try (", without it the resource gets plain continuation indent.
    getSettings().KEEP_SIMPLE_BLOCKS_IN_ONE_LINE = true;

    getSettings().ALIGN_MULTILINE_RESOURCES = true;
    doMethodTest("try (MyResource r1 = null;\n" +
                 "MyResource r2 = null) { }",
                 "try (MyResource r1 = null;\n" +
                 "     MyResource r2 = null) { }");

    getSettings().ALIGN_MULTILINE_RESOURCES = false;
    doMethodTest("try (MyResource r1 = null;\n" +
                 "MyResource r2 = null) { }",
                 "try (MyResource r1 = null;\n" +
                 "        MyResource r2 = null) { }");
  }
  public void testChainedMethodCallsAfterFieldsChain_WithAlignment() {
    // When a call chain is prefixed by a field chain, wrapped segments fall
    // back to continuation indent even with alignment enabled; a pure call
    // chain (last case) aligns on the first dot.
    getSettings().ALIGN_MULTILINE_CHAINED_METHODS = true;
    getSettings().METHOD_CALL_CHAIN_WRAP = CommonCodeStyleSettings.WRAP_ALWAYS;
    doMethodTest(
      "a.current.current.current.getThis().getThis().getThis();",
      "a.current.current.current.getThis()\n" +
      "        .getThis()\n" +
      "        .getThis();"
    );
    doMethodTest(
      "a.current.current.current.getThis().getThis().getThis().current.getThis().getThis().getThis().getThis();",
      "a.current.current.current.getThis()\n" +
      "        .getThis()\n" +
      "        .getThis().current.getThis()\n" +
      "        .getThis()\n" +
      "        .getThis()\n" +
      "        .getThis();"
    );

    String onlyMethodCalls = "getThis().getThis().getThis();";
    String formatedMethodCalls = "getThis().getThis()\n" +
                                 "         .getThis();";

    doMethodTest(onlyMethodCalls, formatedMethodCalls);
  }
public void testChainedMethodCallsAfterFieldsChain_WithoutAlignment() {
getSettings().ALIGN_MULTILINE_CHAINED_METHODS = false;
getSettings().METHOD_CALL_CHAIN_WRAP = CommonCodeStyleSettings.WRAP_ALWAYS;
doMethodTest(
"a.current.current.current.getThis().getThis().getThis();",
"a.current.current.current.getThis()\n" +
" .getThis()\n" +
" .getThis();"
);
}
  public void testChainedMethodCalls_WithChopDownIfLongOption() {
    // "Chop down if long": the chain is fully wrapped only when it exceeds
    // the right margin (50), and left intact under a wider margin (80).
    getSettings().ALIGN_MULTILINE_CHAINED_METHODS = true;
    getSettings().METHOD_CALL_CHAIN_WRAP = CommonCodeStyleSettings.WRAP_ON_EVERY_ITEM; // it's equal to "Chop down if long"
    getSettings().RIGHT_MARGIN = 50;
    String before = "a.current.current.getThis().getThis().getThis().getThis().getThis();";
    doMethodTest(
      before,
      "a.current.current.getThis()\n" +
      "        .getThis()\n" +
      "        .getThis()\n" +
      "        .getThis()\n" +
      "        .getThis();"
    );

    getSettings().RIGHT_MARGIN = 80;
    doMethodTest(before, before);
  }
  public void testChainedMethodCalls_WithWrapIfNeededOption() {
    // WRAP_AS_NEEDED only breaks the chain where the margin (50) is exceeded;
    // the alignment flag makes no difference after a field chain, and a wide
    // margin (75) leaves the text untouched.
    getSettings().ALIGN_MULTILINE_CHAINED_METHODS = false;
    getSettings().METHOD_CALL_CHAIN_WRAP = CommonCodeStyleSettings.WRAP_AS_NEEDED;
    getSettings().RIGHT_MARGIN = 50;
    String before = "a.current.current.getThis().getThis().getThis().getThis();";

    doMethodTest(
      before,
      "a.current.current.getThis().getThis()\n" +
      "        .getThis().getThis();"
    );

    getSettings().ALIGN_MULTILINE_CHAINED_METHODS = true;
    doMethodTest(
      before,
      "a.current.current.getThis().getThis()\n" +
      "        .getThis().getThis();"
    );

    getSettings().RIGHT_MARGIN = 75;
    doMethodTest(before, before);
  }
  public void testAlignMethodCalls_PassedAsParameters_InMethodCall() {
    // Call arguments that are themselves calls stay aligned under the first
    // argument; already-aligned input must be left untouched (idempotence).
    getSettings().ALIGN_MULTILINE_PARAMETERS_IN_CALLS = true;
    doMethodTest(
      "test(call1(),\n" +
      "     call2(),\n" +
      "     call3());\n",
      "test(call1(),\n" +
      "     call2(),\n" +
      "     call3());\n"
    );
  }
public void testLocalVariablesAlignment() {
getSettings().ALIGN_CONSECUTIVE_VARIABLE_DECLARATIONS = true;
doMethodTest(
"int a = 2;\n" +
"String myString = \"my string\"",
"int a = 2;\n" +
"String myString = \"my string\""
);
}
  public void testAlignOnlyDeclarationStatements() {
    // Only declaration statements participate in a group; the plain
    // assignment breaks the group, so 'long stamp' is not aligned with it.
    getSettings().ALIGN_CONSECUTIVE_VARIABLE_DECLARATIONS = true;
    doMethodTest(
      "  String s;\n" +
      "       int a = 2;\n" +
      "s = \"abs\";\n" +
      "long stamp = 12;",
      "String s;\n" +
      "int    a = 2;\n" +
      "s = \"abs\";\n" +
      "long stamp = 12;"
    );
  }
public void testAlignFieldDeclarations() {
getSettings().ALIGN_GROUP_FIELD_DECLARATIONS = true;
doClassTest(
"char a = '2';\n" +
"int aaaaa = 3;\n" +
"String b;",
"char a = '2';\n" +
"int aaaaa = 3;\n" +
"String b;");
}
public void testAlignVarDeclarations() {
getSettings().ALIGN_CONSECUTIVE_VARIABLE_DECLARATIONS = true;
doMethodTest(
"char a = '2';\n" +
"int aaaaa = 3;\n" +
"String b;",
"char a = '2';\n" +
"int aaaaa = 3;\n" +
"String b;");
}
public void testDoNotAlignWhenBlankLine() {
getSettings().ALIGN_CONSECUTIVE_VARIABLE_DECLARATIONS = true;
doMethodTest(
"int a = 2;\n" +
"\n" +
"String myString = \"my string\"",
"int a = 2;\n" +
"\n" +
"String myString = \"my string\""
);
}
  public void testDoNotAlignWhenGroupInterrupted() {
    // A non-declaration statement between the declarations interrupts the
    // group, so no column padding is applied.
    getSettings().ALIGN_CONSECUTIVE_VARIABLE_DECLARATIONS = true;
    doMethodTest(
      "int a = 2;\n" +
      "System.out.println(\"hi!\")\n" +
      "String myString = \"my string\"",
      "int a = 2;\n" +
      "System.out.println(\"hi!\")\n" +
      "String myString = \"my string\""
    );
  }
public void testDoNotAlignMultiDeclarations() {
getSettings().ALIGN_CONSECUTIVE_VARIABLE_DECLARATIONS = true;
doMethodTest(
" int a, b = 2;\n" +
"String myString = \"my string\"",
"int a, b = 2;\n" +
"String myString = \"my string\""
);
}
  public void testDoNotAlignMultilineParams() {
    // A declaration whose initializer spans several lines (anonymous class)
    // must not be column-aligned with its single-line neighbours, regardless
    // of whether it appears first, last or in the middle of the group.
    getSettings().ALIGN_CONSECUTIVE_VARIABLE_DECLARATIONS = true;

    doMethodTest(
      "int a = 12;\n" +
      "  Runnable runnable = new Runnable() {\n" +
      "    @Override\n" +
      "    public void run() {\n" +
      "      System.out.println(\"AAA!\");\n" +
      "    }\n" +
      "};",
      "int a = 12;\n" +
      "Runnable runnable = new Runnable() {\n" +
      "    @Override\n" +
      "    public void run() {\n" +
      "        System.out.println(\"AAA!\");\n" +
      "    }\n" +
      "};"
    );

    doMethodTest(
      "  Runnable runnable = new Runnable() {\n" +
      "    @Override\n" +
      "    public void run() {\n" +
      "      System.out.println(\"AAA!\");\n" +
      "    }\n" +
      "};\n" +
      "int c = 12;",
      "Runnable runnable = new Runnable() {\n" +
      "    @Override\n" +
      "    public void run() {\n" +
      "        System.out.println(\"AAA!\");\n" +
      "    }\n" +
      "};\n" +
      "int c = 12;"
    );

    doMethodTest(
      "  int ac = 99;\n" +
      "Runnable runnable = new Runnable() {\n" +
      "    @Override\n" +
      "    public void run() {\n" +
      "      System.out.println(\"AAA!\");\n" +
      "    }\n" +
      "};\n" +
      "int c = 12;",
      "int ac = 99;\n" +
      "Runnable runnable = new Runnable() {\n" +
      "    @Override\n" +
      "    public void run() {\n" +
      "        System.out.println(\"AAA!\");\n" +
      "    }\n" +
      "};\n" +
      "int c = 12;"
    );
  }
  public void testDoNotAlign_IfFirstMultiline() {
    // When the first declaration itself wraps onto a second line, it must not
    // seed an alignment group; the following single-line vars stay as-is.
    getSettings().ALIGN_CONSECUTIVE_VARIABLE_DECLARATIONS = true;
    doMethodTest(
      "int\n" +
      "        i = 0;\n" +
      "int[] a = new int[]{1, 2, 0x0052, 0x0053, 0x0054};\n" +
      "int var1 = 1;\n" +
      "int var2 = 2;",
      "int\n" +
      "        i = 0;\n" +
      "int[] a = new int[]{1, 2, 0x0052, 0x0053, 0x0054};\n" +
      "int var1 = 1;\n" +
      "int var2 = 2;"
    );
  }
  public void testAlign_InMethod() {
    // Declaration groups inside a method body are aligned whether they appear
    // before or after unrelated statements.
    getSettings().ALIGN_CONSECUTIVE_VARIABLE_DECLARATIONS = true;

    doClassTest(
      "public void run() {\n" +
      "\n" +
      "                int a = 2;\n" +
      "            String superString = \"\";\n" +
      "\n" +
      "     test(call1(), call2(), call3());\n" +
      "    }",

      "public void run() {\n" +
      "\n" +
      "    int    a           = 2;\n" +
      "    String superString = \"\";\n" +
      "\n" +
      "    test(call1(), call2(), call3());\n" +
      "}"
    );

    doClassTest(
      "public void run() {\n" +
      "\n" +
      "        test(call1(), call2(), call3());\n" +
      "\n" +
      "        int a = 2;\n" +
      "     String superString = \"\";\n" +
      "}",
      "public void run() {\n" +
      "\n" +
      "    test(call1(), call2(), call3());\n" +
      "\n" +
      "    int    a           = 2;\n" +
      "    String superString = \"\";\n" +
      "}");
  }
  public void test_Shift_All_AlignedParameters() {
    // Range-reformat (line 2 only) of already-aligned parameters must not
    // disturb the other parameter lines.
    myLineRange = new TextRange(2, 2);
    getSettings().ALIGN_MULTILINE_PARAMETERS_IN_CALLS = true;
    doTextTest(
      REFORMAT_WITH_CONTEXT,
      "public class Test {\n" +
      "\n" +
      "    public void fooooo(String foo,\n" +
      "                       String booo,\n" +
      "                       String kakadoo) {\n" +
      "\n" +
      "    }\n" +
      "\n" +
      "}",

      "public class Test {\n" +
      "\n" +
      "    public void fooooo(String foo,\n" +
      "                       String booo,\n" +
      "                       String kakadoo) {\n" +
      "\n" +
      "    }\n" +
      "\n" +
      "}"
    );
  }
  public void test_Align_UnselectedField_IfNeeded() {
    // Reformatting only line 2 still pads the untouched sibling field so the
    // group stays aligned.
    myLineRange = new TextRange(2, 2);
    getSettings().ALIGN_GROUP_FIELD_DECLARATIONS = true;
    doTextTest(
      REFORMAT_WITH_CONTEXT,
      "public class Test {\n" +
      "    public int i = 1;\n" +
      "    public String iiiiiiiiii = 2;\n" +
      "}",
      "public class Test {\n" +
      "    public int    i          = 1;\n" +
      "    public String iiiiiiiiii = 2;\n" +
      "}"
    );
  }
  public void test_Align_UnselectedVariable_IfNeeded() {
    // Same as the field case above, but for local variables: the line outside
    // the reformatted range is padded to keep the group aligned.
    myLineRange = new TextRange(3, 3);
    getSettings().ALIGN_CONSECUTIVE_VARIABLE_DECLARATIONS = true;
    doTextTest(
      REFORMAT_WITH_CONTEXT,
      "public class Test {\n" +
      "    public void test() {\n" +
      "        int s = 2;\n" +
      "        String sssss = 3;\n" +
      "    }\n" +
      "}",
      "public class Test {\n" +
      "    public void test() {\n" +
      "        int    s     = 2;\n" +
      "        String sssss = 3;\n" +
      "    }\n" +
      "}"
    );
  }
  public void test_Align_ConsecutiveVars_InsideIfBlock() {
    // Declaration alignment also applies to groups nested in an if-block.
    getSettings().ALIGN_CONSECUTIVE_VARIABLE_DECLARATIONS = true;
    doMethodTest(
      "if (a > 2) {\n" +
      "int a=2;\n" +
      "String name=\"Yarik\";\n" +
      "}\n",
      "if (a > 2) {\n" +
      "    int    a    = 2;\n" +
      "    String name = \"Yarik\";\n" +
      "}\n"
    );
  }
  public void test_Align_ConsecutiveVars_InsideForBlock() {
    // Declaration alignment inside a for-loop body.
    getSettings().ALIGN_CONSECUTIVE_VARIABLE_DECLARATIONS = true;
    doMethodTest(
      "    for (int i = 0; i < 10; i++) {\n" +
      "      int a=2;\n" +
      "        String name=\"Xa\";\n" +
      "    }\n",
      "for (int i = 0; i < 10; i++) {\n" +
      "    int    a    = 2;\n" +
      "    String name = \"Xa\";\n" +
      "}\n"
    );
  }
  public void test_Align_ConsecutiveVars_InsideTryBlock() {
    // Each of the try / catch / finally bodies forms its own independent
    // alignment group.
    getSettings().ALIGN_CONSECUTIVE_VARIABLE_DECLARATIONS = true;
    doMethodTest(
      "    try {\n" +
      "        int x = getX();\n" +
      "        String name = \"Ha\";\n" +
      "    }\n" +
      "    catch (IOException exception) {\n" +
      "        int y = 12;\n" +
      "        String test = \"Test\";\n" +
      "    }\n" +
      "    finally {\n" +
      "        int z = 12;\n" +
      "        String zzzz = \"pnmhd\";\n" +
      "    }\n",
      "try {\n" +
      "    int    x    = getX();\n" +
      "    String name = \"Ha\";\n" +
      "} catch (IOException exception) {\n" +
      "    int    y    = 12;\n" +
      "    String test = \"Test\";\n" +
      "} finally {\n" +
      "    int    z    = 12;\n" +
      "    String zzzz = \"pnmhd\";\n" +
      "}\n"
    );
  }
  public void test_Align_ConsecutiveVars_InsideCodeBlock() {
    // Declarations inside an anonymous code block are aligned as their own
    // group, independent of the enclosing method body.
    getSettings().ALIGN_CONSECUTIVE_VARIABLE_DECLARATIONS = true;
    doMethodTest(
      "    System.out.println(\"AAAA\");\n" +
      "    int a = 2;\n" +
      "    \n" +
      "    {\n" +
      "        int x=2;\n" +
      "        String name=3;\n" +
      "    }\n",
      "System.out.println(\"AAAA\");\n" +
      "int a = 2;\n" +
      "\n" +
      "{\n" +
      "    int    x    = 2;\n" +
      "    String name = 3;\n" +
      "}\n"
    );
  }
  public void test_AlignComments_BetweenChainedMethodCalls() {
    // Line comments interleaved with a wrapped call chain must receive the
    // same indent as the chain segments they annotate.
    getSettings().ALIGN_MULTILINE_CHAINED_METHODS = true;
    doMethodTest(
      "ActionBarPullToRefresh.from(getActivity())\n" +
      "// Mark the ListView as pullable\n" +
      ".theseChildrenArePullable(eventsListView)\n" +
      "// Set the OnRefreshListener\n" +
      ".listener(this)\n" +
      "// Use the AbsListView delegate for StickyListHeadersListView\n" +
      ".useViewDelegate(StickyListHeadersListView.class, new AbsListViewDelegate())\n" +
      "// Finally commit the setup to our PullToRefreshLayout\n" +
      ".setup(mPullToRefreshLayout);",
      "ActionBarPullToRefresh.from(getActivity())\n" +
      "        // Mark the ListView as pullable\n" +
      "        .theseChildrenArePullable(eventsListView)\n" +
      "        // Set the OnRefreshListener\n" +
      "        .listener(this)\n" +
      "        // Use the AbsListView delegate for StickyListHeadersListView\n" +
      "        .useViewDelegate(StickyListHeadersListView.class, new AbsListViewDelegate())\n" +
      "        // Finally commit the setup to our PullToRefreshLayout\n" +
      "        .setup(mPullToRefreshLayout);"
    );
  }
  public void test_AlignComments_2() {
    // A comment inside an already-wrapped builder chain keeps the chain's
    // indentation after reformatting.
    getSettings().ALIGN_MULTILINE_CHAINED_METHODS = true;
    doClassTest(
      "public String returnWithBuilder2() {\n" +
      "    return MoreObjects\n" +
      "        .toStringHelper(this)\n" +
      "        .add(\"value\", value)\n" +
      "        // comment\n" +
      "        .toString();\n" +
      "  }",
      "public String returnWithBuilder2() {\n" +
      "    return MoreObjects\n" +
      "            .toStringHelper(this)\n" +
      "            .add(\"value\", value)\n" +
      "            // comment\n" +
      "            .toString();\n" +
      "}"
    );
  }
  public void test_AlignSubsequentOneLineMethods() {
    // Consecutive simple one-line methods get their bodies padded so the
    // opening braces line up.
    getSettings().KEEP_SIMPLE_METHODS_IN_ONE_LINE = true;
    getSettings().ALIGN_SUBSEQUENT_SIMPLE_METHODS = true;
    doTextTest(
      "public class Test {\n" +
      "\n" +
      "    public void testSuperDuperFuckerMother() { System.out.println(\"AAA\"); }\n" +
      "\n" +
      "    public void testCounterMounter() { System.out.println(\"XXXX\"); }\n" +
      "\n" +
      "}",
      "public class Test {\n" +
      "\n" +
      "    public void testSuperDuperFuckerMother() { System.out.println(\"AAA\"); }\n" +
      "\n" +
      "    public void testCounterMounter()         { System.out.println(\"XXXX\"); }\n" +
      "\n" +
      "}"
    );
  }
public void test_alignAssignments() {
getSettings().ALIGN_CONSECUTIVE_ASSIGNMENTS = true;
doTextTest(
"public class Test {\n" +
" void foo(int a, int xyz) {\n" +
" a = 9999;\n" +
" xyz = 1;\n" +
" }\n" +
"}",
"public class Test {\n" +
" void foo(int a, int xyz) {\n" +
" a = 9999;\n" +
" xyz = 1;\n" +
" }\n" +
"}"
);
}
  public void test_alignMultilineAssignments() {
    // Combination of consecutive-assignment alignment with a chained,
    // multiline assignment ('xyz = a = \n a = 12').
    getSettings().ALIGN_CONSECUTIVE_ASSIGNMENTS = true;
    getSettings().ALIGN_MULTILINE_ASSIGNMENT = true;
    doTextTest(
      "public class Test {\n" +
      "  void foo(int a, int xyz) {\n" +
      "    a = 9999;\n" +
      "    xyz = a = \n" +
      "    a = 12;\n" +
      "  }\n" +
      "}",
      "public class Test {\n" +
      "    void foo(int a, int xyz) {\n" +
      "        a   = 9999;\n" +
      "        xyz = a =\n" +
      "              a = 12;\n" +
      "    }\n" +
      "}"
    );
  }
  public void test_alignMultilineAssignmentsMixedWithDeclaration() {
    // Assignments and declarations interleaved: each kind forms its own
    // alignment group, and the trailing multiline assignment stays aligned
    // under its own right-hand side.
    getSettings().ALIGN_CONSECUTIVE_ASSIGNMENTS = true;
    getSettings().ALIGN_MULTILINE_ASSIGNMENT = true;
    getSettings().ALIGN_CONSECUTIVE_VARIABLE_DECLARATIONS = true;
    doTextTest(
      "public class Test {\n" +
      "  void foo(int a, int xyz, int bc) {\n" +
      "    bc = 9999;\n" +
      "    a = 9999;\n" +
      "    int basdf = 1234;\n" +
      "    int as = 3;\n" +
      "    xyz = a = \n" +
      "    a = 12;\n" +
      "  }\n" +
      "}",
      "public class Test {\n" +
      "    void foo(int a, int xyz, int bc) {\n" +
      "        bc = 9999;\n" +
      "        a  = 9999;\n" +
      "        int basdf = 1234;\n" +
      "        int as    = 3;\n" +
      "        xyz = a =\n" +
      "              a = 12;\n" +
      "    }\n" +
      "}"
    );
  }
  public void test_alignAssignmentsFields() {
    // Qualified targets ('a.bar') participate in assignment alignment
    // together with plain variables.
    getSettings().ALIGN_CONSECUTIVE_ASSIGNMENTS = true;
    doTextTest(
      "public class Test {\n" +
      "  void foo(A a, int xyz) {\n" +
      "    a.bar = 9999;\n" +
      "    xyz = 1;\n" +
      "  }\n" +
      "}",
      "public class Test {\n" +
      "    void foo(A a, int xyz) {\n" +
      "        a.bar = 9999;\n" +
      "        xyz   = 1;\n" +
      "    }\n" +
      "}"
    );
  }
  public void test_alignMultilineTextBlock() {
    // Text-block content must be preserved verbatim when the alignment
    // option is on (reformatting must not shift the block's lines).
    getJavaSettings().ALIGN_MULTILINE_TEXT_BLOCKS = true;
    doTextTest(
      "public class Test {\n" +
      "    void foo() {\n" +
      "        String block = \"\"\"\n" +
      "                text\n" +
      "                block\n" +
      "                \"\"\";\n" +
      "    }\n" +
      "}",
      "public class Test {\n" +
      "    void foo() {\n" +
      "        String block = \"\"\"\n" +
      "                text\n" +
      "                block\n" +
      "                \"\"\";\n" +
      "    }\n" +
      "}"
    );
  }
  @SuppressWarnings("unused")
  // Disabled test (leading underscore keeps JUnit from picking it up):
  // IDEA-199677 — chop-down call arguments combined with consecutive
  // variable-declaration alignment.
  public void _testIdea199677() {
    getSettings().ALIGN_CONSECUTIVE_VARIABLE_DECLARATIONS = true;
    // CALL_PARAMETERS_WRAP = 2 is CommonCodeStyleSettings.WRAP_ALWAYS.
    getSettings().CALL_PARAMETERS_WRAP = 2;
    getSettings().CALL_PARAMETERS_LPAREN_ON_NEXT_LINE = true;
    getSettings().CALL_PARAMETERS_RPAREN_ON_NEXT_LINE = true;
    doTextTest(
      "public class Main {\n" +
      "\n" +
      "    public static void main(String[] args) {\n" +
      "        int one = 1;\n" +
      "        int a_million_dollars = 1000000;\n" +
      "\n" +
      "        doSomething(one, a_million_dollars);\n" +
      "    }\n" +
      "\n" +
      "    private static void doSomething(int one, int two) {\n" +
      "    }\n" +
      "\n" +
      "}",
      "public class Main {\n" +
      "\n" +
      "    public static void main(String[] args) {\n" +
      "        int one = 1;\n" +
      "        int a_million_dollars = 1000000;\n" +
      "\n" +
      "        doSomething(\n" +
      "                one,\n" +
      "                a_million_dollars\n" +
      "        );\n" +
      "    }\n" +
      "\n" +
      "    private static void doSomething(int one, int two) {\n" +
      "    }\n" +
      "\n" +
      "}"
    );
  }
} | dahlstrom-g/intellij-community | java/java-tests/testSrc/com/intellij/java/psi/formatter/java/JavaFormatterAlignmentTest.java | Java | apache-2.0 | 30,615 |
#pragma once
#include "indexer/index.hpp"
#include "geometry/point2d.hpp"
#include "geometry/rect2d.hpp"
#include "geometry/tree4d.hpp"
#include "std/set.hpp"
class Index;
namespace search
{
// Value object describing one locality (city/town) loaded from the index.
// Instances are stored in an m4::Tree, which calls GetLimitRect().
struct LocalityItem
{
  m2::RectD m_rect;       // limit rect of the locality feature
  string m_name;          // display name (language chosen by LocalityFinder::SetLanguage — see finder)
  uint32_t m_population;

  typedef uint32_t ID;
  ID m_id;                // feature id; used to skip already-loaded localities

  LocalityItem(m2::RectD const & rect, uint32_t population, ID id, string const & name);

  // Required by m4::Tree to index this item spatially.
  m2::RectD const & GetLimitRect() const { return m_rect; }
};
// Resolves a point to the name of the locality (city/town) containing it.
// Keeps a small fixed number of viewport-bound caches plus one reserved slot.
// NOTE(review): not documented as thread-safe — confirm before sharing across threads.
class LocalityFinder
{
  // Per-viewport cache of localities, spatially indexed.
  struct Cache
  {
    m4::Tree<LocalityItem> m_tree;
    set<LocalityItem::ID> m_loaded;   // ids already inserted into m_tree
    mutable uint32_t m_usage;         // hit counter; mutable so const lookups can update it
    m2::RectD m_rect;                 // rect this cache was built for
    Cache() : m_usage(0) {}
    void Clear();
    // Looks pt up in m_tree and fills name on success.
    void GetLocality(m2::PointD const & pt, string & name) const;
  };

public:
  LocalityFinder(Index const * pIndex);

  // Changing the language invalidates all caches (names are language-specific).
  void SetLanguage(int8_t lang)
  {
    if (m_lang != lang)
    {
      ClearCacheAll();
      m_lang = lang;
    }
  }
  void SetViewportByIndex(m2::RectD const & viewport, size_t idx);
  /// Set new viewport for the reserved slot only if it's no a part of the previous one.
  void SetReservedViewportIfNeeded(m2::RectD const & viewport);

  /// Check for localities in pre-cached viewports only.
  void GetLocalityInViewport(m2::PointD const & pt, string & name) const;
  /// Check for localities in all Index and make new cache if needed.
  void GetLocalityCreateCache(m2::PointD const & pt, string & name);

  void ClearCacheAll();
  void ClearCache(size_t idx);

protected:
  // Expands rect to a sensible minimum before querying the index.
  void CorrectMinimalRect(m2::RectD & rect) const;
  void RecreateCache(Cache & cache, m2::RectD rect) const;

private:
  friend class DoLoader;

  Index const * m_pIndex;

  enum { MAX_VIEWPORT_COUNT = 3 };   // fixed cache slots; last one is the reserved slot
  Cache m_cache[MAX_VIEWPORT_COUNT];

  int8_t m_lang;                     // language code used when reading names
};
} // namespace search
| programming086/omim | search/locality_finder.hpp | C++ | apache-2.0 | 1,775 |
/*=========================================================================
Library: CTK
Copyright (c) Kitware Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0.txt
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=========================================================================*/
#ifndef __ctkDoubleRangeSliderEventPlayer_h
#define __ctkDoubleRangeSliderEventPlayer_h
// QtTesting includes
#include <pqWidgetEventPlayer.h>
// CTK includes
#include <ctkPimpl.h>
#include "ctkWidgetsExport.h"
/// Concrete implementation of pqWidgetEventPlayer that translates
/// high-level events into low-level Qt events.
/// Replays recorded interactions on ctkDoubleRangeSlider widgets as part of
/// the QtTesting playback framework.
class CTK_WIDGETS_EXPORT ctkDoubleRangeSliderEventPlayer :
  public pqWidgetEventPlayer
{
  Q_OBJECT

public:
  typedef pqWidgetEventPlayer Superclass;
  ctkDoubleRangeSliderEventPlayer(QObject* parent = 0);

  using Superclass::playEvent;
  /// Applies Command/Arguments to Object; returns true when the event was
  /// handled, setting Error on failure.
  bool playEvent(QObject *Object, const QString &Command, const QString &Arguments, bool &Error);

private:
  Q_DISABLE_COPY(ctkDoubleRangeSliderEventPlayer);
};
#endif
| SINTEFMedtek/CTK | Libs/Widgets/ctkDoubleRangeSliderEventPlayer.h | C | apache-2.0 | 1,490 |
println!("11. Creating and storing CREDENTAIL DEFINITION using anoncreds as Trust Anchor, for the given Schema");
let config_json = r#"{ "support_revocation": false }"#;
let tag = r#"TAG1"#;
let (_cred_def_id, _cred_def_json) = anoncreds::issuer_create_and_store_credential_def(wallet_handle, &trustee_did, &schema_json, tag, None, config_json).wait().unwrap();
// CLEAN UP
println!("12. Close and delete wallet");
indy::wallet::close_wallet(wallet_handle).wait().unwrap();
indy::wallet::delete_wallet(&config, USEFUL_CREDENTIALS).wait().unwrap();
println!("13. Close pool and delete pool ledger config");
pool::close_pool_ledger(pool_handle).wait().unwrap();
pool::delete_pool_ledger(&pool_name).wait().unwrap();
| Artemkaaas/indy-sdk | docs/how-tos/save-schema-and-cred-def/rust/src/step4.rs | Rust | apache-2.0 | 717 |
# Serves the application's custom error pages.
# NOTE(review): presumably wired up via 404/500 error routes — confirm in routes.rb.
class ErrorsController < ApplicationController
  # HTML requests get the custom 404 template; every other format gets an
  # empty response with a 404 status.
  def error_404
    respond_to do |format|
      format.html { render template: 'errors/error_404', layout: 'layouts/application', status: 404 }
      format.all { render nothing: true, status: 404 }
    end
  end
  # Empty action: renders the conventional errors/error_500 view implicitly.
  def error_500
  end
end | mariaro/cypress | app/controllers/errors_controller.rb | Ruby | apache-2.0 | 287
<?php
/**
* CakePHP(tm) : Rapid Development Framework (http://cakephp.org)
* Copyright (c) Cake Software Foundation, Inc. (http://cakefoundation.org)
*
* Licensed under The MIT License
* For full copyright and license information, please see the LICENSE.txt
* Redistributions of files must retain the above copyright notice.
*
* @copyright Copyright (c) Cake Software Foundation, Inc. (http://cakefoundation.org)
* @link http://cakephp.org CakePHP(tm) Project
* @since 0.1.0
* @license http://www.opensource.org/licenses/mit-license.php MIT License
*/
namespace Bake\Test\TestCase\Shell\Task;
use Bake\Shell\Task\BakeTemplateTask;
use Bake\Test\TestCase\TestCase;
use Cake\Core\Plugin;
use Cake\ORM\TableRegistry;
use Cake\View\Helper;
/**
 * Tests for the bake ControllerTask shell task: listing candidate
 * controllers, resolving helpers/components, and baking controller classes
 * (plain, prefixed, plugin) together with their test files.
 *
 * Note: several tests use order-sensitive `$this->at(N)` mock expectations,
 * so the sequence of createFile/out calls matters.
 */
class ControllerTaskTest extends TestCase
{

    /**
     * fixtures
     *
     * @var array
     */
    public $fixtures = [
        'plugin.bake.bake_articles',
        'plugin.bake.bake_articles_bake_tags',
        'plugin.bake.bake_comments',
        'plugin.bake.bake_tags'
    ];

    /**
     * setUp method
     *
     * @return void
     */
    public function setUp()
    {
        parent::setUp();
        $this->_compareBasePath = Plugin::path('Bake') . 'tests' . DS . 'comparisons' . DS . 'Controller' . DS;
        // ConsoleIo is fully mocked so no console output is produced.
        $io = $this->getMock('Cake\Console\ConsoleIo', [], [], '', false);

        $this->Task = $this->getMock(
            'Bake\Shell\Task\ControllerTask',
            ['in', 'out', 'err', 'hr', 'createFile', '_stop'],
            [$io]
        );
        $this->Task->name = 'Controller';
        $this->Task->connection = 'test';
        $this->Task->BakeTemplate = new BakeTemplateTask($io);

        $this->Task->Model = $this->getMock(
            'Bake\Shell\Task\ModelTask',
            ['in', 'out', 'err', 'createFile', '_stop'],
            [$io]
        );
        $this->Task->Test = $this->getMock(
            'Bake\Shell\Task\TestTask',
            [],
            [$io]
        );
        // Map the BakeArticles alias onto the test table class below.
        TableRegistry::get('BakeArticles', [
            'className' => __NAMESPACE__ . '\BakeArticlesTable'
        ]);
    }

    /**
     * tearDown method
     *
     * @return void
     */
    public function tearDown()
    {
        unset($this->Task);
        TableRegistry::clear();
        parent::tearDown();
        Plugin::unload('ControllerTest');
    }

    /**
     * test ListAll
     *
     * @return void
     */
    public function testListAll()
    {
        $result = $this->Task->listAll();
        $this->assertContains('bake_articles', $result);
        $this->assertContains('bake_articles_bake_tags', $result);
        $this->assertContains('bake_comments', $result);
        $this->assertContains('bake_tags', $result);
    }

    /**
     * test component generation
     *
     * @return void
     */
    public function testGetComponents()
    {
        $result = $this->Task->getComponents();
        $this->assertSame([], $result);

        // Whitespace and empty entries in the CLI option must be discarded.
        $this->Task->params['components'] = '  , Security, ,  Csrf';
        $result = $this->Task->getComponents();
        $this->assertSame(['Security', 'Csrf'], $result);
    }

    /**
     * test helper generation
     *
     * @return void
     */
    public function testGetHelpers()
    {
        $result = $this->Task->getHelpers();
        $this->assertSame([], $result);

        $this->Task->params['helpers'] = '  , Session , ,  Number';
        $result = $this->Task->getHelpers();
        $this->assertSame(['Session', 'Number'], $result);
    }

    /**
     * test bake with various component name variants
     *
     * @return void
     */
    public function testBakeComponents()
    {
        $this->Task->expects($this->any())
            ->method('createFile')
            ->will($this->returnValue(true));
        $this->Task->params['no-actions'] = true;
        // Includes plugin-dotted and nonexistent components on purpose.
        $this->Task->params['components'] = 'Csrf, Auth, Company/TestBakeThree.Something,' .
            ' TestBake.Other, Apple, NonExistent';

        $result = $this->Task->bake('BakeArticles');

        $this->assertSameAsFile(__FUNCTION__ . '.php', $result);
    }

    /**
     * test the bake method
     *
     * @return void
     */
    public function testBakeNoActions()
    {
        $this->Task->expects($this->any())
            ->method('createFile')
            ->will($this->returnValue(true));
        $this->Task->params['no-actions'] = true;
        $this->Task->params['helpers'] = 'Html,Time';
        $this->Task->params['components'] = 'Csrf, Auth';

        $result = $this->Task->bake('BakeArticles');

        $this->assertSameAsFile(__FUNCTION__ . '.php', $result);
    }

    /**
     * test bake with actions.
     *
     * @return void
     */
    public function testBakeActions()
    {
        $this->Task->params['helpers'] = 'Html,Time';
        $this->Task->params['components'] = 'Csrf, Auth';

        $filename = APP . 'Controller/BakeArticlesController.php';
        // at(1): createFile is the second observed call on the mock.
        $this->Task->expects($this->at(1))
            ->method('createFile')
            ->with(
                $this->_normalizePath($filename),
                $this->stringContains('class BakeArticlesController')
            );

        $result = $this->Task->bake('BakeArticles');
        $this->assertSameAsFile(__FUNCTION__ . '.php', $result);
    }

    /**
     * test bake actions prefixed.
     *
     * @return void
     */
    public function testBakePrefixed()
    {
        $this->Task->params['prefix'] = 'admin';

        $filename = $this->_normalizePath(APP . 'Controller/Admin/BakeArticlesController.php');
        $this->Task->expects($this->at(1))
            ->method('createFile')
            ->with($filename, $this->anything());

        $this->Task->Test->expects($this->at(0))
            ->method('bake')
            ->with('Controller', 'Admin\BakeArticles');

        $result = $this->Task->bake('BakeArticles');
        $this->assertTextContains('namespace App\Controller\Admin;', $result);
        $this->assertTextContains('use App\Controller\AppController;', $result);
    }

    /**
     * test bake() with a -plugin param
     *
     * @return void
     */
    public function testBakeWithPlugin()
    {
        $this->Task->plugin = 'ControllerTest';

        Plugin::load('ControllerTest', ['path' => APP . 'Plugin/ControllerTest/']);
        $path = APP . 'Plugin/ControllerTest/src/Controller/BakeArticlesController.php';
        $this->Task->expects($this->at(1))
            ->method('createFile')
            ->with($this->_normalizePath($path))
            ->will($this->returnValue(true));

        $result = $this->Task->bake('BakeArticles');
        $this->assertSameAsFile(__FUNCTION__ . '.php', $result);
    }

    /**
     *
     * test that bakeActions is creating the correct controller Code. (Using sessions)
     *
     * @return void
     */
    public function testBakeActionsContent()
    {
        $result = $this->Task->bake('BakeArticles');
        $this->assertSameAsFile(__FUNCTION__ . '.php', $result);
    }

    /**
     * test baking a test
     *
     * @return void
     */
    public function testBakeTest()
    {
        $this->Task->plugin = 'ControllerTest';
        $this->Task->connection = 'test';

        $this->Task->Test->expects($this->once())
            ->method('bake')
            ->with('Controller', 'BakeArticles');
        $this->Task->bakeTest('BakeArticles');

        // plugin/connection must be forwarded to the test task.
        $this->assertEquals($this->Task->plugin, $this->Task->Test->plugin);
        $this->assertEquals($this->Task->connection, $this->Task->Test->connection);
    }

    /**
     * test baking a test
     *
     * @return void
     */
    public function testBakeTestDisabled()
    {
        $this->Task->plugin = 'ControllerTest';
        $this->Task->connection = 'test';
        $this->Task->params['no-test'] = true;

        $this->Task->Test->expects($this->never())
            ->method('bake');
        $this->Task->bakeTest('BakeArticles');
    }

    /**
     * Test execute no args.
     *
     * @return void
     */
    public function testMainNoArgs()
    {
        $this->Task->expects($this->never())
            ->method('createFile');

        $this->Task->expects($this->at(0))
            ->method('out')
            ->with($this->stringContains('Possible controllers based on your current database'));

        $this->Task->main();
    }

    /**
     * test that execute runs all when the first arg == all
     *
     * @return void
     */
    public function testMainIntoAll()
    {
        $this->Task->connection = 'test';
        $this->Task->params = ['helpers' => 'Time,Text'];

        $this->Task->Test->expects($this->atLeastOnce())
            ->method('bake');

        $filename = $this->_normalizePath(APP . 'Controller/BakeArticlesController.php');
        $this->Task->expects($this->at(1))
            ->method('createFile')
            ->with($filename, $this->logicalAnd(
                $this->stringContains('class BakeArticlesController'),
                $this->stringContains("\$helpers = ['Time', 'Text']")
            ))
            ->will($this->returnValue(true));

        $this->Task->all();
    }

    /**
     * data provider for testMainWithControllerNameVariations
     *
     * @return void
     */
    public static function nameVariations()
    {
        return [
            ['BakeArticles'], ['bake_articles']
        ];
    }

    /**
     * test that both plural and singular forms work for controller baking.
     *
     * @dataProvider nameVariations
     * @return void
     */
    public function testMainWithControllerNameVariations($name)
    {
        $this->Task->connection = 'test';

        $filename = $this->_normalizePath(APP . 'Controller/BakeArticlesController.php');
        $this->Task->expects($this->once())
            ->method('createFile')
            ->with($filename, $this->stringContains('public function index()'));
        $this->Task->main($name);
    }

    /**
     * test main with plugin.name
     *
     * @return void
     */
    public function testMainWithPluginDot()
    {
        $this->Task->connection = 'test';
        Plugin::load('ControllerTest', ['path' => APP . 'Plugin/ControllerTest/']);

        $path = APP . 'Plugin/ControllerTest/src/Controller/BakeArticlesController.php';
        $this->Task->expects($this->at(1))
            ->method('createFile')
            ->with(
                $this->_normalizePath($path),
                $this->stringContains('BakeArticlesController extends AppController')
            )->will($this->returnValue(true));

        $this->Task->main('ControllerTest.BakeArticles');
    }
}
| thaysmelo/mobile_maravilhas-pe | sistema/vendor/cakephp/bake/tests/TestCase/Shell/Task/ControllerTaskTest.php | PHP | apache-2.0 | 10,677 |
// std imports
use std::mem;
// external imports
use num::traits::Num;
// local imports
use algebra::structure::MagmaBase;
use super::eo_traits::{ERO, ECO};
use matrix::view::MatrixView;
use matrix::traits::{Shape, MatrixBuffer, Strided};
/// Implementation of Elementary row operations.
impl<'a, T:MagmaBase + Num> ERO<T> for MatrixView<'a, T> {

    /// Row scaling by a factor and adding to another row.
    /// r_i = r_i + k * r_j
    /// The j-th row can be outside the view also.
    /// This is the row relative to the start of the view.
    ///
    /// `i` is a row index inside the view; `j` is view-relative and may be
    /// negative, as long as it maps to a valid row of the backing matrix.
    #[inline]
    fn ero_scale_add(&mut self,
              i :  usize,
              j :  isize,
              scale : T
              )-> &mut MatrixView<'a, T> {
        debug_assert! (i  < self.num_rows());
        let m = self.matrix();
        // Compute j-th row in m (by doing offset)
        let j = j + (self.start_row() as isize);
        debug_assert! (j  >= 0);
        let j = j as usize;
        debug_assert!(j < m.num_rows());
        let ptr = m.as_ptr();
        // I am allowing modification of the underlying buffer
        // NOTE(review): transmute casts *const T to *mut T, bypassing the
        // borrow checker — sound only while no other alias is in use.
        let ptr : *mut T = unsafe { mem::transmute(ptr) };
        let sc = self.start_col();
        // Compute initial offsets
        let mut offset_a = self.cell_to_offset(i, 0);
        let mut offset_b = m.cell_to_offset(j, sc);
        // Walking a row means stepping one column at a time, i.e. one
        // stride in the column-major buffer.
        let stride_a = self.stride() as isize;
        let stride_b = m.stride() as isize;
        for _ in 0..self.num_cols(){
            unsafe {
                let va = *ptr.offset(offset_a);
                let vb = *ptr.offset(offset_b);
                *ptr.offset(offset_a) = va + scale * vb;
            }
            // Update offsets
            offset_a += stride_a;
            offset_b += stride_b;
        }
        self
    }
}
/// Implementation of Elementary column operations.
impl<'a, T:MagmaBase + Num> ECO<T> for MatrixView<'a, T> {

    /// Column scaling by a factor and adding to another column.
    /// c_i = c_i + k * c_j
    /// The j-th column can be outside the view also.
    /// This is the column relative to the start of the view.
    ///
    /// Mirrors `ero_scale_add`, but entries within a column are contiguous,
    /// so offsets advance by 1 instead of by the stride.
    #[inline]
    fn eco_scale_add(&mut self,
              i :  usize,
              j :  isize,
              scale : T
              )-> &mut MatrixView<'a, T> {
        debug_assert! (i  < self.num_cols());
        let m = self.matrix();
        // Compute j-th column in m (by doing offset)
        let j = j + (self.start_col() as isize);
        debug_assert! (j  >= 0);
        let j = j as usize;
        debug_assert!(j < m.num_cols());
        let ptr = m.as_ptr();
        // I am allowing modification of the underlying buffer
        // NOTE(review): same *const -> *mut transmute caveat as in
        // ero_scale_add above.
        let ptr : *mut T = unsafe { mem::transmute(ptr) };
        let sr = self.start_row();
        // Compute initial offsets
        let mut offset_a = self.cell_to_offset(0, i);
        let mut offset_b = m.cell_to_offset(sr, j);
        for _ in 0..self.num_rows(){
            unsafe {
                let va = *ptr.offset(offset_a);
                let vb = *ptr.offset(offset_b);
                *ptr.offset(offset_a) = va + scale * vb;
            }
            // Update offsets
            offset_a += 1;
            offset_b += 1;
        }
        self
    }
}
/******************************************************
 *
 * Unit tests
 *
 *******************************************************/
#[cfg(test)]
mod test{
    // Placeholder: no unit tests for the view EO operations yet.
    //use super::*;
}
/******************************************************
*
* Bench marks
*
*******************************************************/
// Benchmark scaffolding, disabled until the nightly `test` crate is wired in.
#[cfg(test)]
mod bench{
    //extern crate test;
    //use self::test::Bencher;
    //use super::*;
}
| daniel-vainsencher/scirust | src/matrix/eo/eo_view.rs | Rust | apache-2.0 | 3,596 |
#include <stdlib.h>
/*
 * Analyzer regression fixture for switch-statement handling.
 * `a` is the constant 1, so only `case 1` is reachable and `b` ends up 3;
 * `b` is deliberately never read afterwards. Keep the switch construct
 * as written -- the fixture exists to exercise it.
 */
int main(void) {
  int a = 1;
  int b = 1;
  switch (a) {
  case 1 :
    b = 3;
    break;
  case 2 :
    b = 5;
    break;
  }
  return (0);
}
| TommesDee/cpachecker | test/programs/simple/switch-tests/switch3.c | C | apache-2.0 | 161 |
// -*- mode: java; c-basic-offset: 2; -*-
// Copyright 2009-2011 Google, All Rights reserved
// Copyright 2011-2012 MIT, All rights reserved
// Released under the Apache License, Version 2.0
// http://www.apache.org/licenses/LICENSE-2.0
package com.google.appinventor.server;
import com.google.appinventor.server.storage.StorageIo;
import com.google.appinventor.server.storage.StorageIoInstanceHolder;
import com.google.appinventor.server.storage.UnauthorizedAccessException;
import com.google.appinventor.shared.rpc.project.Project;
import com.google.appinventor.shared.rpc.project.ProjectSourceZip;
import com.google.appinventor.shared.rpc.project.RawFile;
import com.google.appinventor.shared.rpc.project.TextFile;
import com.google.appinventor.shared.storage.StorageUtil;
import com.google.common.io.ByteStreams;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
/**
* Tests for {@link FileExporterImpl}.
*
*/
public class FileExporterImplTest extends LocalDatastoreTestCase {
  private static final String USER_ID = "1";

  // The following represent a fake project, containing both source and
  // output files, for the purpose of testing.
  private static final String FAKE_PROJECT_TYPE = "FakeProjectType";
  private static final String PROJECT_NAME = "Project1";
  private static final String FORM1_NAME = "Screen1";
  private static final String FORM1_QUALIFIED_NAME = "com.yourdomain." + FORM1_NAME;
  private static final String FORM1_CONTENT = "Form A\nEnd Form";
  private static final String IMAGE1_NAME = "Image.jpg";
  private static final byte[] IMAGE_CONTENT = { (byte) 0, (byte) 1, (byte) 32, (byte) 255};
  private static final String TARGET1_NAME = "Project1.apk";
  private static final String TARGET1_QUALIFIED_NAME = "build/target1/" + TARGET1_NAME;
  private static final byte[] TARGET1_CONTENT = "pk1".getBytes();
  private static final String TARGET2_NAME = "Project2.pak";
  private static final String TARGET2_QUALIFIED_NAME = "build/target2/" + TARGET2_NAME;
  private static final byte[] TARGET2_CONTENT = "pk2".getBytes();
  private static final String SETTINGS = "";
  private static final String HISTORY = "1:History";

  private StorageIo storageIo;
  private FileExporterImpl exporter;
  private long projectId;

  /**
   * Creates a fake project containing one source form, one image, and two
   * build targets, so that every exporter entry point has data to export.
   */
  @Override
  protected void setUp() throws Exception {
    super.setUp();
    storageIo = StorageIoInstanceHolder.INSTANCE;
    exporter = new FileExporterImpl();
    Project project = new Project(PROJECT_NAME);
    project.setProjectType(FAKE_PROJECT_TYPE);
    project.setProjectHistory(HISTORY);
    project.addTextFile(new TextFile(FORM1_QUALIFIED_NAME, ""));
    projectId = storageIo.createProject(USER_ID, project, SETTINGS);
    storageIo.uploadFile(projectId, FORM1_QUALIFIED_NAME, USER_ID, FORM1_CONTENT,
        StorageUtil.DEFAULT_CHARSET);
    storageIo.addSourceFilesToProject(USER_ID, projectId, false, IMAGE1_NAME);
    storageIo.uploadRawFile(projectId, IMAGE1_NAME, USER_ID, true, IMAGE_CONTENT);
    storageIo.addOutputFilesToProject(USER_ID, projectId, TARGET1_QUALIFIED_NAME);
    storageIo.uploadRawFile(projectId, TARGET1_QUALIFIED_NAME, USER_ID,
        true, TARGET1_CONTENT);
    storageIo.addOutputFilesToProject(USER_ID, projectId, TARGET2_QUALIFIED_NAME);
    storageIo.uploadRawFile(projectId, TARGET2_QUALIFIED_NAME, USER_ID,
        true, TARGET2_CONTENT);
  }

  /**
   * Unzips an exported project and checks the invariants shared by all
   * source-zip tests: the file count reported by the exporter matches the
   * zip contents, the source files are present, and build outputs are not.
   *
   * @param project the exported source zip to inspect
   * @return map from zip entry name to entry contents, for further assertions
   * @throws IOException if the zip stream cannot be read
   */
  private Map<String, byte[]> testExportProjectSourceZipHelper(ProjectSourceZip project)
      throws IOException {
    ZipInputStream zis =
        new ZipInputStream(new ByteArrayInputStream(project.getContent()));
    Map<String, byte[]> content = new HashMap<String, byte[]>();
    ZipEntry zipEntry;
    while ((zipEntry = zis.getNextEntry()) != null) {
      ByteArrayOutputStream baos = new ByteArrayOutputStream();
      ByteStreams.copy(zis, baos);
      content.put(zipEntry.getName(), baos.toByteArray());
    }
    // assertEquals takes (expected, actual): the count declared by the
    // exporter is the expectation; the number of unzipped entries is actual.
    assertEquals(project.getFileCount(), content.size());
    assertTrue(content.containsKey(FORM1_QUALIFIED_NAME));
    assertTrue(content.containsKey(IMAGE1_NAME));
    assertFalse(content.containsKey(TARGET1_NAME));
    assertEquals(FORM1_CONTENT, new String(content.get(FORM1_QUALIFIED_NAME),
        StorageUtil.DEFAULT_CHARSET));
    assertTrue(Arrays.equals(IMAGE_CONTENT, content.get(IMAGE1_NAME)));
    return content;
  }

  /** Exporting without history must not include the remix-information file. */
  public void testExportProjectSourceZipWithoutHistory() throws IOException {
    ProjectSourceZip project = exporter.exportProjectSourceZip(USER_ID, projectId,
        false, false, null);
    Map<String, byte[]> content = testExportProjectSourceZipHelper(project);
    assertEquals(2, content.size());
    /* Do not expect remix history when includeProjectHistory parameter is false
     * as in the publish case. */
    assertFalse(content.containsKey(FileExporter.REMIX_INFORMATION_FILE_PATH));
  }

  // TODO(user): Add test with properly formatted history
  /** Exporting with history must include the remix-information file. */
  public void testExportProjectSourceZipWithHistory() throws IOException {
    ProjectSourceZip project = exporter.exportProjectSourceZip(USER_ID, projectId,
        true, false, null);
    Map<String, byte[]> content = testExportProjectSourceZipHelper(project);
    assertEquals(3, content.size());
    // Expect the remix file to be in
    assertTrue(content.containsKey(FileExporter.REMIX_INFORMATION_FILE_PATH));
    assertEquals(HISTORY, new String(content.get(FileExporter.REMIX_INFORMATION_FILE_PATH),
        StorageUtil.DEFAULT_CHARSET));
  }

  /** Exporting a non-existing project must fail with IllegalArgumentException. */
  public void testExportProjectSourceZipWithNonExistingProject() throws IOException {
    try {
      exporter.exportProjectSourceZip(USER_ID, projectId + 1, false, false, null);
      fail();
    } catch (Exception e) {
      // Depending on the storage layer the IllegalArgumentException may be
      // thrown directly or arrive wrapped, so accept either form.
      assertTrue(e instanceof IllegalArgumentException ||
                 e.getCause() instanceof IllegalArgumentException);
    }
  }

  /** A known build target must be exported with its short name and content. */
  public void testExportProjectOutputFileWithTarget() throws IOException {
    RawFile file = exporter.exportProjectOutputFile(USER_ID, projectId, "target1");
    assertEquals(TARGET1_NAME, file.getFileName());
    assertTrue(Arrays.equals(TARGET1_CONTENT, file.getContent()));
  }

  /** An unknown build target must fail with IllegalArgumentException. */
  public void testExportProjectOutputFileWithNonExistingTarget() throws IOException {
    try {
      exporter.exportProjectOutputFile(USER_ID, projectId, "target3");
      fail();
    } catch (IllegalArgumentException e) {
      // expected
    }
  }

  /** A single source file must be exported with its name and content. */
  public void testExportFile() throws IOException {
    RawFile file = exporter.exportFile(USER_ID, projectId, FORM1_QUALIFIED_NAME);
    assertEquals(FORM1_QUALIFIED_NAME, file.getFileName());
    assertEquals(FORM1_CONTENT, new String(file.getContent(), StorageUtil.DEFAULT_CHARSET));
  }

  /** Exporting a non-existing file must fail. */
  public void testExportFileWithNonExistingFile() throws IOException {
    final String nonExistingFileName = FORM1_QUALIFIED_NAME + "1";
    try {
      exporter.exportFile(USER_ID, projectId, nonExistingFileName);
      fail();
    } catch (RuntimeException e) {
      // expected
      // note that FileExporter throws an explicit RuntimeException
    }
  }

  // TODO(user): Add test of exportAllProjectsSourceZip().
}
| kidebit/AudioBlurp | appinventor/appengine/tests/com/google/appinventor/server/FileExporterImplTest.java | Java | apache-2.0 | 7,269 |
# Cloud Foundry Java Buildpack
# Copyright 2013-2017 the original author or authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require 'spec_helper'
require 'component_helper'
require 'java_buildpack/framework/introscope_agent'
# Specs for the Introscope (CA APM) framework component. Detection is driven
# by a bound service matching /introscope/ that exposes a 'host-name'
# credential; release contributes the -javaagent flag and the Introscope
# system properties to JAVA_OPTS.
describe JavaBuildpack::Framework::IntroscopeAgent do
  include_context 'component_helper'

  # Default agent name is a shell expression, evaluated at launch time,
  # that extracts application_name from $VCAP_APPLICATION.
  let(:configuration) do
    { 'default_agent_name' => "$(expr \"$VCAP_APPLICATION\" : '.*application_name[\": ]*\\([A-Za-z0-9_-]*\\).*')" }
  end

  let(:vcap_application) do
    { 'application_name' => 'test-application-name',
      'application_uris' => %w[test-application-uri-0 test-application-uri-1] }
  end

  it 'does not detect without introscope-n/a service' do
    expect(component.detect).to be_nil
  end

  # All remaining examples run with an introscope service bound.
  context do
    let(:credentials) { {} }

    before do
      allow(services).to receive(:one_service?).with(/introscope/, 'host-name').and_return(true)
      allow(services).to receive(:find_service).and_return('credentials' => credentials)
    end

    it 'detects with introscope-n/a service' do
      expect(component.detect).to eq("introscope-agent=#{version}")
    end

    it 'expands Introscope agent zip',
       cache_fixture: 'stub-introscope-agent.tar' do
      component.compile
      expect(sandbox + 'Agent.jar').to exist
    end

    it 'raises error if host-name not specified' do
      expect { component.release }.to raise_error(/'host-name' credential must be set/)
    end

    # With the mandatory host-name credential present, release succeeds.
    context do
      let(:credentials) { { 'host-name' => 'test-host-name' } }

      it 'updates JAVA_OPTS' do
        component.release
        expect(java_opts).to include('-javaagent:$PWD/.java-buildpack/introscope_agent/Agent.jar')
        expect(java_opts).to include('-Dcom.wily.introscope.agentProfile=$PWD/.java-buildpack/introscope_agent/core' \
          '/config/IntroscopeAgent.profile')
        expect(java_opts).to include('-Dintroscope.agent.defaultProcessName=test-application-name')
        expect(java_opts).to include('-Dintroscope.agent.hostName=test-application-uri-0')
        expect(java_opts).to include('-Dintroscope.agent.enterprisemanager.transport.tcp.host.DEFAULT=test-host-name')
        expect(java_opts).to include('-Dcom.wily.introscope.agent.agentName=$(expr "$VCAP_APPLICATION" : ' \
          '\'.*application_name[": ]*\\([A-Za-z0-9_-]*\\).*\')')
      end

      # Optional credentials layered on top of host-name via super().merge.
      context do
        let(:credentials) { super().merge 'agent-name' => 'another-test-agent-name' }

        it 'adds agent-name from credentials to JAVA_OPTS if specified' do
          component.release
          expect(java_opts).to include('-Dcom.wily.introscope.agent.agentName=another-test-agent-name')
        end
      end

      context do
        let(:credentials) { super().merge 'port' => 'test-port' }

        it 'adds port from credentials to JAVA_OPTS if specified' do
          component.release
          expect(java_opts).to include('-Dintroscope.agent.enterprisemanager.transport.tcp.port.DEFAULT=test-port')
        end
      end

      context do
        let(:credentials) { super().merge 'ssl' => 'true' }

        it 'adds ssl socket factory from credentials to JAVA_OPTS if specified' do
          component.release
          expect(java_opts).to include('-Dintroscope.agent.enterprisemanager.transport.tcp.socketfactory.DEFAULT=' \
            'com.wily.isengard.postofficehub.link.net.SSLSocketFactory')
        end
      end
    end
  end
end
| afalak/java-buildpack | spec/java_buildpack/framework/introscope_agent_spec.rb | Ruby | apache-2.0 | 4,002 |
// ----------------------------------------------------------------------------------
//
// Copyright Microsoft Corporation
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// ----------------------------------------------------------------------------------
using Microsoft.Azure;
using Microsoft.Azure.Commands.Common.Authentication;
using Microsoft.Azure.Commands.Common.Authentication.Models;
using Microsoft.Rest;
using System;
using System.Security;
namespace Microsoft.WindowsAzure.Commands.Common.Test.Mocks
{
    /// <summary>
    /// Test double for <see cref="IAuthenticationFactory"/> that returns
    /// canned <see cref="MockAccessToken"/> instances instead of performing
    /// real Azure Active Directory authentication.
    /// </summary>
    public class MockTokenAuthenticationFactory : IAuthenticationFactory
    {
        /// <summary>The token returned (or last produced) by this factory.</summary>
        public IAccessToken Token { get; set; }

        /// <summary>
        /// Hook that maps (account, environment, tenant) to a token; tests can
        /// replace it to customize Authenticate behavior.
        /// </summary>
        public Func<AzureAccount, AzureEnvironment, string, IAccessToken> TokenProvider { get; set; }

        /// <summary>
        /// Default setup: a fixed OrgId token ("Test"/"abc") and a provider
        /// that echoes the account id while reusing the current access token.
        /// </summary>
        public MockTokenAuthenticationFactory()
        {
            Token = new MockAccessToken
            {
                UserId = "Test",
                LoginType = LoginType.OrgId,
                AccessToken = "abc"
            };

            TokenProvider = (account, environment, tenant) => Token = new MockAccessToken
            {
                UserId = account.Id,
                LoginType = LoginType.OrgId,
                AccessToken = Token.AccessToken
            };
        }

        /// <summary>Fixed token for the given user id and access token.</summary>
        public MockTokenAuthenticationFactory(string userId, string accessToken)
        {
            Token = new MockAccessToken
            {
                UserId = userId,
                LoginType = LoginType.OrgId,
                AccessToken = accessToken,
            };

            TokenProvider = ((account, environment, tenant) => Token);
        }

        /// <summary>Fixed token for the given user id, access token, and tenant.</summary>
        public MockTokenAuthenticationFactory(string userId, string accessToken, string tenantId)
        {
            Token = new MockAccessToken
            {
                UserId = userId,
                LoginType = LoginType.OrgId,
                AccessToken = accessToken,
                TenantId = tenantId
            };

            TokenProvider = ((account, environment, tenant) => Token);
        }

        /// <summary>
        /// Returns a mock token. If no account id is set, "test" is assigned
        /// first; if no <see cref="TokenProvider"/> is configured, a token
        /// derived from the account id is returned instead.
        /// </summary>
        public IAccessToken Authenticate(
            AzureAccount account,
            AzureEnvironment environment,
            string tenant,
            SecureString password,
            ShowDialog promptBehavior,
            IdentityModel.Clients.ActiveDirectory.TokenCache tokenCache,
            AzureEnvironment.Endpoint resourceId = AzureEnvironment.Endpoint.ActiveDirectoryServiceEndpointResourceId)
        {
            if (account.Id == null)
            {
                account.Id = "test";
            }

            if (TokenProvider == null)
            {
                return new MockAccessToken()
                {
                    AccessToken = account.Id,
                    LoginType = LoginType.OrgId,
                    UserId = account.Id
                };
            }
            else
            {
                return TokenProvider(account, environment, tenant);
            }
        }

        /// <summary>Overload that delegates using the shared session token cache.</summary>
        public IAccessToken Authenticate(
            AzureAccount account,
            AzureEnvironment environment,
            string tenant,
            SecureString password,
            ShowDialog promptBehavior,
            AzureEnvironment.Endpoint resourceId = AzureEnvironment.Endpoint.ActiveDirectoryServiceEndpointResourceId)
        {
            return Authenticate(account, environment, tenant, password, promptBehavior, AzureSession.TokenCache, resourceId);
        }

        /// <summary>Wraps the current token for the context's subscription.</summary>
        public SubscriptionCloudCredentials GetSubscriptionCloudCredentials(AzureContext context)
        {
            return new AccessTokenCredential(context.Subscription.Id, Token);
        }

        /// <summary>Bearer-token credentials built from the current token.</summary>
        public Microsoft.Rest.ServiceClientCredentials GetServiceClientCredentials(AzureContext context)
        {
            return new Microsoft.Rest.TokenCredentials(Token.AccessToken);
        }

        /// <summary>Endpoint-specific overload; ignores the endpoint argument.</summary>
        public SubscriptionCloudCredentials GetSubscriptionCloudCredentials(AzureContext context, AzureEnvironment.Endpoint targetEndpoint)
        {
            return new AccessTokenCredential(context.Subscription.Id, Token);
        }

        /// <summary>Deliberately unimplemented; tests exercising it should expect a throw.</summary>
        public ServiceClientCredentials GetServiceClientCredentials(AzureContext context, AzureEnvironment.Endpoint targetEndpoint)
        {
            throw new NotImplementedException();
        }
    }
}
| hovsepm/azure-powershell | src/ResourceManager/Common/Commands.ScenarioTests.ResourceManager.Common/Mocks/MockTokenAuthenticationFactory.cs | C# | apache-2.0 | 4,798 |
/*
* Copyright (c) 2008-2017, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.client.spi;
import com.hazelcast.core.Partition;
import com.hazelcast.nio.Address;
import com.hazelcast.nio.serialization.Data;
/**
* Partition service for Hazelcast clients.
*
* Allows to retrieve information about the partition count, the partition owner or the partitionId of a key.
*/
public interface ClientPartitionService {

    /**
     * Returns the address of the cluster member that owns the given partition.
     * NOTE(review): whether this can return {@code null} while ownership is
     * still unknown is not visible here -- confirm against the implementation.
     *
     * @param partitionId the partition ID
     * @return the owner's address
     */
    Address getPartitionOwner(int partitionId);

    /**
     * Returns the ID of the partition that the given serialized key maps to.
     *
     * @param key the serialized key
     * @return the partition ID
     */
    int getPartitionId(Data key);

    /**
     * Returns the ID of the partition that the given key object maps to.
     *
     * @param key the key object
     * @return the partition ID
     */
    int getPartitionId(Object key);

    /**
     * Returns the total number of partitions.
     *
     * @return the partition count
     */
    int getPartitionCount();

    /**
     * Returns the {@link Partition} for the given partition ID.
     *
     * @param partitionId the partition ID
     * @return the partition
     */
    Partition getPartition(int partitionId);
}
| tombujok/hazelcast | hazelcast-client/src/main/java/com/hazelcast/client/spi/ClientPartitionService.java | Java | apache-2.0 | 1,185 |
/**
* @file
* <a href="https://travis-ci.org/Xotic750/has-to-string-tag-x"
* title="Travis status">
* <img
* src="https://travis-ci.org/Xotic750/has-to-string-tag-x.svg?branch=master"
* alt="Travis status" height="18">
* </a>
* <a href="https://david-dm.org/Xotic750/has-to-string-tag-x"
* title="Dependency status">
* <img src="https://david-dm.org/Xotic750/has-to-string-tag-x.svg"
* alt="Dependency status" height="18"/>
* </a>
* <a
* href="https://david-dm.org/Xotic750/has-to-string-tag-x#info=devDependencies"
* title="devDependency status">
* <img src="https://david-dm.org/Xotic750/has-to-string-tag-x/dev-status.svg"
* alt="devDependency status" height="18"/>
* </a>
* <a href="https://badge.fury.io/js/has-to-string-tag-x" title="npm version">
* <img src="https://badge.fury.io/js/has-to-string-tag-x.svg"
* alt="npm version" height="18">
* </a>
*
* hasToStringTag tests if @@toStringTag is supported. `true` if supported.
*
* <h2>ECMAScript compatibility shims for legacy JavaScript engines</h2>
* `es5-shim.js` monkey-patches a JavaScript context to contain all EcmaScript 5
* methods that can be faithfully emulated with a legacy JavaScript engine.
*
* `es5-sham.js` monkey-patches other ES5 methods as closely as possible.
* For these methods, as closely as possible to ES5 is not very close.
* Many of these shams are intended only to allow code to be written to ES5
* without causing run-time errors in older engines. In many cases,
* this means that these shams cause many ES5 methods to silently fail.
* Decide carefully whether this is what you want. Note: es5-sham.js requires
* es5-shim.js to be able to work properly.
*
 * `json3.js` monkey-patches the EcmaScript 5 JSON implementation faithfully.
*
* `es6.shim.js` provides compatibility shims so that legacy JavaScript engines
* behave as closely as possible to ECMAScript 6 (Harmony).
*
* @version 1.1.0
* @author Xotic750 <Xotic750@gmail.com>
* @copyright Xotic750
* @license {@link <https://opensource.org/licenses/MIT> MIT}
* @module has-to-string-tag-x
*/
/* jslint maxlen:80, es6:true, white:true */
/* jshint bitwise:true, camelcase:true, curly:true, eqeqeq:true, forin:true,
freeze:true, futurehostile:true, latedef:true, newcap:true, nocomma:true,
nonbsp:true, singleGroups:true, strict:true, undef:true, unused:true,
es3:false, esnext:true, plusplus:true, maxparams:1, maxdepth:1,
maxstatements:3, maxcomplexity:2 */
/* eslint strict: 1, max-statements: 1 */
/* global module */
;(function () { // eslint-disable-line no-extra-semi
  'use strict';

  var hasSymbolSupport = require('has-symbol-support-x');

  /**
   * `true` when the runtime has native `Symbol` support and
   * `Symbol.toStringTag` is an actual symbol; otherwise `false`.
   *
   * @type boolean
   */
  module.exports = hasSymbolSupport && typeof Symbol.toStringTag === 'symbol';
}());
| BluelabUnifor/sunny-messeger | node_modules/watson-developer-cloud/node_modules/buffer-from/node_modules/is-array-buffer-x/node_modules/has-to-string-tag-x/index.js | JavaScript | apache-2.0 | 2,875 |
#pragma once
#include <Register/Utility.hpp>
namespace Kvasir {
//Serial Peripheral Interface
// Generated register description for the SPI1 Module Configuration Register:
// bit-field locations, access types, and named field values. Field semantics
// are documented inline on each enumerator; do not hand-edit the encodings.
namespace Spi1Mcr{    ///<Module Configuration Register
    using Addr = Register::Address<0x4002d000,0x02e000fe,0x00000000,unsigned>;
    ///Halt
    enum class HaltVal {
        v0=0x00000000,     ///<Start transfers.
        v1=0x00000001,     ///<Stop transfers.
    };
    constexpr Register::FieldLocation<Addr,Register::maskFromRange(0,0),Register::ReadWriteAccess,HaltVal> halt{};
    namespace HaltValC{
        constexpr Register::FieldValue<decltype(halt)::Type,HaltVal::v0> v0{};
        constexpr Register::FieldValue<decltype(halt)::Type,HaltVal::v1> v1{};
    }
    ///Sample Point
    enum class SmplptVal {
        v00=0x00000000,     ///<0 protocol clock cycles between SCK edge and SIN sample
        v01=0x00000001,     ///<1 protocol clock cycle between SCK edge and SIN sample
        v10=0x00000002,     ///<2 protocol clock cycles between SCK edge and SIN sample
    };
    constexpr Register::FieldLocation<Addr,Register::maskFromRange(9,8),Register::ReadWriteAccess,SmplptVal> smplPt{};
    namespace SmplptValC{
        constexpr Register::FieldValue<decltype(smplPt)::Type,SmplptVal::v00> v00{};
        constexpr Register::FieldValue<decltype(smplPt)::Type,SmplptVal::v01> v01{};
        constexpr Register::FieldValue<decltype(smplPt)::Type,SmplptVal::v10> v10{};
    }
    ///CLR_RXF
    enum class ClrrxfVal {
        v0=0x00000000,     ///<Do not clear the RX FIFO counter.
        v1=0x00000001,     ///<Clear the RX FIFO counter.
    };
    constexpr Register::FieldLocation<Addr,Register::maskFromRange(10,10),Register::Access<Register::AccessType::writeOnly,Register::ReadActionType::normal,Register::ModifiedWriteValueType::normal>,ClrrxfVal> clrRxf{};
    namespace ClrrxfValC{
        constexpr Register::FieldValue<decltype(clrRxf)::Type,ClrrxfVal::v0> v0{};
        constexpr Register::FieldValue<decltype(clrRxf)::Type,ClrrxfVal::v1> v1{};
    }
    ///Clear TX FIFO
    enum class ClrtxfVal {
        v0=0x00000000,     ///<Do not clear the TX FIFO counter.
        v1=0x00000001,     ///<Clear the TX FIFO counter.
    };
    constexpr Register::FieldLocation<Addr,Register::maskFromRange(11,11),Register::Access<Register::AccessType::writeOnly,Register::ReadActionType::normal,Register::ModifiedWriteValueType::normal>,ClrtxfVal> clrTxf{};
    namespace ClrtxfValC{
        constexpr Register::FieldValue<decltype(clrTxf)::Type,ClrtxfVal::v0> v0{};
        constexpr Register::FieldValue<decltype(clrTxf)::Type,ClrtxfVal::v1> v1{};
    }
    ///Disable Receive FIFO
    enum class DisrxfVal {
        v0=0x00000000,     ///<RX FIFO is enabled.
        v1=0x00000001,     ///<RX FIFO is disabled.
    };
    constexpr Register::FieldLocation<Addr,Register::maskFromRange(12,12),Register::ReadWriteAccess,DisrxfVal> disRxf{};
    namespace DisrxfValC{
        constexpr Register::FieldValue<decltype(disRxf)::Type,DisrxfVal::v0> v0{};
        constexpr Register::FieldValue<decltype(disRxf)::Type,DisrxfVal::v1> v1{};
    }
    ///Disable Transmit FIFO
    enum class DistxfVal {
        v0=0x00000000,     ///<TX FIFO is enabled.
        v1=0x00000001,     ///<TX FIFO is disabled.
    };
    constexpr Register::FieldLocation<Addr,Register::maskFromRange(13,13),Register::ReadWriteAccess,DistxfVal> disTxf{};
    namespace DistxfValC{
        constexpr Register::FieldValue<decltype(disTxf)::Type,DistxfVal::v0> v0{};
        constexpr Register::FieldValue<decltype(disTxf)::Type,DistxfVal::v1> v1{};
    }
    ///Module Disable
    enum class MdisVal {
        v0=0x00000000,     ///<Enables the module clocks.
        v1=0x00000001,     ///<Allows external logic to disable the module clocks.
    };
    constexpr Register::FieldLocation<Addr,Register::maskFromRange(14,14),Register::ReadWriteAccess,MdisVal> mdis{};
    namespace MdisValC{
        constexpr Register::FieldValue<decltype(mdis)::Type,MdisVal::v0> v0{};
        constexpr Register::FieldValue<decltype(mdis)::Type,MdisVal::v1> v1{};
    }
    ///Doze Enable
    enum class DozeVal {
        v0=0x00000000,     ///<Doze mode has no effect on the module.
        v1=0x00000001,     ///<Doze mode disables the module.
    };
    constexpr Register::FieldLocation<Addr,Register::maskFromRange(15,15),Register::ReadWriteAccess,DozeVal> doze{};
    namespace DozeValC{
        constexpr Register::FieldValue<decltype(doze)::Type,DozeVal::v0> v0{};
        constexpr Register::FieldValue<decltype(doze)::Type,DozeVal::v1> v1{};
    }
    ///Peripheral Chip Select x Inactive State
    constexpr Register::FieldLocation<Addr,Register::maskFromRange(20,16),Register::ReadWriteAccess,unsigned> pcsis{};
    ///Receive FIFO Overflow Overwrite Enable
    enum class RooeVal {
        v0=0x00000000,     ///<Incoming data is ignored.
        v1=0x00000001,     ///<Incoming data is shifted into the shift register.
    };
    constexpr Register::FieldLocation<Addr,Register::maskFromRange(24,24),Register::ReadWriteAccess,RooeVal> rooe{};
    namespace RooeValC{
        constexpr Register::FieldValue<decltype(rooe)::Type,RooeVal::v0> v0{};
        constexpr Register::FieldValue<decltype(rooe)::Type,RooeVal::v1> v1{};
    }
    ///Modified Timing Format Enable
    enum class MtfeVal {
        v0=0x00000000,     ///<Modified SPI transfer format disabled.
        v1=0x00000001,     ///<Modified SPI transfer format enabled.
    };
    constexpr Register::FieldLocation<Addr,Register::maskFromRange(26,26),Register::ReadWriteAccess,MtfeVal> mtfe{};
    namespace MtfeValC{
        constexpr Register::FieldValue<decltype(mtfe)::Type,MtfeVal::v0> v0{};
        constexpr Register::FieldValue<decltype(mtfe)::Type,MtfeVal::v1> v1{};
    }
    ///Freeze
    enum class FrzVal {
        v0=0x00000000,     ///<Do not halt serial transfers in Debug mode.
        v1=0x00000001,     ///<Halt serial transfers in Debug mode.
    };
    constexpr Register::FieldLocation<Addr,Register::maskFromRange(27,27),Register::ReadWriteAccess,FrzVal> frz{};
    namespace FrzValC{
        constexpr Register::FieldValue<decltype(frz)::Type,FrzVal::v0> v0{};
        constexpr Register::FieldValue<decltype(frz)::Type,FrzVal::v1> v1{};
    }
    ///SPI Configuration.
    enum class DconfVal {
        v00=0x00000000,     ///<SPI
    };
    constexpr Register::FieldLocation<Addr,Register::maskFromRange(29,28),Register::Access<Register::AccessType::readOnly,Register::ReadActionType::normal,Register::ModifiedWriteValueType::normal>,DconfVal> dconf{};
    namespace DconfValC{
        constexpr Register::FieldValue<decltype(dconf)::Type,DconfVal::v00> v00{};
    }
    ///Continuous SCK Enable
    enum class ContsckeVal {
        v0=0x00000000,     ///<Continuous SCK disabled.
        v1=0x00000001,     ///<Continuous SCK enabled.
    };
    constexpr Register::FieldLocation<Addr,Register::maskFromRange(30,30),Register::ReadWriteAccess,ContsckeVal> contScke{};
    namespace ContsckeValC{
        constexpr Register::FieldValue<decltype(contScke)::Type,ContsckeVal::v0> v0{};
        constexpr Register::FieldValue<decltype(contScke)::Type,ContsckeVal::v1> v1{};
    }
    ///Master/Slave Mode Select
    enum class MstrVal {
        v0=0x00000000,     ///<Enables Slave mode
        v1=0x00000001,     ///<Enables Master mode
    };
    constexpr Register::FieldLocation<Addr,Register::maskFromRange(31,31),Register::ReadWriteAccess,MstrVal> mstr{};
    namespace MstrValC{
        constexpr Register::FieldValue<decltype(mstr)::Type,MstrVal::v0> v0{};
        constexpr Register::FieldValue<decltype(mstr)::Type,MstrVal::v1> v1{};
    }
}
// Generated register description for the SPI1 Transfer Count Register:
// a single 16-bit counter field in the upper half-word.
namespace Spi1Tcr{    ///<Transfer Count Register
    using Addr = Register::Address<0x4002d008,0x0000ffff,0x00000000,unsigned>;
    ///SPI Transfer Counter
    constexpr Register::FieldLocation<Addr,Register::maskFromRange(31,16),Register::ReadWriteAccess,unsigned> spiTcnt{};
}
// Generated register description for the SPI1 Clock and Transfer Attributes
// Register as laid out in slave mode: clock phase, clock polarity, frame size.
namespace Spi1CtarSlave{    ///<Clock and Transfer Attributes Register (In Slave Mode)
    using Addr = Register::Address<0x4002d00c,0x01ffffff,0x00000000,unsigned>;
    ///Clock Phase
    enum class CphaVal {
        v0=0x00000000,     ///<Data is captured on the leading edge of SCK and changed on the following edge.
        v1=0x00000001,     ///<Data is changed on the leading edge of SCK and captured on the following edge.
    };
    constexpr Register::FieldLocation<Addr,Register::maskFromRange(25,25),Register::ReadWriteAccess,CphaVal> cpha{};
    namespace CphaValC{
        constexpr Register::FieldValue<decltype(cpha)::Type,CphaVal::v0> v0{};
        constexpr Register::FieldValue<decltype(cpha)::Type,CphaVal::v1> v1{};
    }
    ///Clock Polarity
    enum class CpolVal {
        v0=0x00000000,     ///<The inactive state value of SCK is low.
        v1=0x00000001,     ///<The inactive state value of SCK is high.
    };
    constexpr Register::FieldLocation<Addr,Register::maskFromRange(26,26),Register::ReadWriteAccess,CpolVal> cpol{};
    namespace CpolValC{
        constexpr Register::FieldValue<decltype(cpol)::Type,CpolVal::v0> v0{};
        constexpr Register::FieldValue<decltype(cpol)::Type,CpolVal::v1> v1{};
    }
    ///Frame Size
    constexpr Register::FieldLocation<Addr,Register::maskFromRange(31,27),Register::ReadWriteAccess,unsigned> fmsz{};
}
// Generated register description for the SPI1 Status Register: FIFO pointers
// and counters (read-only) plus transfer status flags.
namespace Spi1Sr{    ///<Status Register
    using Addr = Register::Address<0x4002d02c,0x25f50000,0x00000000,unsigned>;
    ///Pop Next Pointer
    constexpr Register::FieldLocation<Addr,Register::maskFromRange(3,0),Register::Access<Register::AccessType::readOnly,Register::ReadActionType::normal,Register::ModifiedWriteValueType::normal>,unsigned> popnxtptr{};
    ///RX FIFO Counter
    constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,4),Register::Access<Register::AccessType::readOnly,Register::ReadActionType::normal,Register::ModifiedWriteValueType::normal>,unsigned> rxctr{};
    ///Transmit Next Pointer
    constexpr Register::FieldLocation<Addr,Register::maskFromRange(11,8),Register::Access<Register::AccessType::readOnly,Register::ReadActionType::normal,Register::ModifiedWriteValueType::normal>,unsigned> txnxtptr{};
    ///TX FIFO Counter
    constexpr Register::FieldLocation<Addr,Register::maskFromRange(15,12),Register::Access<Register::AccessType::readOnly,Register::ReadActionType::normal,Register::ModifiedWriteValueType::normal>,unsigned> txctr{};
    ///Receive FIFO Drain Flag
    enum class RfdfVal {
        v0=0x00000000,     ///<RX FIFO is empty.
        v1=0x00000001,     ///<RX FIFO is not empty.
    };
    constexpr Register::FieldLocation<Addr,Register::maskFromRange(17,17),Register::ReadWriteAccess,RfdfVal> rfdf{};
    namespace RfdfValC{
        constexpr Register::FieldValue<decltype(rfdf)::Type,RfdfVal::v0> v0{};
        constexpr Register::FieldValue<decltype(rfdf)::Type,RfdfVal::v1> v1{};
    }
    ///Receive FIFO Overflow Flag
    enum class RfofVal {
        v0=0x00000000,     ///<No Rx FIFO overflow.
        v1=0x00000001,     ///<Rx FIFO overflow has occurred.
    };
    constexpr Register::FieldLocation<Addr,Register::maskFromRange(19,19),Register::ReadWriteAccess,RfofVal> rfof{};
    namespace RfofValC{
        constexpr Register::FieldValue<decltype(rfof)::Type,RfofVal::v0> v0{};
        constexpr Register::FieldValue<decltype(rfof)::Type,RfofVal::v1> v1{};
    }
    ///Transmit FIFO Fill Flag
    enum class TfffVal {
        v0=0x00000000,     ///<TX FIFO is full.
        v1=0x00000001,     ///<TX FIFO is not full.
    };
    constexpr Register::FieldLocation<Addr,Register::maskFromRange(25,25),Register::ReadWriteAccess,TfffVal> tfff{};
    namespace TfffValC{
        constexpr Register::FieldValue<decltype(tfff)::Type,TfffVal::v0> v0{};
        constexpr Register::FieldValue<decltype(tfff)::Type,TfffVal::v1> v1{};
    }
    ///Transmit FIFO Underflow Flag
    enum class TfufVal {
        v0=0x00000000,     ///<No TX FIFO underflow.
        v1=0x00000001,     ///<TX FIFO underflow has occurred.
    };
    constexpr Register::FieldLocation<Addr,Register::maskFromRange(27,27),Register::ReadWriteAccess,TfufVal> tfuf{};
    namespace TfufValC{
        constexpr Register::FieldValue<decltype(tfuf)::Type,TfufVal::v0> v0{};
        constexpr Register::FieldValue<decltype(tfuf)::Type,TfufVal::v1> v1{};
    }
    ///End of Queue Flag
    enum class EoqfVal {
        v0=0x00000000,     ///<EOQ is not set in the executing command.
        v1=0x00000001,     ///<EOQ is set in the executing SPI command.
    };
    constexpr Register::FieldLocation<Addr,Register::maskFromRange(28,28),Register::ReadWriteAccess,EoqfVal> eoqf{};
    namespace EoqfValC{
        constexpr Register::FieldValue<decltype(eoqf)::Type,EoqfVal::v0> v0{};
        constexpr Register::FieldValue<decltype(eoqf)::Type,EoqfVal::v1> v1{};
    }
    ///TX and RX Status
    enum class TxrxsVal {
        v0=0x00000000,     ///<Transmit and receive operations are disabled (The module is in Stopped state).
        v1=0x00000001,     ///<Transmit and receive operations are enabled (The module is in Running state).
    };
    constexpr Register::FieldLocation<Addr,Register::maskFromRange(30,30),Register::ReadWriteAccess,TxrxsVal> txrxs{};
    namespace TxrxsValC{
        constexpr Register::FieldValue<decltype(txrxs)::Type,TxrxsVal::v0> v0{};
        constexpr Register::FieldValue<decltype(txrxs)::Type,TxrxsVal::v1> v1{};
    }
    ///Transfer Complete Flag
    enum class TcfVal {
        v0=0x00000000,     ///<Transfer not complete.
        v1=0x00000001,     ///<Transfer complete.
    };
    constexpr Register::FieldLocation<Addr,Register::maskFromRange(31,31),Register::ReadWriteAccess,TcfVal> tcf{};
    namespace TcfValC{
        constexpr Register::FieldValue<decltype(tcf)::Type,TcfVal::v0> v0{};
        constexpr Register::FieldValue<decltype(tcf)::Type,TcfVal::v1> v1{};
    }
}
namespace Spi1Rser{ ///<DMA/Interrupt Request Select and Enable Register
using Addr = Register::Address<0x4002d030,0x64f4ffff,0x00000000,unsigned>;
///Receive FIFO Drain DMA or Interrupt Request Select
enum class RfdfdirsVal {
v0=0x00000000, ///<Interrupt request.
v1=0x00000001, ///<DMA request.
};
constexpr Register::FieldLocation<Addr,Register::maskFromRange(16,16),Register::ReadWriteAccess,RfdfdirsVal> rfdfDirs{};
namespace RfdfdirsValC{
constexpr Register::FieldValue<decltype(rfdfDirs)::Type,RfdfdirsVal::v0> v0{};
constexpr Register::FieldValue<decltype(rfdfDirs)::Type,RfdfdirsVal::v1> v1{};
}
///Receive FIFO Drain Request Enable
enum class RfdfreVal {
v0=0x00000000, ///<RFDF interrupt or DMA requests are disabled.
v1=0x00000001, ///<RFDF interrupt or DMA requests are enabled.
};
constexpr Register::FieldLocation<Addr,Register::maskFromRange(17,17),Register::ReadWriteAccess,RfdfreVal> rfdfRe{};
namespace RfdfreValC{
constexpr Register::FieldValue<decltype(rfdfRe)::Type,RfdfreVal::v0> v0{};
constexpr Register::FieldValue<decltype(rfdfRe)::Type,RfdfreVal::v1> v1{};
}
///Receive FIFO Overflow Request Enable
enum class RfofreVal {
v0=0x00000000, ///<RFOF interrupt requests are disabled.
v1=0x00000001, ///<RFOF interrupt requests are enabled.
};
constexpr Register::FieldLocation<Addr,Register::maskFromRange(19,19),Register::ReadWriteAccess,RfofreVal> rfofRe{};
namespace RfofreValC{
constexpr Register::FieldValue<decltype(rfofRe)::Type,RfofreVal::v0> v0{};
constexpr Register::FieldValue<decltype(rfofRe)::Type,RfofreVal::v1> v1{};
}
///Transmit FIFO Fill DMA or Interrupt Request Select
enum class TfffdirsVal {
v0=0x00000000, ///<TFFF flag generates interrupt requests.
v1=0x00000001, ///<TFFF flag generates DMA requests.
};
constexpr Register::FieldLocation<Addr,Register::maskFromRange(24,24),Register::ReadWriteAccess,TfffdirsVal> tfffDirs{};
namespace TfffdirsValC{
constexpr Register::FieldValue<decltype(tfffDirs)::Type,TfffdirsVal::v0> v0{};
constexpr Register::FieldValue<decltype(tfffDirs)::Type,TfffdirsVal::v1> v1{};
}
///Transmit FIFO Fill Request Enable
enum class TfffreVal {
v0=0x00000000, ///<TFFF interrupts or DMA requests are disabled.
v1=0x00000001, ///<TFFF interrupts or DMA requests are enabled.
};
constexpr Register::FieldLocation<Addr,Register::maskFromRange(25,25),Register::ReadWriteAccess,TfffreVal> tfffRe{};
namespace TfffreValC{
constexpr Register::FieldValue<decltype(tfffRe)::Type,TfffreVal::v0> v0{};
constexpr Register::FieldValue<decltype(tfffRe)::Type,TfffreVal::v1> v1{};
}
///Transmit FIFO Underflow Request Enable
enum class TfufreVal {
v0=0x00000000, ///<TFUF interrupt requests are disabled.
v1=0x00000001, ///<TFUF interrupt requests are enabled.
};
constexpr Register::FieldLocation<Addr,Register::maskFromRange(27,27),Register::ReadWriteAccess,TfufreVal> tfufRe{};
namespace TfufreValC{
constexpr Register::FieldValue<decltype(tfufRe)::Type,TfufreVal::v0> v0{};
constexpr Register::FieldValue<decltype(tfufRe)::Type,TfufreVal::v1> v1{};
}
///Finished Request Enable
enum class EoqfreVal {
v0=0x00000000, ///<EOQF interrupt requests are disabled.
v1=0x00000001, ///<EOQF interrupt requests are enabled.
};
constexpr Register::FieldLocation<Addr,Register::maskFromRange(28,28),Register::ReadWriteAccess,EoqfreVal> eoqfRe{};
namespace EoqfreValC{
constexpr Register::FieldValue<decltype(eoqfRe)::Type,EoqfreVal::v0> v0{};
constexpr Register::FieldValue<decltype(eoqfRe)::Type,EoqfreVal::v1> v1{};
}
///Transmission Complete Request Enable
enum class TcfreVal {
v0=0x00000000, ///<TCF interrupt requests are disabled.
v1=0x00000001, ///<TCF interrupt requests are enabled.
};
constexpr Register::FieldLocation<Addr,Register::maskFromRange(31,31),Register::ReadWriteAccess,TcfreVal> tcfRe{};
namespace TcfreValC{
constexpr Register::FieldValue<decltype(tcfRe)::Type,TcfreVal::v0> v0{};
constexpr Register::FieldValue<decltype(tcfRe)::Type,TcfreVal::v1> v1{};
}
}
namespace Spi1Pushr{ ///<PUSH TX FIFO Register In Master Mode
using Addr = Register::Address<0x4002d034,0x03e00000,0x00000000,unsigned>;
///Transmit Data
constexpr Register::FieldLocation<Addr,Register::maskFromRange(15,0),Register::ReadWriteAccess,unsigned> txdata{};
///no description available
constexpr Register::FieldLocation<Addr,Register::maskFromRange(20,16),Register::ReadWriteAccess,unsigned> pcs{};
///Clear Transfer Counter
enum class CtcntVal {
v0=0x00000000, ///<Do not clear the TCR[TCNT] field.
v1=0x00000001, ///<Clear the TCR[TCNT] field.
};
constexpr Register::FieldLocation<Addr,Register::maskFromRange(26,26),Register::ReadWriteAccess,CtcntVal> ctcnt{};
namespace CtcntValC{
constexpr Register::FieldValue<decltype(ctcnt)::Type,CtcntVal::v0> v0{};
constexpr Register::FieldValue<decltype(ctcnt)::Type,CtcntVal::v1> v1{};
}
///End Of Queue
enum class EoqVal {
v0=0x00000000, ///<The SPI data is not the last data to transfer.
v1=0x00000001, ///<The SPI data is the last data to transfer.
};
constexpr Register::FieldLocation<Addr,Register::maskFromRange(27,27),Register::ReadWriteAccess,EoqVal> eoq{};
namespace EoqValC{
constexpr Register::FieldValue<decltype(eoq)::Type,EoqVal::v0> v0{};
constexpr Register::FieldValue<decltype(eoq)::Type,EoqVal::v1> v1{};
}
///Clock and Transfer Attributes Select
constexpr Register::FieldLocation<Addr,Register::maskFromRange(30,28),Register::ReadWriteAccess,unsigned> ctas{};
///Continuous Peripheral Chip Select Enable
enum class ContVal {
v0=0x00000000, ///<Return PCSn signals to their inactive state between transfers.
v1=0x00000001, ///<Keep PCSn signals asserted between transfers.
};
constexpr Register::FieldLocation<Addr,Register::maskFromRange(31,31),Register::ReadWriteAccess,ContVal> cont{};
namespace ContValC{
constexpr Register::FieldValue<decltype(cont)::Type,ContVal::v0> v0{};
constexpr Register::FieldValue<decltype(cont)::Type,ContVal::v1> v1{};
}
}
namespace Spi1PushrSlave{ ///<PUSH TX FIFO Register In Slave Mode
using Addr = Register::Address<0x4002d034,0x00000000,0x00000000,unsigned>;
///Transmit Data
constexpr Register::FieldLocation<Addr,Register::maskFromRange(31,0),Register::ReadWriteAccess,unsigned> txdata{};
}
namespace Spi1Popr{ ///<POP RX FIFO Register
using Addr = Register::Address<0x4002d038,0x00000000,0x00000000,unsigned>;
///Received Data
constexpr Register::FieldLocation<Addr,Register::maskFromRange(31,0),Register::Access<Register::AccessType::readOnly,Register::ReadActionType::normal,Register::ModifiedWriteValueType::normal>,unsigned> rxdata{};
}
namespace Spi1Ctar0{ ///<Clock and Transfer Attributes Register (In Master Mode)
using Addr = Register::Address<0x4002d00c,0x00000000,0x00000000,unsigned>;
///Baud Rate Scaler
constexpr Register::FieldLocation<Addr,Register::maskFromRange(3,0),Register::ReadWriteAccess,unsigned> br{};
///Delay After Transfer Scaler
constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,4),Register::ReadWriteAccess,unsigned> dt{};
///After SCK Delay Scaler
constexpr Register::FieldLocation<Addr,Register::maskFromRange(11,8),Register::ReadWriteAccess,unsigned> asc{};
///PCS to SCK Delay Scaler
constexpr Register::FieldLocation<Addr,Register::maskFromRange(15,12),Register::ReadWriteAccess,unsigned> cssck{};
///Baud Rate Prescaler
enum class PbrVal {
v00=0x00000000, ///<Baud Rate Prescaler value is 2.
v01=0x00000001, ///<Baud Rate Prescaler value is 3.
v10=0x00000002, ///<Baud Rate Prescaler value is 5.
v11=0x00000003, ///<Baud Rate Prescaler value is 7.
};
constexpr Register::FieldLocation<Addr,Register::maskFromRange(17,16),Register::ReadWriteAccess,PbrVal> pbr{};
namespace PbrValC{
constexpr Register::FieldValue<decltype(pbr)::Type,PbrVal::v00> v00{};
constexpr Register::FieldValue<decltype(pbr)::Type,PbrVal::v01> v01{};
constexpr Register::FieldValue<decltype(pbr)::Type,PbrVal::v10> v10{};
constexpr Register::FieldValue<decltype(pbr)::Type,PbrVal::v11> v11{};
}
///Delay after Transfer Prescaler
enum class PdtVal {
v00=0x00000000, ///<Delay after Transfer Prescaler value is 1.
v01=0x00000001, ///<Delay after Transfer Prescaler value is 3.
v10=0x00000002, ///<Delay after Transfer Prescaler value is 5.
v11=0x00000003, ///<Delay after Transfer Prescaler value is 7.
};
constexpr Register::FieldLocation<Addr,Register::maskFromRange(19,18),Register::ReadWriteAccess,PdtVal> pdt{};
namespace PdtValC{
constexpr Register::FieldValue<decltype(pdt)::Type,PdtVal::v00> v00{};
constexpr Register::FieldValue<decltype(pdt)::Type,PdtVal::v01> v01{};
constexpr Register::FieldValue<decltype(pdt)::Type,PdtVal::v10> v10{};
constexpr Register::FieldValue<decltype(pdt)::Type,PdtVal::v11> v11{};
}
///After SCK Delay Prescaler
enum class PascVal {
v00=0x00000000, ///<Delay after Transfer Prescaler value is 1.
v01=0x00000001, ///<Delay after Transfer Prescaler value is 3.
v10=0x00000002, ///<Delay after Transfer Prescaler value is 5.
v11=0x00000003, ///<Delay after Transfer Prescaler value is 7.
};
constexpr Register::FieldLocation<Addr,Register::maskFromRange(21,20),Register::ReadWriteAccess,PascVal> pasc{};
namespace PascValC{
constexpr Register::FieldValue<decltype(pasc)::Type,PascVal::v00> v00{};
constexpr Register::FieldValue<decltype(pasc)::Type,PascVal::v01> v01{};
constexpr Register::FieldValue<decltype(pasc)::Type,PascVal::v10> v10{};
constexpr Register::FieldValue<decltype(pasc)::Type,PascVal::v11> v11{};
}
///PCS to SCK Delay Prescaler
enum class PcssckVal {
v00=0x00000000, ///<PCS to SCK Prescaler value is 1.
v01=0x00000001, ///<PCS to SCK Prescaler value is 3.
v10=0x00000002, ///<PCS to SCK Prescaler value is 5.
v11=0x00000003, ///<PCS to SCK Prescaler value is 7.
};
constexpr Register::FieldLocation<Addr,Register::maskFromRange(23,22),Register::ReadWriteAccess,PcssckVal> pcssck{};
namespace PcssckValC{
constexpr Register::FieldValue<decltype(pcssck)::Type,PcssckVal::v00> v00{};
constexpr Register::FieldValue<decltype(pcssck)::Type,PcssckVal::v01> v01{};
constexpr Register::FieldValue<decltype(pcssck)::Type,PcssckVal::v10> v10{};
constexpr Register::FieldValue<decltype(pcssck)::Type,PcssckVal::v11> v11{};
}
///LSB First
enum class LsbfeVal {
v0=0x00000000, ///<Data is transferred MSB first.
v1=0x00000001, ///<Data is transferred LSB first.
};
constexpr Register::FieldLocation<Addr,Register::maskFromRange(24,24),Register::ReadWriteAccess,LsbfeVal> lsbfe{};
namespace LsbfeValC{
constexpr Register::FieldValue<decltype(lsbfe)::Type,LsbfeVal::v0> v0{};
constexpr Register::FieldValue<decltype(lsbfe)::Type,LsbfeVal::v1> v1{};
}
///Clock Phase
enum class CphaVal {
v0=0x00000000, ///<Data is captured on the leading edge of SCK and changed on the following edge.
v1=0x00000001, ///<Data is changed on the leading edge of SCK and captured on the following edge.
};
constexpr Register::FieldLocation<Addr,Register::maskFromRange(25,25),Register::ReadWriteAccess,CphaVal> cpha{};
namespace CphaValC{
constexpr Register::FieldValue<decltype(cpha)::Type,CphaVal::v0> v0{};
constexpr Register::FieldValue<decltype(cpha)::Type,CphaVal::v1> v1{};
}
///Clock Polarity
enum class CpolVal {
v0=0x00000000, ///<The inactive state value of SCK is low.
v1=0x00000001, ///<The inactive state value of SCK is high.
};
constexpr Register::FieldLocation<Addr,Register::maskFromRange(26,26),Register::ReadWriteAccess,CpolVal> cpol{};
namespace CpolValC{
constexpr Register::FieldValue<decltype(cpol)::Type,CpolVal::v0> v0{};
constexpr Register::FieldValue<decltype(cpol)::Type,CpolVal::v1> v1{};
}
///Frame Size
constexpr Register::FieldLocation<Addr,Register::maskFromRange(30,27),Register::ReadWriteAccess,unsigned> fmsz{};
///Double Baud Rate
enum class DbrVal {
v0=0x00000000, ///<The baud rate is computed normally with a 50/50 duty cycle.
v1=0x00000001, ///<The baud rate is doubled with the duty cycle depending on the Baud Rate Prescaler.
};
constexpr Register::FieldLocation<Addr,Register::maskFromRange(31,31),Register::ReadWriteAccess,DbrVal> dbr{};
namespace DbrValC{
constexpr Register::FieldValue<decltype(dbr)::Type,DbrVal::v0> v0{};
constexpr Register::FieldValue<decltype(dbr)::Type,DbrVal::v1> v1{};
}
}
namespace Spi1Ctar1{ ///<Clock and Transfer Attributes Register (In Master Mode)
using Addr = Register::Address<0x4002d010,0x00000000,0x00000000,unsigned>;
///Baud Rate Scaler
constexpr Register::FieldLocation<Addr,Register::maskFromRange(3,0),Register::ReadWriteAccess,unsigned> br{};
///Delay After Transfer Scaler
constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,4),Register::ReadWriteAccess,unsigned> dt{};
///After SCK Delay Scaler
constexpr Register::FieldLocation<Addr,Register::maskFromRange(11,8),Register::ReadWriteAccess,unsigned> asc{};
///PCS to SCK Delay Scaler
constexpr Register::FieldLocation<Addr,Register::maskFromRange(15,12),Register::ReadWriteAccess,unsigned> cssck{};
///Baud Rate Prescaler
enum class PbrVal {
v00=0x00000000, ///<Baud Rate Prescaler value is 2.
v01=0x00000001, ///<Baud Rate Prescaler value is 3.
v10=0x00000002, ///<Baud Rate Prescaler value is 5.
v11=0x00000003, ///<Baud Rate Prescaler value is 7.
};
constexpr Register::FieldLocation<Addr,Register::maskFromRange(17,16),Register::ReadWriteAccess,PbrVal> pbr{};
namespace PbrValC{
constexpr Register::FieldValue<decltype(pbr)::Type,PbrVal::v00> v00{};
constexpr Register::FieldValue<decltype(pbr)::Type,PbrVal::v01> v01{};
constexpr Register::FieldValue<decltype(pbr)::Type,PbrVal::v10> v10{};
constexpr Register::FieldValue<decltype(pbr)::Type,PbrVal::v11> v11{};
}
///Delay after Transfer Prescaler
enum class PdtVal {
v00=0x00000000, ///<Delay after Transfer Prescaler value is 1.
v01=0x00000001, ///<Delay after Transfer Prescaler value is 3.
v10=0x00000002, ///<Delay after Transfer Prescaler value is 5.
v11=0x00000003, ///<Delay after Transfer Prescaler value is 7.
};
constexpr Register::FieldLocation<Addr,Register::maskFromRange(19,18),Register::ReadWriteAccess,PdtVal> pdt{};
namespace PdtValC{
constexpr Register::FieldValue<decltype(pdt)::Type,PdtVal::v00> v00{};
constexpr Register::FieldValue<decltype(pdt)::Type,PdtVal::v01> v01{};
constexpr Register::FieldValue<decltype(pdt)::Type,PdtVal::v10> v10{};
constexpr Register::FieldValue<decltype(pdt)::Type,PdtVal::v11> v11{};
}
///After SCK Delay Prescaler
enum class PascVal {
v00=0x00000000, ///<Delay after Transfer Prescaler value is 1.
v01=0x00000001, ///<Delay after Transfer Prescaler value is 3.
v10=0x00000002, ///<Delay after Transfer Prescaler value is 5.
v11=0x00000003, ///<Delay after Transfer Prescaler value is 7.
};
constexpr Register::FieldLocation<Addr,Register::maskFromRange(21,20),Register::ReadWriteAccess,PascVal> pasc{};
namespace PascValC{
constexpr Register::FieldValue<decltype(pasc)::Type,PascVal::v00> v00{};
constexpr Register::FieldValue<decltype(pasc)::Type,PascVal::v01> v01{};
constexpr Register::FieldValue<decltype(pasc)::Type,PascVal::v10> v10{};
constexpr Register::FieldValue<decltype(pasc)::Type,PascVal::v11> v11{};
}
///PCS to SCK Delay Prescaler
enum class PcssckVal {
v00=0x00000000, ///<PCS to SCK Prescaler value is 1.
v01=0x00000001, ///<PCS to SCK Prescaler value is 3.
v10=0x00000002, ///<PCS to SCK Prescaler value is 5.
v11=0x00000003, ///<PCS to SCK Prescaler value is 7.
};
constexpr Register::FieldLocation<Addr,Register::maskFromRange(23,22),Register::ReadWriteAccess,PcssckVal> pcssck{};
namespace PcssckValC{
constexpr Register::FieldValue<decltype(pcssck)::Type,PcssckVal::v00> v00{};
constexpr Register::FieldValue<decltype(pcssck)::Type,PcssckVal::v01> v01{};
constexpr Register::FieldValue<decltype(pcssck)::Type,PcssckVal::v10> v10{};
constexpr Register::FieldValue<decltype(pcssck)::Type,PcssckVal::v11> v11{};
}
///LSB First
enum class LsbfeVal {
v0=0x00000000, ///<Data is transferred MSB first.
v1=0x00000001, ///<Data is transferred LSB first.
};
constexpr Register::FieldLocation<Addr,Register::maskFromRange(24,24),Register::ReadWriteAccess,LsbfeVal> lsbfe{};
namespace LsbfeValC{
constexpr Register::FieldValue<decltype(lsbfe)::Type,LsbfeVal::v0> v0{};
constexpr Register::FieldValue<decltype(lsbfe)::Type,LsbfeVal::v1> v1{};
}
///Clock Phase
enum class CphaVal {
v0=0x00000000, ///<Data is captured on the leading edge of SCK and changed on the following edge.
v1=0x00000001, ///<Data is changed on the leading edge of SCK and captured on the following edge.
};
constexpr Register::FieldLocation<Addr,Register::maskFromRange(25,25),Register::ReadWriteAccess,CphaVal> cpha{};
namespace CphaValC{
constexpr Register::FieldValue<decltype(cpha)::Type,CphaVal::v0> v0{};
constexpr Register::FieldValue<decltype(cpha)::Type,CphaVal::v1> v1{};
}
///Clock Polarity
enum class CpolVal {
v0=0x00000000, ///<The inactive state value of SCK is low.
v1=0x00000001, ///<The inactive state value of SCK is high.
};
constexpr Register::FieldLocation<Addr,Register::maskFromRange(26,26),Register::ReadWriteAccess,CpolVal> cpol{};
namespace CpolValC{
constexpr Register::FieldValue<decltype(cpol)::Type,CpolVal::v0> v0{};
constexpr Register::FieldValue<decltype(cpol)::Type,CpolVal::v1> v1{};
}
///Frame Size
constexpr Register::FieldLocation<Addr,Register::maskFromRange(30,27),Register::ReadWriteAccess,unsigned> fmsz{};
///Double Baud Rate
enum class DbrVal {
v0=0x00000000, ///<The baud rate is computed normally with a 50/50 duty cycle.
v1=0x00000001, ///<The baud rate is doubled with the duty cycle depending on the Baud Rate Prescaler.
};
constexpr Register::FieldLocation<Addr,Register::maskFromRange(31,31),Register::ReadWriteAccess,DbrVal> dbr{};
namespace DbrValC{
constexpr Register::FieldValue<decltype(dbr)::Type,DbrVal::v0> v0{};
constexpr Register::FieldValue<decltype(dbr)::Type,DbrVal::v1> v1{};
}
}
namespace Spi1Txfr0{ ///<Transmit FIFO Registers
using Addr = Register::Address<0x4002d03c,0x00000000,0x00000000,unsigned>;
///Transmit Data
constexpr Register::FieldLocation<Addr,Register::maskFromRange(15,0),Register::Access<Register::AccessType::readOnly,Register::ReadActionType::normal,Register::ModifiedWriteValueType::normal>,unsigned> txdata{};
///Transmit Command or Transmit Data
constexpr Register::FieldLocation<Addr,Register::maskFromRange(31,16),Register::Access<Register::AccessType::readOnly,Register::ReadActionType::normal,Register::ModifiedWriteValueType::normal>,unsigned> txcmdTxdata{};
}
namespace Spi1Txfr1{ ///<Transmit FIFO Registers
using Addr = Register::Address<0x4002d040,0x00000000,0x00000000,unsigned>;
///Transmit Data
constexpr Register::FieldLocation<Addr,Register::maskFromRange(15,0),Register::Access<Register::AccessType::readOnly,Register::ReadActionType::normal,Register::ModifiedWriteValueType::normal>,unsigned> txdata{};
///Transmit Command or Transmit Data
constexpr Register::FieldLocation<Addr,Register::maskFromRange(31,16),Register::Access<Register::AccessType::readOnly,Register::ReadActionType::normal,Register::ModifiedWriteValueType::normal>,unsigned> txcmdTxdata{};
}
namespace Spi1Txfr2{ ///<Transmit FIFO Registers
using Addr = Register::Address<0x4002d044,0x00000000,0x00000000,unsigned>;
///Transmit Data
constexpr Register::FieldLocation<Addr,Register::maskFromRange(15,0),Register::Access<Register::AccessType::readOnly,Register::ReadActionType::normal,Register::ModifiedWriteValueType::normal>,unsigned> txdata{};
///Transmit Command or Transmit Data
constexpr Register::FieldLocation<Addr,Register::maskFromRange(31,16),Register::Access<Register::AccessType::readOnly,Register::ReadActionType::normal,Register::ModifiedWriteValueType::normal>,unsigned> txcmdTxdata{};
}
namespace Spi1Txfr3{ ///<Transmit FIFO Registers
using Addr = Register::Address<0x4002d048,0x00000000,0x00000000,unsigned>;
///Transmit Data
constexpr Register::FieldLocation<Addr,Register::maskFromRange(15,0),Register::Access<Register::AccessType::readOnly,Register::ReadActionType::normal,Register::ModifiedWriteValueType::normal>,unsigned> txdata{};
///Transmit Command or Transmit Data
constexpr Register::FieldLocation<Addr,Register::maskFromRange(31,16),Register::Access<Register::AccessType::readOnly,Register::ReadActionType::normal,Register::ModifiedWriteValueType::normal>,unsigned> txcmdTxdata{};
}
namespace Spi1Rxfr0{ ///<Receive FIFO Registers
using Addr = Register::Address<0x4002d07c,0x00000000,0x00000000,unsigned>;
///Receive Data
constexpr Register::FieldLocation<Addr,Register::maskFromRange(31,0),Register::Access<Register::AccessType::readOnly,Register::ReadActionType::normal,Register::ModifiedWriteValueType::normal>,unsigned> rxdata{};
}
namespace Spi1Rxfr1{ ///<Receive FIFO Registers
using Addr = Register::Address<0x4002d080,0x00000000,0x00000000,unsigned>;
///Receive Data
constexpr Register::FieldLocation<Addr,Register::maskFromRange(31,0),Register::Access<Register::AccessType::readOnly,Register::ReadActionType::normal,Register::ModifiedWriteValueType::normal>,unsigned> rxdata{};
}
namespace Spi1Rxfr2{ ///<Receive FIFO Registers
using Addr = Register::Address<0x4002d084,0x00000000,0x00000000,unsigned>;
///Receive Data
constexpr Register::FieldLocation<Addr,Register::maskFromRange(31,0),Register::Access<Register::AccessType::readOnly,Register::ReadActionType::normal,Register::ModifiedWriteValueType::normal>,unsigned> rxdata{};
}
namespace Spi1Rxfr3{ ///<Receive FIFO Registers
using Addr = Register::Address<0x4002d088,0x00000000,0x00000000,unsigned>;
///Receive Data
constexpr Register::FieldLocation<Addr,Register::maskFromRange(31,0),Register::Access<Register::AccessType::readOnly,Register::ReadActionType::normal,Register::ModifiedWriteValueType::normal>,unsigned> rxdata{};
}
}
| porkybrain/Kvasir | Lib/Chip/CM4/Freescale/MK21DA5/SPI1.hpp | C++ | apache-2.0 | 40,340 |
"""
Copyright (C) 2004-2015 Pivotal Software, Inc. All rights reserved.
This program and the accompanying materials are made available under
the terms of the under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
import time
import shutil
import unittest2 as unittest
from gppylib.db import dbconn
from gppylib.commands.base import Command
from gppylib.commands.gp import GpStart, GpStop
import tinctest
from tinctest.lib import local_path
from mpp.lib.PSQL import PSQL
from mpp.models import MPPTestCase
class transactions(MPPTestCase):
def test_skip_checkpoint_abort_transaction(self):
"""
@description FATAL failure execution handles already committed transactions properly
@created 2013-04-19 00:00:00
@modified 2013-04-19 00:00:00
@tags transaction checkpoint MPP-17817 MPP-17925 MPP-17926 MPP-17927 MPP-17928 schedule_transaction
@product_version gpdb: [4.1.2.5- main]
Repro steps:
1. GPDB is up and running, number of segments is irrelevant, no master standby is required,
no segment mirroring is required
2. inject fault on master for skipping checkpoints
> gpfaultinjector -f checkpoint -m async -y skip -s 1 -o 0
3. inject fault 'fatal' on master, it aborts already committed local transaction
> gpfaultinjector -p 4100 -m async -s 1 -f local_tm_record_transaction_commit -y panic_suppress
4. create table 'test'
> psql template1 -c 'create table test(a int);'
5. connect in utility mode to master and create table, insert rows into table and truncate table
> PGOPTIONS='-c gp_session_role=utility -c allow_system_table_mods=dml' psql -p 4100 template1
begin;
create table test21(a int);
insert into test21(a) values(10);
truncate table test21;
commit;
6. Wait 5 minutes
7. GPDB immediate shutdown and restart, GPDB does not come up with versions without fix,
GPDB comes up with versions with fix
> gpstop -air
"""
master_port = os.getenv("PGPORT", "5432")
cmd = Command(name="gpfaultinjector", cmdStr="gpfaultinjector -f checkpoint -m async -y skip -s 1 -o 0")
cmd.run()
cmd = Command(name="gpfaultinjector",
cmdStr="gpfaultinjector -p %s -m async -s 1 \
-f local_tm_record_transaction_commit -y panic_suppress" % master_port)
cmd.run()
PSQL.run_sql_command("create table mpp17817(a int)")
sql_file = local_path('mpp17817.sql')
PSQL.run_sql_file(sql_file, PGOPTIONS="-c gp_session_role=utility")
time.sleep(300)
cmd = Command(name="gpstop restart immediate",
cmdStr="source %s/greenplum_path.sh;\
gpstop -air" % os.environ["GPHOME"])
cmd.run(validateAfter=True)
# Cleanup
PSQL.run_sql_command("drop table mpp17817")
PSQL.run_sql_command("drop table mpp17817_21")
| CraigHarris/gpdb | src/test/tinc/tincrepo/mpp/gpdb/tests/storage/transaction_management/skip_checkpoint_abort_transaction/test_skip_checkpoint_abort_transaction.py | Python | apache-2.0 | 3,466 |
// +build go1.10,codegen
package api
import (
"encoding/json"
"testing"
)
func buildAPI() *API {
a := &API{}
stringShape := &Shape{
API: a,
ShapeName: "string",
Type: "string",
}
stringShapeRef := &ShapeRef{
API: a,
ShapeName: "string",
Shape: stringShape,
}
intShape := &Shape{
API: a,
ShapeName: "int",
Type: "int",
}
intShapeRef := &ShapeRef{
API: a,
ShapeName: "int",
Shape: intShape,
}
nestedComplexShape := &Shape{
API: a,
ShapeName: "NestedComplexShape",
MemberRefs: map[string]*ShapeRef{
"NestedField": stringShapeRef,
},
Type: "structure",
}
nestedComplexShapeRef := &ShapeRef{
API: a,
ShapeName: "NestedComplexShape",
Shape: nestedComplexShape,
}
nestedListShape := &Shape{
API: a,
ShapeName: "NestedListShape",
MemberRef: *nestedComplexShapeRef,
Type: "list",
}
nestedListShapeRef := &ShapeRef{
API: a,
ShapeName: "NestedListShape",
Shape: nestedListShape,
}
complexShape := &Shape{
API: a,
ShapeName: "ComplexShape",
MemberRefs: map[string]*ShapeRef{
"Field": stringShapeRef,
"List": nestedListShapeRef,
},
Type: "structure",
}
complexShapeRef := &ShapeRef{
API: a,
ShapeName: "ComplexShape",
Shape: complexShape,
}
listShape := &Shape{
API: a,
ShapeName: "ListShape",
MemberRef: *complexShapeRef,
Type: "list",
}
listShapeRef := &ShapeRef{
API: a,
ShapeName: "ListShape",
Shape: listShape,
}
listsShape := &Shape{
API: a,
ShapeName: "ListsShape",
MemberRef: *listShapeRef,
Type: "list",
}
listsShapeRef := &ShapeRef{
API: a,
ShapeName: "ListsShape",
Shape: listsShape,
}
input := &Shape{
API: a,
ShapeName: "FooInput",
MemberRefs: map[string]*ShapeRef{
"BarShape": stringShapeRef,
"ComplexField": complexShapeRef,
"ListField": listShapeRef,
"ListsField": listsShapeRef,
},
Type: "structure",
}
output := &Shape{
API: a,
ShapeName: "FooOutput",
MemberRefs: map[string]*ShapeRef{
"BazShape": intShapeRef,
"ComplexField": complexShapeRef,
"ListField": listShapeRef,
"ListsField": listsShapeRef,
},
Type: "structure",
}
inputRef := ShapeRef{
API: a,
ShapeName: "FooInput",
Shape: input,
}
outputRef := ShapeRef{
API: a,
ShapeName: "FooOutput",
Shape: output,
}
operations := map[string]*Operation{
"Foo": {
API: a,
Name: "Foo",
ExportedName: "Foo",
InputRef: inputRef,
OutputRef: outputRef,
},
}
a.Operations = operations
a.Shapes = map[string]*Shape{
"FooInput": input,
"FooOutput": output,
"string": stringShape,
"int": intShape,
"NestedComplexShape": nestedComplexShape,
"NestedListShape": nestedListShape,
"ComplexShape": complexShape,
"ListShape": listShape,
"ListsShape": listsShape,
}
a.Metadata = Metadata{
ServiceAbbreviation: "FooService",
}
a.BaseImportPath = "github.com/aws/aws-sdk-go/service/"
a.Setup()
return a
}
func TestExampleGeneration(t *testing.T) {
example := `
{
"version": "1.0",
"examples": {
"Foo": [
{
"input": {
"BarShape": "Hello world",
"ComplexField": {
"Field": "bar",
"List": [
{
"NestedField": "qux"
}
]
},
"ListField": [
{
"Field": "baz"
}
],
"ListsField": [
[
{
"Field": "baz"
}
]
]
},
"output": {
"BazShape": 1
},
"comments": {
"input": {
},
"output": {
}
},
"description": "Foo bar baz qux",
"title": "I pity the foo"
}
]
}
}
`
a := buildAPI()
def := &ExamplesDefinition{}
err := json.Unmarshal([]byte(example), def)
if err != nil {
t.Error(err)
}
def.API = a
def.setup()
expected := `
import (
"fmt"
"strings"
"time"
"` + SDKImportRoot + `/aws"
"` + SDKImportRoot + `/aws/awserr"
"` + SDKImportRoot + `/aws/session"
"` + SDKImportRoot + `/service/fooservice"
)
var _ time.Duration
var _ strings.Reader
var _ aws.Config
func parseTime(layout, value string) *time.Time {
t, err := time.Parse(layout, value)
if err != nil {
panic(err)
}
return &t
}
// I pity the foo
//
// Foo bar baz qux
func ExampleFooService_Foo_shared00() {
svc := fooservice.New(session.New())
input := &fooservice.FooInput{
BarShape: aws.String("Hello world"),
ComplexField: &fooservice.ComplexShape{
Field: aws.String("bar"),
List: []*fooservice.NestedComplexShape{
{
NestedField: aws.String("qux"),
},
},
},
ListField: []*fooservice.ComplexShape{
{
Field: aws.String("baz"),
},
},
ListsField: [][]*fooservice.ComplexShape{
{
{
Field: aws.String("baz"),
},
},
},
}
result, err := svc.Foo(input)
if err != nil {
if aerr, ok := err.(awserr.Error); ok {
switch aerr.Code() {
default:
fmt.Println(aerr.Error())
}
} else {
// Print the error, cast err to awserr.Error to get the Code and
// Message from an error.
fmt.Println(err.Error())
}
return
}
fmt.Println(result)
}
`
if expected != a.ExamplesGoCode() {
t.Errorf("Expected:\n%s\nReceived:\n%s\n", expected, a.ExamplesGoCode())
}
}
func TestBuildShape(t *testing.T) {
a := buildAPI()
cases := []struct {
defs map[string]interface{}
expected string
}{
{
defs: map[string]interface{}{
"barShape": "Hello World",
},
expected: "BarShape: aws.String(\"Hello World\"),\n",
},
{
defs: map[string]interface{}{
"BarShape": "Hello World",
},
expected: "BarShape: aws.String(\"Hello World\"),\n",
},
}
for _, c := range cases {
ref := a.Operations["Foo"].InputRef
shapeStr := defaultExamplesBuilder{}.BuildShape(&ref, c.defs, false)
if c.expected != shapeStr {
t.Errorf("Expected:\n%s\nReceived:\n%s", c.expected, shapeStr)
}
}
}
| Miciah/origin | vendor/github.com/aws/aws-sdk-go/private/model/api/example_test.go | GO | apache-2.0 | 6,120 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// ReSharper disable MemberCanBePrivate.Global
// ReSharper disable UnusedMember.Global
// ReSharper disable UnusedAutoPropertyAccessor.Global
namespace Apache.Ignite.Core.Cache.Configuration
{
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Diagnostics;
using System.Diagnostics.CodeAnalysis;
using System.IO;
using System.Linq;
using System.Xml.Serialization;
using Apache.Ignite.Core.Cache;
using Apache.Ignite.Core.Cache.Affinity;
using Apache.Ignite.Core.Cache.Affinity.Rendezvous;
using Apache.Ignite.Core.Cache.Eviction;
using Apache.Ignite.Core.Cache.Expiry;
using Apache.Ignite.Core.Cache.Store;
using Apache.Ignite.Core.Common;
using Apache.Ignite.Core.Configuration;
using Apache.Ignite.Core.Impl;
using Apache.Ignite.Core.Impl.Binary;
using Apache.Ignite.Core.Impl.Cache.Affinity;
using Apache.Ignite.Core.Impl.Cache.Expiry;
using Apache.Ignite.Core.Impl.Client;
using Apache.Ignite.Core.Log;
using Apache.Ignite.Core.Plugin.Cache;
using BinaryReader = Apache.Ignite.Core.Impl.Binary.BinaryReader;
using BinaryWriter = Apache.Ignite.Core.Impl.Binary.BinaryWriter;
/// <summary>
/// Defines grid cache configuration.
/// </summary>
public class CacheConfiguration : IBinaryRawWriteAwareEx<BinaryWriter>
{
/// <summary> Default size of rebalance thread pool. </summary>
public const int DefaultRebalanceThreadPoolSize = 2;
/// <summary> Default rebalance timeout.</summary>
public static readonly TimeSpan DefaultRebalanceTimeout = TimeSpan.FromMilliseconds(10000);
/// <summary> Time to wait between rebalance messages to avoid overloading CPU. </summary>
public static readonly TimeSpan DefaultRebalanceThrottle = TimeSpan.Zero;
/// <summary> Default number of backups. </summary>
public const int DefaultBackups = 0;
/// <summary> Default caching mode. </summary>
public const CacheMode DefaultCacheMode = CacheMode.Partitioned;
/// <summary> Default atomicity mode. </summary>
public const CacheAtomicityMode DefaultAtomicityMode = CacheAtomicityMode.Atomic;
/// <summary> Default lock timeout (zero means no timeout). </summary>
public static readonly TimeSpan DefaultLockTimeout = TimeSpan.Zero;
/// <summary> Default cache size to use with eviction policy. </summary>
public const int DefaultCacheSize = 100000;
/// <summary> Default value for 'invalidate' flag that indicates if this is invalidation-based cache. </summary>
public const bool DefaultInvalidate = false;
/// <summary> Default rebalance mode for distributed cache. </summary>
public const CacheRebalanceMode DefaultRebalanceMode = CacheRebalanceMode.Async;
/// <summary> Default rebalance batch size in bytes. </summary>
public const int DefaultRebalanceBatchSize = 512*1024; // 512K
/// <summary> Default value for <see cref="WriteSynchronizationMode"/> property.</summary>
public const CacheWriteSynchronizationMode DefaultWriteSynchronizationMode =
CacheWriteSynchronizationMode.PrimarySync;
/// <summary> Default value for eager ttl flag. </summary>
public const bool DefaultEagerTtl = true;
/// <summary> Default value for 'maxConcurrentAsyncOps'. </summary>
public const int DefaultMaxConcurrentAsyncOperations = 500;
/// <summary> Default value for 'writeBehindEnabled' flag. </summary>
public const bool DefaultWriteBehindEnabled = false;
/// <summary> Default flush size for write-behind cache store. </summary>
public const int DefaultWriteBehindFlushSize = 10240; // 10K
/// <summary> Default flush frequency for write-behind cache store. </summary>
public static readonly TimeSpan DefaultWriteBehindFlushFrequency = TimeSpan.FromMilliseconds(5000);
/// <summary> Default count of flush threads for write-behind cache store. </summary>
public const int DefaultWriteBehindFlushThreadCount = 1;
/// <summary> Default batch size for write-behind cache store. </summary>
public const int DefaultWriteBehindBatchSize = 512;
/// <summary> Default value for load previous value flag. </summary>
public const bool DefaultLoadPreviousValue = false;
/// <summary> Default value for 'readFromBackup' flag. </summary>
public const bool DefaultReadFromBackup = true;
/// <summary> Default timeout after which long query warning will be printed. </summary>
public static readonly TimeSpan DefaultLongQueryWarningTimeout = TimeSpan.FromMilliseconds(3000);
/// <summary> Default value for keep binary ("portable") in store behavior. </summary>
/// <remarks> The misspelled identifier ("Vinary") is public API and kept for backwards compatibility. </remarks>
[Obsolete("Use DefaultKeepBinaryInStore instead.")]
public const bool DefaultKeepVinaryInStore = true;
/// <summary> Default value for <see cref="KeepBinaryInStore"/> property.</summary>
public const bool DefaultKeepBinaryInStore = false;
/// <summary> Default value for 'copyOnRead' flag. </summary>
public const bool DefaultCopyOnRead = true;
/// <summary> Default value for read-through behavior. </summary>
public const bool DefaultReadThrough = false;
/// <summary> Default value for write-through behavior. </summary>
public const bool DefaultWriteThrough = false;
/// <summary> Default value for <see cref="WriteBehindCoalescing"/>. </summary>
public const bool DefaultWriteBehindCoalescing = true;
/// <summary> Default value for <see cref="PartitionLossPolicy"/>. </summary>
public const PartitionLossPolicy DefaultPartitionLossPolicy = PartitionLossPolicy.Ignore;
/// <summary> Default value for <see cref="SqlIndexMaxInlineSize"/>; -1 means automatic. </summary>
public const int DefaultSqlIndexMaxInlineSize = -1;
/// <summary> Default value for <see cref="StoreConcurrentLoadAllThreshold"/>. </summary>
public const int DefaultStoreConcurrentLoadAllThreshold = 5;
/// <summary> Default value for <see cref="RebalanceOrder"/>. </summary>
public const int DefaultRebalanceOrder = 0;
/// <summary> Default value for <see cref="RebalanceBatchesPrefetchCount"/>. </summary>
public const long DefaultRebalanceBatchesPrefetchCount = 2;
/// <summary> Default value for <see cref="MaxQueryIteratorsCount"/>. </summary>
public const int DefaultMaxQueryIteratorsCount = 1024;
/// <summary> Default value for <see cref="QueryDetailMetricsSize"/>. </summary>
public const int DefaultQueryDetailMetricsSize = 0;
/// <summary> Default value for <see cref="QueryParallelism"/>. </summary>
public const int DefaultQueryParallelism = 1;
/// <summary> Default value for <see cref="EncryptionEnabled"/>. </summary>
public const bool DefaultEncryptionEnabled = false;
/// <summary>
/// Gets or sets the cache name.
/// </summary>
public string Name { get; set; }
/// <summary>
/// Initializes a new instance of the <see cref="CacheConfiguration"/> class with a null name.
/// </summary>
public CacheConfiguration() : this((string) null)
{
// No-op: the (string) cast disambiguates against the CacheConfiguration copy-constructor overload.
}
/// <summary>
/// Initializes a new instance of the <see cref="CacheConfiguration"/> class,
/// assigning the documented default value to every configurable property.
/// </summary>
/// <param name="name">Cache name.</param>
public CacheConfiguration(string name)
{
    Name = name;

    // General cache behavior.
    CacheMode = DefaultCacheMode;
    AtomicityMode = DefaultAtomicityMode;
    Backups = DefaultBackups;
    WriteSynchronizationMode = DefaultWriteSynchronizationMode;
    Invalidate = DefaultInvalidate;
    EagerTtl = DefaultEagerTtl;
    LockTimeout = DefaultLockTimeout;
    MaxConcurrentAsyncOperations = DefaultMaxConcurrentAsyncOperations;
    CopyOnRead = DefaultCopyOnRead;
    ReadFromBackup = DefaultReadFromBackup;
    PartitionLossPolicy = DefaultPartitionLossPolicy;
    EncryptionEnabled = DefaultEncryptionEnabled;

    // Cache store interaction.
    KeepBinaryInStore = DefaultKeepBinaryInStore;
    LoadPreviousValue = DefaultLoadPreviousValue;
    StoreConcurrentLoadAllThreshold = DefaultStoreConcurrentLoadAllThreshold;

    // Write-behind store settings.
    WriteBehindEnabled = DefaultWriteBehindEnabled;
    WriteBehindBatchSize = DefaultWriteBehindBatchSize;
    WriteBehindFlushFrequency = DefaultWriteBehindFlushFrequency;
    WriteBehindFlushSize = DefaultWriteBehindFlushSize;
    WriteBehindFlushThreadCount = DefaultWriteBehindFlushThreadCount;
    WriteBehindCoalescing = DefaultWriteBehindCoalescing;

    // Rebalancing.
    RebalanceMode = DefaultRebalanceMode;
    RebalanceBatchSize = DefaultRebalanceBatchSize;
    RebalanceThrottle = DefaultRebalanceThrottle;
    RebalanceTimeout = DefaultRebalanceTimeout;
    RebalanceOrder = DefaultRebalanceOrder;
    RebalanceBatchesPrefetchCount = DefaultRebalanceBatchesPrefetchCount;

    // SQL / queries.
    SqlIndexMaxInlineSize = DefaultSqlIndexMaxInlineSize;
    MaxQueryIteratorsCount = DefaultMaxQueryIteratorsCount;
    QueryParallelism = DefaultQueryParallelism;

#pragma warning disable 618 // LongQueryWarningTimeout is obsolete but must still be initialized for compatibility.
    LongQueryWarningTimeout = DefaultLongQueryWarningTimeout;
#pragma warning restore 618
}
/// <summary>
/// Initializes a new instance of the <see cref="CacheConfiguration"/> class
/// and populates <see cref="QueryEntities"/> according to provided query types.
/// This constructor is deprecated, please use <see cref="CacheConfiguration(string, QueryEntity[])"/>.
/// </summary>
/// <param name="name">Cache name.</param>
/// <param name="queryTypes">
/// Collection of types to be registered as query entities. These types should use
/// <see cref="QuerySqlFieldAttribute"/> to configure query fields and properties.
/// </param>
[Obsolete("This constructor is deprecated, please use CacheConfiguration(string, QueryEntity[]) instead.")]
public CacheConfiguration(string name, params Type[] queryTypes) : this(name)
{
// Each type becomes a query entity keyed by its value type; key type is left unset.
QueryEntities = queryTypes.Select(type => new QueryEntity {ValueType = type}).ToArray();
}
/// <summary>
/// Initializes a new instance of the <see cref="CacheConfiguration"/> class
/// with the specified query entities and default values for all other properties.
/// </summary>
/// <param name="name">Cache name.</param>
/// <param name="queryEntities">Query entities.</param>
public CacheConfiguration(string name, params QueryEntity[] queryEntities) : this(name)
{
QueryEntities = queryEntities;
}
/// <summary>
/// Initializes a new instance of the <see cref="CacheConfiguration"/> class,
/// performing a deep copy of specified cache configuration.
/// </summary>
/// <param name="other">The other configuration to perform deep copy from.</param>
public CacheConfiguration(CacheConfiguration other)
{
if (other != null)
{
// Deep copy by round-tripping through the binary marshaller: serialize 'other'
// into a temporary stream and deserialize into this instance.
using (var stream = IgniteManager.Memory.Allocate().GetStream())
{
other.Write(BinaryUtils.Marshaller.StartMarshal(stream), ClientSocket.CurrentProtocolVersion);
stream.SynchronizeOutput();
stream.Seek(0, SeekOrigin.Begin);
Read(BinaryUtils.Marshaller.StartUnmarshal(stream), ClientSocket.CurrentProtocolVersion);
}
// Properties that do not survive the Write/Read round trip are copied directly.
CopyLocalProperties(other);
}
}
/// <summary>
/// Initializes a new instance of the <see cref="CacheConfiguration"/> class
/// by deserializing it from the specified reader.
/// </summary>
/// <param name="reader">The reader.</param>
/// <param name="srvVer">Server version.</param>
internal CacheConfiguration(BinaryReader reader, ClientProtocolVersion srvVer)
{
Read(reader, srvVer);
}
/// <summary>
/// Reads data into this instance from the specified reader.
/// <para/>
/// NOTE: the stream carries no field tags, so the read order below must exactly mirror
/// <see cref="Write"/>; any reordering silently corrupts the configuration.
/// </summary>
/// <param name="reader">The reader.</param>
/// <param name="srvVer">Server version.</param>
private void Read(BinaryReader reader, ClientProtocolVersion srvVer)
{
// Make sure system marshaller is used.
Debug.Assert(reader.Marshaller == BinaryUtils.Marshaller);
AtomicityMode = (CacheAtomicityMode) reader.ReadInt();
Backups = reader.ReadInt();
CacheMode = (CacheMode) reader.ReadInt();
CopyOnRead = reader.ReadBoolean();
EagerTtl = reader.ReadBoolean();
Invalidate = reader.ReadBoolean();
KeepBinaryInStore = reader.ReadBoolean();
LoadPreviousValue = reader.ReadBoolean();
LockTimeout = reader.ReadLongAsTimespan();
#pragma warning disable 618
LongQueryWarningTimeout = reader.ReadLongAsTimespan();
#pragma warning restore 618
MaxConcurrentAsyncOperations = reader.ReadInt();
Name = reader.ReadString();
ReadFromBackup = reader.ReadBoolean();
RebalanceBatchSize = reader.ReadInt();
RebalanceDelay = reader.ReadLongAsTimespan();
RebalanceMode = (CacheRebalanceMode) reader.ReadInt();
RebalanceThrottle = reader.ReadLongAsTimespan();
RebalanceTimeout = reader.ReadLongAsTimespan();
SqlEscapeAll = reader.ReadBoolean();
WriteBehindBatchSize = reader.ReadInt();
WriteBehindEnabled = reader.ReadBoolean();
WriteBehindFlushFrequency = reader.ReadLongAsTimespan();
WriteBehindFlushSize = reader.ReadInt();
WriteBehindFlushThreadCount = reader.ReadInt();
WriteBehindCoalescing = reader.ReadBoolean();
WriteSynchronizationMode = (CacheWriteSynchronizationMode) reader.ReadInt();
ReadThrough = reader.ReadBoolean();
WriteThrough = reader.ReadBoolean();
EnableStatistics = reader.ReadBoolean();
DataRegionName = reader.ReadString();
PartitionLossPolicy = (PartitionLossPolicy) reader.ReadInt();
GroupName = reader.ReadString();
CacheStoreFactory = reader.ReadObject<IFactory<ICacheStore>>();
SqlIndexMaxInlineSize = reader.ReadInt();
OnheapCacheEnabled = reader.ReadBoolean();
StoreConcurrentLoadAllThreshold = reader.ReadInt();
RebalanceOrder = reader.ReadInt();
RebalanceBatchesPrefetchCount = reader.ReadLong();
MaxQueryIteratorsCount = reader.ReadInt();
QueryDetailMetricsSize = reader.ReadInt();
QueryParallelism = reader.ReadInt();
SqlSchema = reader.ReadString();
EncryptionEnabled = reader.ReadBoolean();
// Complex (object-valued) fields follow the flat scalar section.
QueryEntities = reader.ReadCollectionRaw(r => new QueryEntity(r, srvVer));
NearConfiguration = reader.ReadBoolean() ? new NearCacheConfiguration(reader) : null;
EvictionPolicy = EvictionPolicyBase.Read(reader);
AffinityFunction = AffinityFunctionSerializer.Read(reader);
ExpiryPolicyFactory = ExpiryPolicySerializer.ReadPolicyFactory(reader);
KeyConfiguration = reader.ReadCollectionRaw(r => new CacheKeyConfiguration(r));
// Plugin configurations: only pure .NET plugins are materialized here;
// factory-id-based (Java-side) plugin payloads are skipped.
var count = reader.ReadInt();
if (count > 0)
{
PluginConfigurations = new List<ICachePluginConfiguration>(count);
for (int i = 0; i < count; i++)
{
if (reader.ReadBoolean())
{
// FactoryId-based plugin: skip.
reader.ReadInt(); // Skip factory id.
var size = reader.ReadInt();
reader.Stream.Seek(size, SeekOrigin.Current); // Skip custom data.
}
else
{
// Pure .NET plugin.
PluginConfigurations.Add(reader.ReadObject<ICachePluginConfiguration>());
}
}
}
}
/// <summary>
/// Writes this instance to the specified writer.
/// Explicit interface implementation that simply delegates to the internal <see cref="Write"/>.
/// </summary>
/// <param name="writer">The writer.</param>
/// <param name="srvVer">Server version.</param>
void IBinaryRawWriteAwareEx<BinaryWriter>.Write(BinaryWriter writer, ClientProtocolVersion srvVer)
{
Write(writer, srvVer);
}
/// <summary>
/// Writes this instance to the specified writer.
/// <para/>
/// NOTE: the write order below must exactly mirror <see cref="Read"/>; the stream carries
/// no field tags, so any reordering silently corrupts the configuration.
/// </summary>
/// <param name="writer">The writer.</param>
/// <param name="srvVer">Server version.</param>
internal void Write(BinaryWriter writer, ClientProtocolVersion srvVer)
{
// Make sure system marshaller is used.
Debug.Assert(writer.Marshaller == BinaryUtils.Marshaller);
writer.WriteInt((int) AtomicityMode);
writer.WriteInt(Backups);
writer.WriteInt((int) CacheMode);
writer.WriteBoolean(CopyOnRead);
writer.WriteBoolean(EagerTtl);
writer.WriteBoolean(Invalidate);
writer.WriteBoolean(KeepBinaryInStore);
writer.WriteBoolean(LoadPreviousValue);
writer.WriteLong((long) LockTimeout.TotalMilliseconds);
#pragma warning disable 618
writer.WriteLong((long) LongQueryWarningTimeout.TotalMilliseconds);
#pragma warning restore 618
writer.WriteInt(MaxConcurrentAsyncOperations);
writer.WriteString(Name);
writer.WriteBoolean(ReadFromBackup);
writer.WriteInt(RebalanceBatchSize);
writer.WriteLong((long) RebalanceDelay.TotalMilliseconds);
writer.WriteInt((int) RebalanceMode);
writer.WriteLong((long) RebalanceThrottle.TotalMilliseconds);
writer.WriteLong((long) RebalanceTimeout.TotalMilliseconds);
writer.WriteBoolean(SqlEscapeAll);
writer.WriteInt(WriteBehindBatchSize);
writer.WriteBoolean(WriteBehindEnabled);
writer.WriteLong((long) WriteBehindFlushFrequency.TotalMilliseconds);
writer.WriteInt(WriteBehindFlushSize);
writer.WriteInt(WriteBehindFlushThreadCount);
writer.WriteBoolean(WriteBehindCoalescing);
writer.WriteInt((int) WriteSynchronizationMode);
writer.WriteBoolean(ReadThrough);
writer.WriteBoolean(WriteThrough);
writer.WriteBoolean(EnableStatistics);
writer.WriteString(DataRegionName);
writer.WriteInt((int) PartitionLossPolicy);
writer.WriteString(GroupName);
writer.WriteObject(CacheStoreFactory);
writer.WriteInt(SqlIndexMaxInlineSize);
writer.WriteBoolean(OnheapCacheEnabled);
writer.WriteInt(StoreConcurrentLoadAllThreshold);
writer.WriteInt(RebalanceOrder);
writer.WriteLong(RebalanceBatchesPrefetchCount);
writer.WriteInt(MaxQueryIteratorsCount);
writer.WriteInt(QueryDetailMetricsSize);
writer.WriteInt(QueryParallelism);
writer.WriteString(SqlSchema);
writer.WriteBoolean(EncryptionEnabled);
// Complex (object-valued) fields follow the flat scalar section.
writer.WriteCollectionRaw(QueryEntities, srvVer);
// Near-cache config is prefixed with a presence flag.
if (NearConfiguration != null)
{
writer.WriteBoolean(true);
NearConfiguration.Write(writer);
}
else
writer.WriteBoolean(false);
EvictionPolicyBase.Write(writer, EvictionPolicy);
AffinityFunctionSerializer.Write(writer, AffinityFunction);
ExpiryPolicySerializer.WritePolicyFactory(writer, ExpiryPolicyFactory);
writer.WriteCollectionRaw(KeyConfiguration);
if (PluginConfigurations != null)
{
writer.WriteInt(PluginConfigurations.Count);
foreach (var cachePlugin in PluginConfigurations)
{
if (cachePlugin == null)
throw new InvalidOperationException("Invalid cache configuration: " +
"ICachePluginConfiguration can't be null.");
if (cachePlugin.CachePluginConfigurationClosureFactoryId != null)
{
// Factory-id-based plugin: payload is length-prefixed so readers can skip it.
writer.WriteBoolean(true);
writer.WriteInt(cachePlugin.CachePluginConfigurationClosureFactoryId.Value);
int pos = writer.Stream.Position;
writer.WriteInt(0); // Reserve size.
cachePlugin.WriteBinary(writer);
writer.Stream.WriteInt(pos, writer.Stream.Position - pos - 4); // Write size.
}
else
{
writer.WriteBoolean(false);
writer.WriteObject(cachePlugin);
}
}
}
else
{
writer.WriteInt(0);
}
}
/// <summary>
/// Copies the local properties (those that do not survive the Write/Read round trip)
/// from the given configuration into this instance.
/// </summary>
internal void CopyLocalProperties(CacheConfiguration cfg)
{
    Debug.Assert(cfg != null);

    PluginConfigurations = cfg.PluginConfigurations;

    if (QueryEntities == null || cfg.QueryEntities == null)
    {
        return;
    }

    // Index the source entities by (keyType, valueType) pair for lookup.
    var sources = cfg.QueryEntities
        .Where(e => e != null)
        .ToDictionary(GetQueryEntityKey, e => e);

    foreach (var target in QueryEntities)
    {
        if (target == null)
        {
            continue;
        }

        QueryEntity source;
        if (sources.TryGetValue(GetQueryEntityKey(target), out source))
        {
            target.CopyLocalProperties(source);
        }
    }
}
/// <summary>
/// Builds a lookup key for a query entity from its key and value type names.
/// </summary>
private static string GetQueryEntityKey(QueryEntity entity)
{
    return string.Concat(entity.KeyTypeName, "^", entity.ValueTypeName);
}
/// <summary>
/// Validates this instance and outputs information to the log, if necessary.
/// </summary>
internal void Validate(ILogger log)
{
    Debug.Assert(log != null);

    if (QueryEntities == null)
    {
        return;
    }

    // Same message prefix for every entity; compute it once.
    var context = string.Format("Validating cache configuration '{0}'", Name ?? "");

    foreach (var entity in QueryEntities)
    {
        entity.Validate(log, context);
    }
}
/// <summary>
/// Gets or sets write synchronization mode. This mode controls whether the main
/// caller should wait for update on other nodes to complete or not.
/// </summary>
[DefaultValue(DefaultWriteSynchronizationMode)]
public CacheWriteSynchronizationMode WriteSynchronizationMode { get; set; }
/// <summary>
/// Gets or sets flag indicating whether expired cache entries will be eagerly removed from cache.
/// When set to false, expired entries will be removed on next entry access.
/// </summary>
[DefaultValue(DefaultEagerTtl)]
public bool EagerTtl { get; set; }
/// <summary>
/// Gets or sets flag indicating whether value should be loaded from store if it is not in the cache
/// for the following cache operations:
/// <list type="bullet">
/// <item><term><see cref="ICache{TK,TV}.PutIfAbsent"/></term></item>
/// <item><term><see cref="ICache{TK,TV}.Replace(TK,TV)"/></term></item>
/// <item><term><see cref="ICache{TK,TV}.Remove(TK)"/></term></item>
/// <item><term><see cref="ICache{TK,TV}.GetAndPut"/></term></item>
/// <item><term><see cref="ICache{TK,TV}.GetAndRemove"/></term></item>
/// <item><term><see cref="ICache{TK,TV}.GetAndReplace"/></term></item>
/// <item><term><see cref="ICache{TK,TV}.GetAndPutIfAbsent"/></term></item>
/// </list>
/// </summary>
[DefaultValue(DefaultLoadPreviousValue)]
public bool LoadPreviousValue { get; set; }
/// <summary>
/// Gets or sets the flag indicating whether <see cref="ICacheStore"/> is working with binary objects
/// instead of deserialized objects.
/// </summary>
[DefaultValue(DefaultKeepBinaryInStore)]
public bool KeepBinaryInStore { get; set; }
/// <summary>
/// Gets or sets caching mode to use.
/// </summary>
[DefaultValue(DefaultCacheMode)]
public CacheMode CacheMode { get; set; }
/// <summary>
/// Gets or sets cache atomicity mode.
/// </summary>
[DefaultValue(DefaultAtomicityMode)]
public CacheAtomicityMode AtomicityMode { get; set; }
/// <summary>
/// Gets or sets number of nodes used to back up single partition for
/// <see cref="Configuration.CacheMode.Partitioned"/> cache.
/// </summary>
[DefaultValue(DefaultBackups)]
public int Backups { get; set; }
/// <summary>
/// Gets or sets default lock acquisition timeout. Zero means no timeout.
/// </summary>
[DefaultValue(typeof(TimeSpan), "00:00:00")]
public TimeSpan LockTimeout { get; set; }
/// <summary>
/// Invalidation flag. If true, values will be invalidated (nullified) upon commit in near cache.
/// </summary>
[DefaultValue(DefaultInvalidate)]
public bool Invalidate { get; set; }
/// <summary>
/// Gets or sets cache rebalance mode.
/// </summary>
[DefaultValue(DefaultRebalanceMode)]
public CacheRebalanceMode RebalanceMode { get; set; }
/// <summary>
/// Gets or sets size (in number bytes) to be loaded within a single rebalance message.
/// Rebalancing algorithm will split total data set on every node into multiple batches prior to sending data.
/// </summary>
[DefaultValue(DefaultRebalanceBatchSize)]
public int RebalanceBatchSize { get; set; }
/// <summary>
/// Gets or sets maximum number of allowed concurrent asynchronous operations, 0 for unlimited.
/// </summary>
[DefaultValue(DefaultMaxConcurrentAsyncOperations)]
public int MaxConcurrentAsyncOperations { get; set; }
/// <summary>
/// Flag indicating whether Ignite should use write-behind behaviour for the cache store.
/// </summary>
[DefaultValue(DefaultWriteBehindEnabled)]
public bool WriteBehindEnabled { get; set; }
/// <summary>
/// Maximum size of the write-behind cache. If cache size exceeds this value, all cached items are flushed
/// to the cache store and write cache is cleared.
/// </summary>
[DefaultValue(DefaultWriteBehindFlushSize)]
public int WriteBehindFlushSize { get; set; }
/// <summary>
/// Frequency with which write-behind cache is flushed to the cache store.
/// This value defines the maximum time interval between object insertion/deletion from the cache
/// at the moment when corresponding operation is applied to the cache store.
/// <para/>
/// If this value is 0, then flush is performed according to the flush size.
/// <para/>
/// Note that you cannot set both
/// <see cref="WriteBehindFlushSize"/> and <see cref="WriteBehindFlushFrequency"/> to 0.
/// </summary>
[DefaultValue(typeof(TimeSpan), "00:00:05")]
public TimeSpan WriteBehindFlushFrequency { get; set; }
/// <summary>
/// Number of threads that will perform cache flushing. Cache flushing is performed when cache size exceeds
/// value defined by <see cref="WriteBehindFlushSize"/>, or flush interval defined by
/// <see cref="WriteBehindFlushFrequency"/> is elapsed.
/// </summary>
[DefaultValue(DefaultWriteBehindFlushThreadCount)]
public int WriteBehindFlushThreadCount { get; set; }
/// <summary>
/// Maximum batch size for write-behind cache store operations.
/// Store operations (get or remove) are combined in a batch of this size to be passed to
/// <see cref="ICacheStore{K, V}.WriteAll"/> or <see cref="ICacheStore{K, V}.DeleteAll"/> methods.
/// </summary>
[DefaultValue(DefaultWriteBehindBatchSize)]
public int WriteBehindBatchSize { get; set; }
/// <summary>
/// Gets or sets rebalance timeout.
/// </summary>
[DefaultValue(typeof(TimeSpan), "00:00:10")]
public TimeSpan RebalanceTimeout { get; set; }
/// <summary>
/// Gets or sets delay upon a node joining or leaving topology (or crash)
/// after which rebalancing should be started automatically.
/// Rebalancing should be delayed if you plan to restart nodes
/// after they leave topology, or if you plan to start multiple nodes at once or one after another
/// and don't want to repartition and rebalance until all nodes are started.
/// <para/>
/// Not initialized in the constructor, so the default is <see cref="TimeSpan.Zero"/> (no delay).
/// </summary>
public TimeSpan RebalanceDelay { get; set; }
/// <summary>
/// Time to wait between rebalance messages to avoid overloading of CPU or network.
/// When rebalancing large data sets, the CPU or network can get over-consumed with rebalancing messages,
/// which consecutively may slow down the application performance. This parameter helps tune
/// the amount of time to wait between rebalance messages to make sure that rebalancing process
/// does not have any negative performance impact. Note that application will continue to work
/// properly while rebalancing is still in progress.
/// <para/>
/// Value of 0 means that throttling is disabled.
/// </summary>
public TimeSpan RebalanceThrottle { get; set; }
/// <summary>
/// Gets or sets flag indicating whether data can be read from backup.
/// </summary>
[DefaultValue(DefaultReadFromBackup)]
public bool ReadFromBackup { get; set; }
/// <summary>
/// Gets or sets flag indicating whether copy of the value stored in cache should be created
/// for cache operation implying return value.
/// </summary>
[DefaultValue(DefaultCopyOnRead)]
public bool CopyOnRead { get; set; }
/// <summary>
/// Gets or sets the timeout after which long query warning will be printed.
/// <para />
/// This property is obsolete, use <see cref="IgniteConfiguration.LongQueryWarningTimeout"/> instead.
/// </summary>
[DefaultValue(typeof(TimeSpan), "00:00:03")]
[Obsolete("Use IgniteConfiguration.LongQueryWarningTimeout instead.")]
public TimeSpan LongQueryWarningTimeout { get; set; }
/// <summary>
/// If true all the SQL table and field names will be escaped with double quotes like
/// ({ "tableName"."fieldsName"}). This enforces case sensitivity for field names and
/// also allows having special characters in table and field names.
/// <para />
/// Not initialized in the constructor, so the default is false.
/// </summary>
public bool SqlEscapeAll { get; set; }
/// <summary>
/// Gets or sets the factory for underlying persistent storage for read-through and write-through operations.
/// <para />
/// See <see cref="ReadThrough"/> and <see cref="WriteThrough"/> properties to enable read-through and
/// write-through behavior so that cache store is invoked on get and/or put operations.
/// <para />
/// If both <see cref="ReadThrough"/> and <see cref="WriteThrough"/> are <code>false</code>, cache store
/// will be invoked only on <see cref="ICache{TK,TV}.LoadCache"/> calls.
/// </summary>
public IFactory<ICacheStore> CacheStoreFactory { get; set; }
/// <summary>
/// Gets or sets a value indicating whether read-through should be enabled for cache operations.
/// <para />
/// When in read-through mode, cache misses that occur due to cache entries not existing
/// as a result of performing a "get" operations will appropriately cause the
/// configured <see cref="ICacheStore"/> (see <see cref="CacheStoreFactory"/>) to be invoked.
/// </summary>
[DefaultValue(DefaultReadThrough)]
public bool ReadThrough { get; set; }
/// <summary>
/// Gets or sets a value indicating whether write-through should be enabled for cache operations.
/// <para />
/// When in "write-through" mode, cache updates that occur as a result of performing "put" operations
/// will appropriately cause the configured
/// <see cref="ICacheStore"/> (see <see cref="CacheStoreFactory"/>) to be invoked.
/// </summary>
[DefaultValue(DefaultWriteThrough)]
public bool WriteThrough { get; set; }
/// <summary>
/// Gets or sets the query entity configuration.
/// </summary>
[SuppressMessage("Microsoft.Usage", "CA2227:CollectionPropertiesShouldBeReadOnly")]
public ICollection<QueryEntity> QueryEntities { get; set; }
/// <summary>
/// Gets or sets the near cache configuration. Null means near caching is disabled.
/// </summary>
public NearCacheConfiguration NearConfiguration { get; set; }
/// <summary>
/// Gets or sets the eviction policy.
/// Null value means disabled evictions.
/// </summary>
public IEvictionPolicy EvictionPolicy { get; set; }
/// <summary>
/// Gets or sets the affinity function to provide mapping from keys to nodes.
/// <para />
/// Predefined implementations:
/// <see cref="RendezvousAffinityFunction"/>.
/// </summary>
public IAffinityFunction AffinityFunction { get; set; }
/// <summary>
/// Gets or sets the factory for <see cref="IExpiryPolicy"/> to be used for all cache operations,
/// unless <see cref="ICache{TK,TV}.WithExpiryPolicy"/> is called.
/// <para />
/// Default is null, which means no expiration.
/// </summary>
public IFactory<IExpiryPolicy> ExpiryPolicyFactory { get; set; }
/// <summary>
/// Gets or sets a value indicating whether statistics gathering is enabled on a cache.
/// These statistics can be retrieved via <see cref="ICache{TK,TV}.GetMetrics()"/>.
/// </summary>
public bool EnableStatistics { get; set; }
/// <summary>
/// Gets or sets the plugin configurations.
/// </summary>
[SuppressMessage("Microsoft.Usage", "CA2227:CollectionPropertiesShouldBeReadOnly")]
public ICollection<ICachePluginConfiguration> PluginConfigurations { get; set; }
/// <summary>
/// Gets or sets the name of the <see cref="MemoryPolicyConfiguration"/> for this cache.
/// See <see cref="IgniteConfiguration.MemoryConfiguration"/>.
/// Obsolete alias that reads and writes <see cref="DataRegionName"/>.
/// </summary>
[Obsolete("Use DataRegionName.")]
[XmlIgnore]
public string MemoryPolicyName
{
get { return DataRegionName; }
set { DataRegionName = value; }
}
/// <summary>
/// Gets or sets the name of the data region, see <see cref="DataRegionConfiguration"/>.
/// </summary>
public string DataRegionName { get; set; }
/// <summary>
/// Gets or sets write coalescing flag for write-behind cache store operations.
/// Store operations (get or remove) with the same key are combined or coalesced to single,
/// resulting operation to reduce pressure to underlying cache store.
/// </summary>
[DefaultValue(DefaultWriteBehindCoalescing)]
public bool WriteBehindCoalescing { get; set; }
/// <summary>
/// Gets or sets the partition loss policy. This policy defines how Ignite will react to
/// a situation when all nodes for some partition leave the cluster.
/// </summary>
[DefaultValue(DefaultPartitionLossPolicy)]
public PartitionLossPolicy PartitionLossPolicy { get; set; }
/// <summary>
/// Gets or sets the cache group name. Caches with the same group name share single underlying 'physical'
/// cache (partition set), but are logically isolated.
/// <para />
/// Since underlying cache is shared, the following configuration properties should be the same within group:
/// <see cref="AffinityFunction"/>, <see cref="CacheMode"/>, <see cref="PartitionLossPolicy"/>,
/// <see cref="DataRegionName"/>
/// <para />
/// Grouping caches reduces overall overhead, since internal data structures are shared.
/// </summary>
public string GroupName { get;set; }
/// <summary>
/// Gets or sets maximum inline size in bytes for sql indexes. See also <see cref="QueryIndex.InlineSize"/>.
/// -1 for automatic.
/// </summary>
[DefaultValue(DefaultSqlIndexMaxInlineSize)]
public int SqlIndexMaxInlineSize { get; set; }
/// <summary>
/// Gets or sets the key configuration.
/// </summary>
[SuppressMessage("Microsoft.Usage", "CA2227:CollectionPropertiesShouldBeReadOnly")]
public ICollection<CacheKeyConfiguration> KeyConfiguration { get; set; }
/// <summary>
/// Gets or sets a value indicating whether on-heap cache is enabled for the off-heap based page memory.
/// </summary>
public bool OnheapCacheEnabled { get; set; }
/// <summary>
/// Gets or sets the threshold to use when multiple keys are being loaded from an underlying cache store
/// (see <see cref="CacheStoreFactory"/>).
///
/// In the situation when several threads load the same or intersecting set of keys
/// and the total number of keys to load is less or equal to this threshold then there will be no
/// second call to the storage in order to load a key from thread A if the same key is already being
/// loaded by thread B.
///
/// The threshold should be controlled wisely. On the one hand if it's set to a big value then the
/// interaction with a storage during the load of missing keys will be minimal. On the other hand the big
/// value may result in significant performance degradation because it is needed to check
/// for every key whether it's being loaded or not.
/// </summary>
[DefaultValue(DefaultStoreConcurrentLoadAllThreshold)]
public int StoreConcurrentLoadAllThreshold { get; set; }
/// <summary>
/// Gets or sets the cache rebalance order. Caches with bigger RebalanceOrder are rebalanced later than caches
/// with smaller RebalanceOrder.
/// <para />
/// Default is 0, which means unordered rebalance. All caches with RebalanceOrder=0 are rebalanced without any
/// delay concurrently.
/// <para />
/// This parameter is applicable only for caches with <see cref="RebalanceMode"/> of
/// <see cref="CacheRebalanceMode.Sync"/> and <see cref="CacheRebalanceMode.Async"/>.
/// </summary>
[DefaultValue(DefaultRebalanceOrder)]
public int RebalanceOrder { get; set; }
/// <summary>
/// Gets or sets the rebalance batches prefetch count.
/// <para />
/// Source node can provide more than one batch at rebalance start to improve performance.
/// Default is <see cref="DefaultRebalanceBatchesPrefetchCount"/>, minimum is 2.
/// </summary>
[DefaultValue(DefaultRebalanceBatchesPrefetchCount)]
public long RebalanceBatchesPrefetchCount { get; set; }
/// <summary>
/// Gets or sets the maximum number of active query iterators.
/// </summary>
[DefaultValue(DefaultMaxQueryIteratorsCount)]
public int MaxQueryIteratorsCount { get; set; }
/// <summary>
/// Gets or sets the size of the query detail metrics to be stored in memory.
/// <para />
/// 0 means disabled metrics.
/// </summary>
[DefaultValue(DefaultQueryDetailMetricsSize)]
public int QueryDetailMetricsSize { get; set; }
/// <summary>
/// Gets or sets the SQL schema.
/// Non-quoted identifiers are not case sensitive. Quoted identifiers are case sensitive.
/// <para />
/// Quoted <see cref="Name"/> is used by default.
/// </summary>
public string SqlSchema { get; set; }
/// <summary>
/// Gets or sets the desired query parallelism within a single node.
/// Query executor may or may not use this hint, depending on estimated query cost.
/// <para />
/// Default is <see cref="DefaultQueryParallelism"/>.
/// </summary>
[DefaultValue(DefaultQueryParallelism)]
public int QueryParallelism { get; set; }
/// <summary>
/// Gets or sets encryption flag.
/// Default is false.
/// </summary>
[DefaultValue(DefaultEncryptionEnabled)]
public bool EncryptionEnabled { get; set; }
}
}
| shroman/ignite | modules/platforms/dotnet/Apache.Ignite.Core/Cache/Configuration/CacheConfiguration.cs | C# | apache-2.0 | 42,848 |
/*
* Copyright 2016 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.netty.handler.ssl;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assume.assumeFalse;
import static org.junit.Assume.assumeTrue;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.security.PrivateKey;
import io.netty.buffer.UnpooledByteBufAllocator;
import org.junit.Test;
import io.netty.handler.ssl.util.SelfSignedCertificate;
import io.netty.util.ReferenceCountUtil;
/**
 * Tests that {@link SslContextBuilder} accepts PEM-encoded, reference-counted
 * keys and certificates and that all reference counts are balanced afterwards.
 */
public class PemEncodedTest {

    @Test
    public void testPemEncodedOpenSsl() throws Exception {
        testPemEncoded(SslProvider.OPENSSL);
    }

    @Test
    public void testPemEncodedOpenSslRef() throws Exception {
        testPemEncoded(SslProvider.OPENSSL_REFCNT);
    }

    private static void testPemEncoded(SslProvider provider) throws Exception {
        assumeTrue(OpenSsl.isAvailable());
        assumeFalse(OpenSsl.useKeyManagerFactory());
        PemPrivateKey pemKey;
        PemX509Certificate pemCert;
        SelfSignedCertificate ssc = new SelfSignedCertificate();
        try {
            pemKey = PemPrivateKey.valueOf(toByteArray(ssc.privateKey()));
            pemCert = PemX509Certificate.valueOf(toByteArray(ssc.certificate()));
        } finally {
            // The temporary files are no longer needed once their contents are in memory.
            ssc.delete();
        }

        SslContext context = null;
        try {
            context = SslContextBuilder.forServer(pemKey, pemCert)
                    .sslProvider(provider)
                    .build();
            // Building the context must not have retained extra references.
            assertEquals(1, pemKey.refCnt());
            assertEquals(1, pemCert.refCnt());
            assertTrue(context instanceof ReferenceCountedOpenSslContext);
        } finally {
            // Release in the finally block so pemKey/pemCert do not leak when
            // build() throws (previously they were only released on success).
            // ReferenceCountUtil.release(null) is a safe no-op.
            ReferenceCountUtil.release(context);
            assertRelease(pemKey);
            assertRelease(pemCert);
        }
    }

    @Test(expected = IllegalArgumentException.class)
    public void testEncodedReturnsNull() throws Exception {
        // A PrivateKey whose getEncoded() returns null cannot be converted to PEM.
        PemPrivateKey.toPEM(UnpooledByteBufAllocator.DEFAULT, true, new PrivateKey() {
            @Override
            public String getAlgorithm() {
                return null;
            }

            @Override
            public String getFormat() {
                return null;
            }

            @Override
            public byte[] getEncoded() {
                return null;
            }
        });
    }

    private static void assertRelease(PemEncoded encoded) {
        assertTrue(encoded.release());
    }

    /**
     * Reads the whole file into a byte array. Closing a ByteArrayOutputStream
     * is a no-op, so only the input stream needs a finally block.
     */
    private static byte[] toByteArray(File file) throws Exception {
        FileInputStream in = new FileInputStream(file);
        try {
            ByteArrayOutputStream baos = new ByteArrayOutputStream();
            byte[] buf = new byte[1024];
            int len;
            while ((len = in.read(buf)) != -1) {
                baos.write(buf, 0, len);
            }
            return baos.toByteArray();
        } finally {
            in.close();
        }
    }
}
| zer0se7en/netty | handler/src/test/java/io/netty/handler/ssl/PemEncodedTest.java | Java | apache-2.0 | 3,656 |
/*
* Licensed to Metamarkets Group Inc. (Metamarkets) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Metamarkets licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package io.druid.query.filter;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.base.Preconditions;
import com.google.common.base.Predicate;
import com.google.common.base.Predicates;
import com.google.common.base.Strings;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Range;
import com.google.common.collect.RangeSet;
import com.google.common.collect.TreeRangeSet;
import com.google.common.primitives.Floats;
import io.druid.common.guava.GuavaUtils;
import io.druid.java.util.common.StringUtils;
import io.druid.query.extraction.ExtractionFn;
import io.druid.segment.filter.DimensionPredicateFilter;
import io.druid.segment.filter.SelectorFilter;
import java.nio.ByteBuffer;
import java.util.Objects;
/**
*/
public class SelectorDimFilter implements DimFilter
{
private final String dimension;
private final String value;
private final ExtractionFn extractionFn;
private final Object initLock = new Object();
private DruidLongPredicate longPredicate;
private DruidFloatPredicate floatPredicate;
@JsonCreator
public SelectorDimFilter(
@JsonProperty("dimension") String dimension,
@JsonProperty("value") String value,
@JsonProperty("extractionFn") ExtractionFn extractionFn
)
{
Preconditions.checkArgument(dimension != null, "dimension must not be null");
this.dimension = dimension;
this.value = Strings.nullToEmpty(value);
this.extractionFn = extractionFn;
}
@Override
public byte[] getCacheKey()
{
byte[] dimensionBytes = StringUtils.toUtf8(dimension);
byte[] valueBytes = (value == null) ? new byte[]{} : StringUtils.toUtf8(value);
byte[] extractionFnBytes = extractionFn == null ? new byte[0] : extractionFn.getCacheKey();
return ByteBuffer.allocate(3 + dimensionBytes.length + valueBytes.length + extractionFnBytes.length)
.put(DimFilterUtils.SELECTOR_CACHE_ID)
.put(dimensionBytes)
.put(DimFilterUtils.STRING_SEPARATOR)
.put(valueBytes)
.put(DimFilterUtils.STRING_SEPARATOR)
.put(extractionFnBytes)
.array();
}
@Override
public DimFilter optimize()
{
return new InDimFilter(dimension, ImmutableList.of(value), extractionFn).optimize();
}
@Override
public Filter toFilter()
{
if (extractionFn == null) {
return new SelectorFilter(dimension, value);
} else {
final String valueOrNull = Strings.emptyToNull(value);
final DruidPredicateFactory predicateFactory = new DruidPredicateFactory()
{
@Override
public Predicate<String> makeStringPredicate()
{
return Predicates.equalTo(valueOrNull);
}
@Override
public DruidLongPredicate makeLongPredicate()
{
initLongPredicate();
return longPredicate;
}
@Override
public DruidFloatPredicate makeFloatPredicate()
{
initFloatPredicate();
return floatPredicate;
}
};
return new DimensionPredicateFilter(dimension, predicateFactory, extractionFn);
}
}
@JsonProperty
public String getDimension()
{
return dimension;
}
@JsonProperty
public String getValue()
{
return value;
}
@JsonProperty
public ExtractionFn getExtractionFn()
{
return extractionFn;
}
@Override
public String toString()
{
if (extractionFn != null) {
return String.format("%s(%s) = %s", extractionFn, dimension, value);
} else {
return String.format("%s = %s", dimension, value);
}
}
@Override
public boolean equals(Object o)
{
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
SelectorDimFilter that = (SelectorDimFilter) o;
if (!dimension.equals(that.dimension)) {
return false;
}
if (value != null ? !value.equals(that.value) : that.value != null) {
return false;
}
return extractionFn != null ? extractionFn.equals(that.extractionFn) : that.extractionFn == null;
}
@Override
public RangeSet<String> getDimensionRangeSet(String dimension)
{
if (!Objects.equals(getDimension(), dimension) || getExtractionFn() != null) {
return null;
}
RangeSet<String> retSet = TreeRangeSet.create();
retSet.add(Range.singleton(Strings.nullToEmpty(value)));
return retSet;
}
@Override
public int hashCode()
{
int result = dimension.hashCode();
result = 31 * result + (value != null ? value.hashCode() : 0);
result = 31 * result + (extractionFn != null ? extractionFn.hashCode() : 0);
return result;
}
private void initLongPredicate()
{
if (longPredicate != null) {
return;
}
synchronized (initLock) {
if (longPredicate != null) {
return;
}
final Long valueAsLong = GuavaUtils.tryParseLong(value);
if (valueAsLong == null) {
longPredicate = DruidLongPredicate.ALWAYS_FALSE;
} else {
// store the primitive, so we don't unbox for every comparison
final long unboxedLong = valueAsLong.longValue();
longPredicate = new DruidLongPredicate()
{
@Override
public boolean applyLong(long input)
{
return input == unboxedLong;
}
};
}
}
}
private void initFloatPredicate()
{
if (floatPredicate != null) {
return;
}
synchronized (initLock) {
if (floatPredicate != null) {
return;
}
final Float valueAsFloat = Floats.tryParse(value);
if (valueAsFloat == null) {
floatPredicate = DruidFloatPredicate.ALWAYS_FALSE;
} else {
final int floatBits = Float.floatToIntBits(valueAsFloat);
floatPredicate = new DruidFloatPredicate()
{
@Override
public boolean applyFloat(float input)
{
return Float.floatToIntBits(input) == floatBits;
}
};
}
}
}
}
| zhihuij/druid | processing/src/main/java/io/druid/query/filter/SelectorDimFilter.java | Java | apache-2.0 | 6,989 |
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License
*/
package com.android.location.provider;
import android.annotation.NonNull;
import android.annotation.Nullable;
import android.hardware.location.IActivityRecognitionHardware;
import android.hardware.location.IActivityRecognitionHardwareWatcher;
import android.os.Binder;
import android.os.IBinder;
import android.os.Process;
import android.os.RemoteException;
import android.util.Log;
/**
* A watcher class for Activity-Recognition instances.
*
* @deprecated use {@link ActivityRecognitionProviderClient} instead.
*/
@Deprecated
public class ActivityRecognitionProviderWatcher {
    private static final String TAG = "ActivityRecognitionProviderWatcher";

    private static ActivityRecognitionProviderWatcher sWatcher;
    private static final Object sWatcherLock = new Object();

    // Written by the binder thread in onInstanceChanged() and read by arbitrary
    // caller threads via getActivityRecognitionProvider(). Declared volatile so
    // readers are guaranteed to observe the most recently published instance.
    private volatile ActivityRecognitionProvider mActivityRecognitionProvider;

    private ActivityRecognitionProviderWatcher() {}

    /**
     * Returns the process-wide singleton watcher, creating it on first use.
     */
    public static ActivityRecognitionProviderWatcher getInstance() {
        synchronized (sWatcherLock) {
            if (sWatcher == null) {
                sWatcher = new ActivityRecognitionProviderWatcher();
            }
            return sWatcher;
        }
    }

    private IActivityRecognitionHardwareWatcher.Stub mWatcherStub =
            new IActivityRecognitionHardwareWatcher.Stub() {
                @Override
                public void onInstanceChanged(IActivityRecognitionHardware instance) {
                    // Only the system server may swap the hardware instance.
                    int callingUid = Binder.getCallingUid();
                    if (callingUid != Process.SYSTEM_UID) {
                        Log.d(TAG, "Ignoring calls from non-system server. Uid: " + callingUid);
                        return;
                    }

                    try {
                        mActivityRecognitionProvider = new ActivityRecognitionProvider(instance);
                    } catch (RemoteException e) {
                        Log.e(TAG, "Error creating Hardware Activity-Recognition", e);
                    }
                }
            };

    /**
     * Gets the binder needed to interact with proxy provider in the platform.
     */
    @NonNull
    public IBinder getBinder() {
        return mWatcherStub;
    }

    /**
     * Gets an object that supports the functionality of {@link ActivityRecognitionProvider}.
     *
     * @return Non-null value if the functionality is supported by the platform, false otherwise.
     */
    @Nullable
    public ActivityRecognitionProvider getActivityRecognitionProvider() {
        return mActivityRecognitionProvider;
    }
}
| Ant-Droid/android_frameworks_base_OLD | location/lib/java/com/android/location/provider/ActivityRecognitionProviderWatcher.java | Java | apache-2.0 | 3,055 |
---
id: version-2.6.0-admin-api-brokers
title: Managing Brokers
sidebar_label: Brokers
original_id: admin-api-brokers
---
Pulsar brokers consist of two components:
1. An HTTP server exposing a {@inject: rest:REST:/} interface for administration and [topic](reference-terminology.md#topic) lookup.
2. A dispatcher that handles all Pulsar [message](reference-terminology.md#message) transfers.
[Brokers](reference-terminology.md#broker) can be managed via:
* The [`brokers`](reference-pulsar-admin.md#brokers) command of the [`pulsar-admin`](reference-pulsar-admin.md) tool
* The `/admin/v2/brokers` endpoint of the admin {@inject: rest:REST:/} API
* The `brokers` method of the {@inject: javadoc:PulsarAdmin:/admin/org/apache/pulsar/client/admin/PulsarAdmin.html} object in the [Java API](client-libraries-java.md)
In addition to being configurable when you start them up, brokers can also be [dynamically configured](#dynamic-broker-configuration).
> See the [Configuration](reference-configuration.md#broker) page for a full listing of broker-specific configuration parameters.
## Brokers resources
### List active brokers
Fetch all available active brokers that are serving traffic.
#### pulsar-admin
```shell
$ pulsar-admin brokers list use
```
```
broker1.use.org.com:8080
```
###### REST
{@inject: endpoint|GET|/admin/v2/brokers/:cluster|operation/getActiveBrokers?version=[[pulsar:version_number]]}
###### Java
```java
admin.brokers().getActiveBrokers(clusterName)
```
#### List of namespaces owned by a given broker
It finds all namespaces which are owned and served by a given broker.
###### CLI
```shell
$ pulsar-admin brokers namespaces use \
--url broker1.use.org.com:8080
```
```json
{
"my-property/use/my-ns/0x00000000_0xffffffff": {
"broker_assignment": "shared",
"is_controlled": false,
"is_active": true
}
}
```
###### REST
{@inject: endpoint|GET|/admin/v2/brokers/:cluster/:broker/ownedNamespaces|operation/getOwnedNamespaes?version=[[pulsar:version_number]]}
###### Java
```java
admin.brokers().getOwnedNamespaces(cluster,brokerUrl);
```
### Dynamic broker configuration
One way to configure a Pulsar [broker](reference-terminology.md#broker) is to supply a [configuration](reference-configuration.md#broker) when the broker is [started up](reference-cli-tools.md#pulsar-broker).
But since all broker configuration in Pulsar is stored in ZooKeeper, configuration values can also be dynamically updated *while the broker is running*. When you update broker configuration dynamically, ZooKeeper will notify the broker of the change and the broker will then override any existing configuration values.
* The [`brokers`](reference-pulsar-admin.md#brokers) command for the [`pulsar-admin`](reference-pulsar-admin.md) tool has a variety of subcommands that enable you to manipulate a broker's configuration dynamically, enabling you to [update config values](#update-dynamic-configuration) and more.
* In the Pulsar admin {@inject: rest:REST:/} API, dynamic configuration is managed through the `/admin/v2/brokers/configuration` endpoint.
### Update dynamic configuration
#### pulsar-admin
The [`update-dynamic-config`](reference-pulsar-admin.md#brokers-update-dynamic-config) subcommand will update existing configuration. It takes two arguments: the name of the parameter and the new value using the `config` and `value` flag respectively. Here's an example for the [`brokerShutdownTimeoutMs`](reference-configuration.md#broker-brokerShutdownTimeoutMs) parameter:
```shell
$ pulsar-admin brokers update-dynamic-config --config brokerShutdownTimeoutMs --value 100
```
#### REST API
{@inject: endpoint|POST|/admin/v2/brokers/configuration/:configName/:configValue|operation/updateDynamicConfiguration?version=[[pulsar:version_number]]}
#### Java
```java
admin.brokers().updateDynamicConfiguration(configName, configValue);
```
### List updated values
Fetch a list of all potentially updatable configuration parameters.
#### pulsar-admin
```shell
$ pulsar-admin brokers list-dynamic-config
brokerShutdownTimeoutMs
```
#### REST API
{@inject: endpoint|GET|/admin/v2/brokers/configuration|operation/getDynamicConfigurationName?version=[[pulsar:version_number]]}
#### Java
```java
admin.brokers().getDynamicConfigurationNames();
```
### List all
Fetch a list of all parameters that have been dynamically updated.
#### pulsar-admin
```shell
$ pulsar-admin brokers get-all-dynamic-config
brokerShutdownTimeoutMs:100
```
#### REST API
{@inject: endpoint|GET|/admin/v2/brokers/configuration/values|operation/getAllDynamicConfigurations?version=[[pulsar:version_number]]}
#### Java
```java
admin.brokers().getAllDynamicConfigurations();
```
| massakam/pulsar | site2/website/versioned_docs/version-2.6.0/admin-api-brokers.md | Markdown | apache-2.0 | 4,711 |
/*
* Copyright (c) 2008-2017, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.internal.adapter;
import javax.cache.processor.EntryProcessor;
import javax.cache.processor.EntryProcessorException;
import javax.cache.processor.MutableEntry;
import java.io.Serializable;
public class ICacheReplaceEntryProcessor implements EntryProcessor<Integer, String, String>, Serializable {

    private static final long serialVersionUID = -396575576353368113L;

    /**
     * Replaces every occurrence of {@code arguments[0]} with {@code arguments[1]}
     * in the entry's current value, stores the result back into the entry and
     * returns it. When the entry has no value, the entry is left untouched and
     * {@code null} is returned.
     */
    @Override
    public String process(MutableEntry<Integer, String> entry, Object... arguments) throws EntryProcessorException {
        String current = entry.getValue();
        if (current == null) {
            return null;
        }

        String target = (String) arguments[0];
        String replacement = (String) arguments[1];

        String updated = current.replace(target, replacement);
        entry.setValue(updated);
        return updated;
    }
}
| tombujok/hazelcast | hazelcast/src/test/java/com/hazelcast/internal/adapter/ICacheReplaceEntryProcessor.java | Java | apache-2.0 | 1,482 |
/*<license>
Copyright 2005 - $Date$ by PeopleWare n.v..
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
</license>*/
package org.ppwcode.vernacular.persistence_III;
import static org.ppwcode.metainfo_I.License.Type.APACHE_V2;
import java.io.Serializable;
import org.ppwcode.metainfo_I.Copyright;
import org.ppwcode.metainfo_I.License;
import org.ppwcode.metainfo_I.vcs.SvnInfo;
import org.ppwcode.vernacular.semantics_VI.bean.AbstractRousseauBean;
/**
* A partial implementation of the interface {@link PersistentBean}.
*
* @author Nele Smeets
* @author Ruben Vandeginste
* @author Jan Dockx
* @author PeopleWare n.v.
*
* @mudo We now have a dependency here on JPA via annotations. Also, the listener is defined in a subpackage, which
* depends on this package. This introduces a cycle! This is a bad idea. Like this, you always need the JPA
* libraries, even if they are annotations, because the annotations are loaded in the import statements too
* (at least under 1.5). Thus, the annotations must go, and we need to use the xml files.
*/
@Copyright("2004 - $Date$, PeopleWare n.v.")
@License(APACHE_V2)
@SvnInfo(revision = "$Revision$",
         date = "$Date$")
public abstract class AbstractPersistentBean<_Id_ extends Serializable> extends AbstractRousseauBean
    implements PersistentBean<_Id_> {

  /*<property name="id">*/
  //------------------------------------------------------------------

  /**
   * The opaque persistence identifier, or {@code null} while this bean
   * has not been assigned one.
   */
  public final _Id_ getPersistenceId() {
    return $persistenceId;
  }

  /**
   * Null-safe comparison of persistence ids: {@code false} when {@code other}
   * is {@code null}; otherwise {@code true} iff both ids are {@code null} or
   * both are equal.
   */
  public final boolean hasSamePersistenceId(final PersistentBean<_Id_> other) {
    if (other == null) {
      return false;
    }
    final _Id_ ownId = getPersistenceId();
    final _Id_ otherId = other.getPersistenceId();
    return (ownId == null) ? (otherId == null) : ownId.equals(otherId);
  }

  public final void setPersistenceId(final _Id_ persistenceId) {
    $persistenceId = persistenceId;
  }

  // @Id
  // @GeneratedValue
  // @Column(name="persistenceId")
  private _Id_ $persistenceId;

  /*</property>*/

}
| jandppw/ppwcode-recovered-from-google-code | java/vernacular/persistence/dev/d20081014-1359/src/main/java/org/ppwcode/vernacular/persistence_III/AbstractPersistentBean.java | Java | apache-2.0 | 2,465 |
/*
* Copyright (c) 2019 Intel Corporation
*
* SPDX-License-Identifier: Apache-2.0
*/
#ifndef ZEPHYR_INCLUDE_ARCH_X86_MULTIBOOT_H_
#define ZEPHYR_INCLUDE_ARCH_X86_MULTIBOOT_H_
#ifndef _ASMLANGUAGE
/*
* Multiboot (version 1) boot information structure.
*
* Only fields/values of interest to Zephyr are enumerated: at
* present, that means only those pertaining to the framebuffer.
*/
struct multiboot_info {
	uint32_t flags;           /* MULTIBOOT_INFO_FLAGS_* bits: which fields are valid */
	uint32_t mem_lower;       /* valid if MULTIBOOT_INFO_FLAGS_MEM is set */
	uint32_t mem_upper;       /* valid if MULTIBOOT_INFO_FLAGS_MEM is set */
	uint32_t unused0[8];      /* fields not of interest to Zephyr */
	uint32_t mmap_length;     /* memory map; valid if MULTIBOOT_INFO_FLAGS_MMAP */
	uint32_t mmap_addr;       /* points to struct multiboot_mmap entries */
	uint32_t unused1[9];      /* fields not of interest to Zephyr */
	uint32_t fb_addr_lo;      /* framebuffer fields below are valid if */
	uint32_t fb_addr_hi;      /* MULTIBOOT_INFO_FLAGS_FB is set; lo/hi are the */
	uint32_t fb_pitch;        /* low/high 32 bits of the framebuffer address */
	uint32_t fb_width;
	uint32_t fb_height;
	uint8_t  fb_bpp;          /* bits per pixel */
	uint8_t  fb_type;         /* only MULTIBOOT_INFO_FB_TYPE_RGB is supported */
	uint8_t  fb_color_info[6];
};
extern struct multiboot_info multiboot_info;
extern void z_multiboot_init(struct multiboot_info *info_pa);
/*
* the mmap_addr field points to a series of entries of the following form.
*/
struct multiboot_mmap {
	uint32_t size;    /* entry size per the Multiboot spec -- TODO confirm */
	uint64_t base;    /* start of the region (physical address) */
	uint64_t length;  /* size of the region in bytes */
	uint32_t type;    /* one of the MULTIBOOT_MMAP_* values below */
} __packed;
#endif /* _ASMLANGUAGE */
/*
* Possible values for multiboot_mmap.type field.
* Other values should be assumed to be unusable ranges.
*/
#define MULTIBOOT_MMAP_RAM 1 /* available RAM */
#define MULTIBOOT_MMAP_ACPI 3 /* reserved for ACPI */
#define MULTIBOOT_MMAP_NVS 4 /* ACPI non-volatile */
#define MULTIBOOT_MMAP_DEFECTIVE 5 /* defective RAM module */
/*
* Magic numbers: the kernel multiboot header (see crt0.S) begins with
* MULTIBOOT_HEADER_MAGIC to signal to the booter that it supports
* multiboot. On kernel entry, EAX is set to MULTIBOOT_EAX_MAGIC to
* signal that the boot loader is multiboot compliant.
*/
#define MULTIBOOT_HEADER_MAGIC 0x1BADB002
#define MULTIBOOT_EAX_MAGIC 0x2BADB002
/*
* Typically, we put no flags in the multiboot header, as it exists solely
* to reassure the loader that we're a valid binary. The exception to this
* is when we want the loader to configure the framebuffer for us.
*/
#define MULTIBOOT_HEADER_FLAG_MEM BIT(1) /* want mem_/mmap_* info */
#define MULTIBOOT_HEADER_FLAG_FB BIT(2) /* want fb_* info */
#ifdef CONFIG_MULTIBOOT_FRAMEBUF
#define MULTIBOOT_HEADER_FLAGS \
(MULTIBOOT_HEADER_FLAG_FB | MULTIBOOT_HEADER_FLAG_MEM)
#else
#define MULTIBOOT_HEADER_FLAGS MULTIBOOT_HEADER_FLAG_MEM
#endif
/* The flags in the boot info structure tell us which fields are valid. */
#define MULTIBOOT_INFO_FLAGS_MEM (1 << 0) /* mem_* valid */
#define MULTIBOOT_INFO_FLAGS_MMAP (1 << 6) /* mmap_* valid */
#define MULTIBOOT_INFO_FLAGS_FB (1 << 12) /* fb_* valid */
/* The only fb_type we support is RGB. No text modes and no color palettes. */
#define MULTIBOOT_INFO_FB_TYPE_RGB 1
#endif /* ZEPHYR_INCLUDE_ARCH_X86_MULTIBOOT_H_ */
| zephyrproject-rtos/zephyr | include/arch/x86/multiboot.h | C | apache-2.0 | 2,729 |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""NEC plugin sharednet
Revision ID: 3b54bf9e29f7
Revises: 511471cc46b
Create Date: 2013-02-17 09:21:48.287134
"""
# revision identifiers, used by Alembic.
revision = '3b54bf9e29f7'
down_revision = '511471cc46b'
# Change to ['*'] if this migration applies to all plugins
migration_for_plugins = [
'neutron.plugins.nec.nec_plugin.NECPluginV2'
]
from alembic import op
import sqlalchemy as sa
from neutron.db import migration
# All four OFC mapping tables share the same schema: a quantum (neutron) id
# mapped one-to-one to an OFC id.
OFC_MAPPING_TABLES = (
    'ofctenantmappings',
    'ofcnetworkmappings',
    'ofcportmappings',
    'ofcfiltermappings',
)


def _create_ofc_mapping_table(table_name):
    """Create a single OFC <-> quantum id mapping table.

    Each table uses ``quantum_id`` as its primary key and enforces a
    unique ``ofc_id``.
    """
    op.create_table(
        table_name,
        sa.Column('ofc_id', sa.String(length=255), nullable=False),
        sa.Column('quantum_id', sa.String(length=36), nullable=False),
        sa.PrimaryKeyConstraint('quantum_id'),
        sa.UniqueConstraint('ofc_id')
    )


def upgrade(active_plugin=None, options=None):
    """Create the OFC mapping tables for the NEC plugin.

    No-op when this migration does not apply to the active plugin.
    """
    if not migration.should_run(active_plugin, migration_for_plugins):
        return

    # The four tables were previously created with four duplicated
    # op.create_table() calls; they share one schema, so loop instead.
    for table_name in OFC_MAPPING_TABLES:
        _create_ofc_mapping_table(table_name)
def downgrade(active_plugin=None, options=None):
    """Drop the OFC mapping tables created by ``upgrade``.

    Tables are dropped in the reverse order of their creation. No-op when
    this migration does not apply to the active plugin.
    """
    if not migration.should_run(active_plugin, migration_for_plugins):
        return

    for table_name in ('ofcfiltermappings',
                       'ofcportmappings',
                       'ofcnetworkmappings',
                       'ofctenantmappings'):
        op.drop_table(table_name)
| Brocade-OpenSource/OpenStack-DNRM-Neutron | neutron/db/migration/alembic_migrations/versions/3b54bf9e29f7_nec_plugin_sharednet.py | Python | apache-2.0 | 2,645 |
/*
* Copyright Terracotta, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ehcache.config.builders;
import org.ehcache.CacheManager;
import org.ehcache.PersistentCacheManager;
import org.ehcache.config.Builder;
import org.ehcache.config.CacheConfiguration;
import org.ehcache.config.Configuration;
import org.ehcache.config.units.MemoryUnit;
import org.ehcache.core.EhcacheManager;
import org.ehcache.core.spi.store.heap.SizeOfEngine;
import org.ehcache.impl.config.copy.DefaultCopyProviderConfiguration;
import org.ehcache.impl.config.event.CacheEventDispatcherFactoryConfiguration;
import org.ehcache.impl.config.loaderwriter.writebehind.WriteBehindProviderConfiguration;
import org.ehcache.impl.config.persistence.CacheManagerPersistenceConfiguration;
import org.ehcache.impl.config.serializer.DefaultSerializationProviderConfiguration;
import org.ehcache.impl.config.store.heap.DefaultSizeOfEngineProviderConfiguration;
import org.ehcache.impl.config.store.disk.OffHeapDiskStoreProviderConfiguration;
import org.ehcache.spi.copy.Copier;
import org.ehcache.spi.serialization.Serializer;
import org.ehcache.spi.service.Service;
import org.ehcache.spi.service.ServiceCreationConfiguration;
import java.io.File;
import java.util.Collection;
import java.util.HashSet;
import java.util.Set;
import static java.util.Collections.emptySet;
import static java.util.Collections.unmodifiableSet;
import static org.ehcache.config.builders.ConfigurationBuilder.newConfigurationBuilder;
import static org.ehcache.impl.config.store.heap.DefaultSizeOfEngineConfiguration.DEFAULT_MAX_OBJECT_SIZE;
import static org.ehcache.impl.config.store.heap.DefaultSizeOfEngineConfiguration.DEFAULT_OBJECT_GRAPH_SIZE;
import static org.ehcache.impl.config.store.heap.DefaultSizeOfEngineConfiguration.DEFAULT_UNIT;
/**
* The {@code CacheManagerBuilder} enables building cache managers using a fluent style.
* <p>
* As with all Ehcache builders, all instances are immutable and calling any method on the builder will return a new
* instance without modifying the one on which the method was called.
* This enables the sharing of builder instances without any risk of seeing them modified by code elsewhere.
*/
public class CacheManagerBuilder<T extends CacheManager> implements Builder<T> {
private final ConfigurationBuilder configBuilder;
private final Set<Service> services;
/**
* Builds a {@link CacheManager} or a subtype of it and initializes it if requested.
*
* @param init whether the returned {@code CacheManager} is to be initialized or not
* @return a {@code CacheManager} or a subtype of it
*/
public T build(final boolean init) {
final T cacheManager = newCacheManager(services, configBuilder.build());
if(init) {
cacheManager.init();
}
return cacheManager;
}
/**
* Builds a {@link CacheManager} or a subtype of it uninitialized.
*
* @return a {@code CacheManager} or a subtype of it uninitialized
*/
@Override
public T build() {
return build(false);
}
private CacheManagerBuilder() {
this.configBuilder = newConfigurationBuilder();
this.services = emptySet();
}
private CacheManagerBuilder(CacheManagerBuilder<T> builder, Set<Service> services) {
this.configBuilder = builder.configBuilder;
this.services = unmodifiableSet(services);
}
private CacheManagerBuilder(CacheManagerBuilder<T> builder, ConfigurationBuilder configBuilder) {
this.configBuilder = configBuilder;
this.services = builder.services;
}
/**
* Creates a new {@link CacheManager} based on the provided configuration.
* The returned {@code CacheManager} is uninitialized.
*
* @param configuration the configuration to use
* @return a {@code CacheManager}
*/
public static CacheManager newCacheManager(final Configuration configuration) {
return new EhcacheManager(configuration);
}
T newCacheManager(Collection<Service> services, final Configuration configuration) {
final EhcacheManager ehcacheManager = new EhcacheManager(configuration, services);
return cast(ehcacheManager);
}
@SuppressWarnings("unchecked")
T cast(EhcacheManager ehcacheManager) {
return (T) ehcacheManager;
}
/**
 * Adds a {@link CacheConfiguration} under the given alias to the returned builder.
 *
 * @param alias the cache alias
 * @param configuration the {@code CacheConfiguration}
 * @param <K> the cache key type
 * @param <V> the cache value type
 * @return a new builder carrying the added cache configuration
 *
 * @see CacheConfigurationBuilder
 */
public <K, V> CacheManagerBuilder<T> withCache(String alias, CacheConfiguration<K, V> configuration) {
  final ConfigurationBuilder updated = configBuilder.addCache(alias, configuration);
  return new CacheManagerBuilder<>(this, updated);
}
/**
 * Convenience overload that first builds the {@link CacheConfiguration} from the
 * given {@link Builder}, then registers it under the alias.
 *
 * @param alias the cache alias
 * @param configurationBuilder the {@code Builder} producing the {@code CacheConfiguration}
 * @param <K> the cache key type
 * @param <V> the cache value type
 * @return a new builder carrying the added cache configuration
 *
 * @see CacheConfigurationBuilder
 */
public <K, V> CacheManagerBuilder<T> withCache(String alias, Builder<? extends CacheConfiguration<K, V>> configurationBuilder) {
  final CacheConfiguration<K, V> built = configurationBuilder.build();
  return withCache(alias, built);
}
/**
* Specializes the returned {@link CacheManager} subtype through a specific {@link CacheManagerConfiguration} which
* will optionally add configurations to the returned builder.
*
* @param cfg the {@code CacheManagerConfiguration} to use
* @param <N> the subtype of {@code CacheManager}
* @return a new builder ready to build a more specific subtype of cache manager
*
* @see #persistence(String)
* @see PersistentCacheManager
* @see CacheManagerPersistenceConfiguration
*/
public <N extends T> CacheManagerBuilder<N> with(CacheManagerConfiguration<N> cfg) {
// The configuration itself produces the builder parameterized on the narrower type N.
return cfg.builder(this);
}
/**
* Convenience method to specialize the returned {@link CacheManager} subtype through a {@link CacheManagerConfiguration}
* built using the provided {@link Builder}.
*
* @param cfgBuilder the {@code Builder} to get the {@code CacheManagerConfiguration} from
* @param <N> the subtype of {@code CacheManager}
* @return a new builder ready to build a more specific subtype of cache manager
*
* @see CacheConfigurationBuilder
*/
public <N extends T> CacheManagerBuilder<N> with(Builder<? extends CacheManagerConfiguration<N>> cfgBuilder) {
return with(cfgBuilder.build());
}
/**
 * Adds a {@link Service} instance to the returned builder.
 * <p>
 * The service instance will be used by the constructed {@link CacheManager}.
 *
 * @param service the {@code Service} to add
 * @return a new builder with the added service
 */
public CacheManagerBuilder<T> using(Service service) {
  final Set<Service> extended = new HashSet<>(services);
  extended.add(service);
  return new CacheManagerBuilder<>(this, extended);
}
/**
 * Registers a default {@link Copier} for the given type on the returned builder.
 *
 * @param clazz the {@code Class} the copier applies to
 * @param copier the {@code Copier} implementation class
 * @param <C> the type which can be copied
 * @return a new builder with the added default copier
 */
public <C> CacheManagerBuilder<T> withCopier(Class<C> clazz, Class<? extends Copier<C>> copier) {
  final DefaultCopyProviderConfiguration existing = configBuilder.findServiceByClass(DefaultCopyProviderConfiguration.class);
  if (existing == null) {
    // First copier: create the provider configuration from scratch.
    DefaultCopyProviderConfiguration fresh = new DefaultCopyProviderConfiguration();
    fresh.addCopierFor(clazz, copier);
    return new CacheManagerBuilder<>(this, configBuilder.addService(fresh));
  }
  // Copy the existing provider configuration, overwriting any mapping for clazz.
  DefaultCopyProviderConfiguration merged = new DefaultCopyProviderConfiguration(existing);
  merged.addCopierFor(clazz, copier, true);
  return new CacheManagerBuilder<>(this, configBuilder.removeService(existing).addService(merged));
}
/**
 * Registers a default {@link Serializer} for the given type on the returned builder.
 *
 * @param clazz the {@code Class} the serializer applies to
 * @param serializer the {@code Serializer} implementation class
 * @param <C> the type which can be serialized
 * @return a new builder with the added default serializer
 */
public <C> CacheManagerBuilder<T> withSerializer(Class<C> clazz, Class<? extends Serializer<C>> serializer) {
  final DefaultSerializationProviderConfiguration existing = configBuilder.findServiceByClass(DefaultSerializationProviderConfiguration.class);
  if (existing == null) {
    // First serializer: create the provider configuration from scratch.
    DefaultSerializationProviderConfiguration fresh = new DefaultSerializationProviderConfiguration();
    fresh.addSerializerFor(clazz, serializer);
    return new CacheManagerBuilder<>(this, configBuilder.addService(fresh));
  }
  // Copy the existing provider configuration, overwriting any mapping for clazz.
  DefaultSerializationProviderConfiguration merged = new DefaultSerializationProviderConfiguration(existing);
  merged.addSerializerFor(clazz, serializer, true);
  return new CacheManagerBuilder<>(this, configBuilder.removeService(existing).addService(merged));
}
/**
 * Adds a default {@link SizeOfEngine} configuration, that limits the max object graph to
 * size, to the returned builder.
 *
 * @param size the max object graph size
 * @return a new builder with the added configuration
 */
public CacheManagerBuilder<T> withDefaultSizeOfMaxObjectGraph(long size) {
  final DefaultSizeOfEngineProviderConfiguration current = configBuilder.findServiceByClass(DefaultSizeOfEngineProviderConfiguration.class);
  if (current == null) {
    DefaultSizeOfEngineProviderConfiguration added = new DefaultSizeOfEngineProviderConfiguration(DEFAULT_MAX_OBJECT_SIZE, DEFAULT_UNIT, size);
    return new CacheManagerBuilder<>(this, configBuilder.addService(added));
  }
  // Keep the already-configured object size and unit; only the graph limit changes.
  DefaultSizeOfEngineProviderConfiguration replaced =
      new DefaultSizeOfEngineProviderConfiguration(current.getMaxObjectSize(), current.getUnit(), size);
  return new CacheManagerBuilder<>(this, configBuilder.removeService(current).addService(replaced));
}
/**
 * Adds a default {@link SizeOfEngine} configuration, that limits the max object size, to
 * the returned builder.
 *
 * @param size the max object size
 * @param unit the max object size unit
 * @return a new builder with the added configuration
 */
public CacheManagerBuilder<T> withDefaultSizeOfMaxObjectSize(long size, MemoryUnit unit) {
  final DefaultSizeOfEngineProviderConfiguration current = configBuilder.findServiceByClass(DefaultSizeOfEngineProviderConfiguration.class);
  if (current == null) {
    DefaultSizeOfEngineProviderConfiguration added = new DefaultSizeOfEngineProviderConfiguration(size, unit, DEFAULT_OBJECT_GRAPH_SIZE);
    return new CacheManagerBuilder<>(this, configBuilder.addService(added));
  }
  // Keep the already-configured graph limit; only the object size cap changes.
  DefaultSizeOfEngineProviderConfiguration replaced =
      new DefaultSizeOfEngineProviderConfiguration(size, unit, current.getMaxObjectGraphSize());
  return new CacheManagerBuilder<>(this, configBuilder.removeService(current).addService(replaced));
}
/**
 * Adds a {@link WriteBehindProviderConfiguration}, that specifies the thread pool to use, to the returned builder.
 *
 * @param threadPoolAlias the thread pool alias
 * @return a new builder with the added configuration
 *
 * @see PooledExecutionServiceConfigurationBuilder
 */
public CacheManagerBuilder<T> withDefaultWriteBehindThreadPool(String threadPoolAlias) {
  final WriteBehindProviderConfiguration current = configBuilder.findServiceByClass(WriteBehindProviderConfiguration.class);
  // Drop any previously registered alias before adding the new one.
  final ConfigurationBuilder base = (current == null) ? configBuilder : configBuilder.removeService(current);
  return new CacheManagerBuilder<>(this, base.addService(new WriteBehindProviderConfiguration(threadPoolAlias)));
}
/**
 * Adds a {@link OffHeapDiskStoreProviderConfiguration}, that specifies the thread pool to use, to the returned
 * builder.
 *
 * @param threadPoolAlias the thread pool alias
 * @return a new builder with the added configuration
 *
 * @see PooledExecutionServiceConfigurationBuilder
 */
public CacheManagerBuilder<T> withDefaultDiskStoreThreadPool(String threadPoolAlias) {
  final OffHeapDiskStoreProviderConfiguration current = configBuilder.findServiceByClass(OffHeapDiskStoreProviderConfiguration.class);
  // Drop any previously registered alias before adding the new one.
  final ConfigurationBuilder base = (current == null) ? configBuilder : configBuilder.removeService(current);
  return new CacheManagerBuilder<>(this, base.addService(new OffHeapDiskStoreProviderConfiguration(threadPoolAlias)));
}
/**
 * Adds a {@link CacheEventDispatcherFactoryConfiguration}, that specifies the thread pool to use, to the returned
 * builder.
 *
 * @param threadPoolAlias the thread pool alias
 * @return a new builder with the added configuration
 *
 * @see PooledExecutionServiceConfigurationBuilder
 */
public CacheManagerBuilder<T> withDefaultEventListenersThreadPool(String threadPoolAlias) {
  final CacheEventDispatcherFactoryConfiguration current = configBuilder.findServiceByClass(CacheEventDispatcherFactoryConfiguration.class);
  // Drop any previously registered alias before adding the new one.
  final ConfigurationBuilder base = (current == null) ? configBuilder : configBuilder.removeService(current);
  return new CacheManagerBuilder<>(this, base.addService(new CacheEventDispatcherFactoryConfiguration(threadPoolAlias)));
}
/**
* Adds a {@link ServiceCreationConfiguration} to the returned builder.
* <p>
* These configurations are used to load services and configure them at creation time.
*
* @param serviceConfiguration the {@code ServiceCreationConfiguration} to use
* @return a new builder with the added configuration
*/
public CacheManagerBuilder<T> using(ServiceCreationConfiguration<?> serviceConfiguration) {
return new CacheManagerBuilder<>(this, configBuilder.addService(serviceConfiguration));
}
/**
* Replaces an existing {@link ServiceCreationConfiguration} of the same type on the returned builder.
* <p>
* Duplicate service creation configuration will cause a cache manager to fail to initialize.
*
* @param overwriteServiceConfiguration the new {@code ServiceCreationConfiguration} to use
* @return a new builder with the replaced configuration
*/
public CacheManagerBuilder<T> replacing(ServiceCreationConfiguration<?> overwriteServiceConfiguration) {
// NOTE(review): findServiceByClass may return null when no configuration of this
// type exists, in which case removeService(null) is invoked — confirm
// ConfigurationBuilder tolerates a null argument, or guard before removing.
ServiceCreationConfiguration<?> existingConfiguration = configBuilder.findServiceByClass(overwriteServiceConfiguration.getClass());
return new CacheManagerBuilder<>(this, configBuilder.removeService(existingConfiguration)
.addService(overwriteServiceConfiguration));
}
/**
* Adds a {@link ClassLoader}, to use for non Ehcache types, to the returned builder
*
* @param classLoader the class loader to use
* @return a new builder with the added class loader
*/
public CacheManagerBuilder<T> withClassLoader(ClassLoader classLoader) {
return new CacheManagerBuilder<>(this, configBuilder.withClassLoader(classLoader));
}
/**
* Creates a new {@code CacheManagerBuilder}
*
* @return the cache manager builder
*/
public static CacheManagerBuilder<CacheManager> newCacheManagerBuilder() {
return new CacheManagerBuilder<>();
}
/**
* Convenience method to get a {@link CacheManagerConfiguration} for a {@link PersistentCacheManager} stored on disk. The actual
* level of persistence is configured on the disk resource pool per cache.
*
* @param rootDirectory the root directory to use for disk storage
* @return a {@code CacheManagerConfiguration}
*
* @see ResourcePoolsBuilder#disk(long, MemoryUnit, boolean)
* @see #with(CacheManagerConfiguration)
* @see PersistentCacheManager
*/
public static CacheManagerConfiguration<PersistentCacheManager> persistence(String rootDirectory) {
// Delegates to the File overload.
return persistence(new File(rootDirectory));
}
/**
* Convenience method to get a {@link CacheManagerConfiguration} for a {@link PersistentCacheManager} stored on disk. The actual
* level of persistence is configured on the disk resource pool per cache.
*
* @param rootDirectory the root directory to use for disk storage
* @return a {@code CacheManagerConfiguration}
*
* @see ResourcePoolsBuilder#disk(long, MemoryUnit, boolean)
* @see #with(CacheManagerConfiguration)
* @see PersistentCacheManager
*/
public static CacheManagerConfiguration<PersistentCacheManager> persistence(File rootDirectory) {
return new CacheManagerPersistenceConfiguration(rootDirectory);
}
}
| aurbroszniowski/ehcache3 | impl/src/main/java/org/ehcache/config/builders/CacheManagerBuilder.java | Java | apache-2.0 | 17,637 |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
package org.elasticsearch.xpack.core.ml.datafeed;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.core.TimeValue;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractSerializingTestCase;
import java.io.IOException;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.nullValue;
import static org.hamcrest.core.Is.is;
/**
 * Serialization, parsing and validation tests for {@link DelayedDataCheckConfig}.
 */
public class DelayedDataCheckConfigTests extends AbstractSerializingTestCase<DelayedDataCheckConfig> {

    @Override
    protected DelayedDataCheckConfig createTestInstance() {
        return createRandomizedConfig(100);
    }

    @Override
    protected Writeable.Reader<DelayedDataCheckConfig> instanceReader() {
        return DelayedDataCheckConfig::new;
    }

    @Override
    protected DelayedDataCheckConfig doParseInstance(XContentParser parser) {
        return DelayedDataCheckConfig.STRICT_PARSER.apply(parser, null);
    }

    public void testConstructor() {
        // A negative window and a 25-hour window are both rejected by the constructor.
        expectThrows(IllegalArgumentException.class, () -> new DelayedDataCheckConfig(true, TimeValue.MINUS_ONE));
        expectThrows(IllegalArgumentException.class, () -> new DelayedDataCheckConfig(true, TimeValue.timeValueHours(25)));
    }

    public void testEnabledDelayedDataCheckConfig() {
        DelayedDataCheckConfig config = DelayedDataCheckConfig.enabledDelayedDataCheckConfig(TimeValue.timeValueHours(5));
        assertThat(config.isEnabled(), equalTo(true));
        assertThat(config.getCheckWindow(), equalTo(TimeValue.timeValueHours(5)));
    }

    public void testDisabledDelayedDataCheckConfig() {
        DelayedDataCheckConfig config = DelayedDataCheckConfig.disabledDelayedDataCheckConfig();
        assertThat(config.isEnabled(), equalTo(false));
        assertThat(config.getCheckWindow(), equalTo(null));
    }

    public void testDefaultDelayedDataCheckConfig() {
        DelayedDataCheckConfig config = DelayedDataCheckConfig.defaultDelayedDataCheckConfig();
        assertThat(config.isEnabled(), equalTo(true));
        assertThat(config.getCheckWindow(), is(nullValue()));
    }

    public static DelayedDataCheckConfig createRandomizedConfig(long bucketSpanMillis) {
        boolean enabled = randomBoolean();
        TimeValue window = null;
        if (enabled || randomBoolean()) {
            // time span is required to be at least 1 millis, so we use a custom method to generate a time value here
            window = new TimeValue(randomLongBetween(bucketSpanMillis, bucketSpanMillis * 2));
        }
        return new DelayedDataCheckConfig(enabled, window);
    }

    @Override
    protected DelayedDataCheckConfig mutateInstance(DelayedDataCheckConfig instance) throws IOException {
        boolean enabled = instance.isEnabled();
        TimeValue window = instance.getCheckWindow();
        int branch = between(0, 1);
        if (branch == 0) {
            // Flip the enabled flag, optionally changing the window as well.
            enabled = enabled == false;
            window = randomBoolean() ? TimeValue.timeValueMillis(randomLongBetween(1, 1000)) : null;
        } else {
            // Change the window, forcing the check on.
            window = window == null
                ? TimeValue.timeValueMillis(randomLongBetween(1, 1000))
                : new TimeValue(window.getMillis() + between(10, 100));
            enabled = true;
        }
        return new DelayedDataCheckConfig(enabled, window);
    }
}
| ern/elasticsearch | x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DelayedDataCheckConfigTests.java | Java | apache-2.0 | 4,017 |
/* ----------------------------------------------------------------------------
* This file was automatically generated by SWIG (http://www.swig.org).
* Version 2.0.12
*
* Do not make changes to this file unless you know what you are doing--modify
* the SWIG interface file instead.
* ----------------------------------------------------------------------------- */
namespace RakNet {
using System;
using System.Runtime.InteropServices;
/// <summary>
/// SWIG-generated C# proxy for the native RakNet Router2DebugInterface.
/// Holds an unmanaged handle and forwards calls through RakNetPINVOKE.
/// Do not hand-edit; regenerate from the SWIG interface file instead.
/// </summary>
public class Router2DebugInterface : IDisposable {
// Handle to the underlying native object.
private HandleRef swigCPtr;
// True when this wrapper owns the native memory and must free it on dispose.
protected bool swigCMemOwn;
internal Router2DebugInterface(IntPtr cPtr, bool cMemoryOwn) {
swigCMemOwn = cMemoryOwn;
swigCPtr = new HandleRef(this, cPtr);
}
// Null-safe extraction of the native handle from a proxy instance.
internal static HandleRef getCPtr(Router2DebugInterface obj) {
return (obj == null) ? new HandleRef(null, IntPtr.Zero) : obj.swigCPtr;
}
~Router2DebugInterface() {
Dispose();
}
// NOTE(review): lock(this) and a finalizer that calls the virtual Dispose()
// deviate from the standard .NET dispose pattern, but this is SWIG-generated
// boilerplate — change the SWIG templates rather than this file.
public virtual void Dispose() {
lock(this) {
if (swigCPtr.Handle != IntPtr.Zero) {
if (swigCMemOwn) {
swigCMemOwn = false;
RakNetPINVOKE.delete_Router2DebugInterface(swigCPtr);
}
swigCPtr = new HandleRef(null, IntPtr.Zero);
}
GC.SuppressFinalize(this);
}
}
public Router2DebugInterface() : this(RakNetPINVOKE.new_Router2DebugInterface(), true) {
}
// Forwards the failure message to the native implementation.
public virtual void ShowFailure(string message) {
RakNetPINVOKE.Router2DebugInterface_ShowFailure(swigCPtr, message);
}
// Forwards the diagnostic message to the native implementation.
public virtual void ShowDiagnostic(string message) {
RakNetPINVOKE.Router2DebugInterface_ShowDiagnostic(swigCPtr, message);
}
}
| braaad/RakNet | DependentExtensions/Swig/SwigWindowsCSharpSample/SwigTestApp/SwigFiles/Router2DebugInterface.cs | C# | bsd-2-clause | 1,592 |
class ArgpStandalone < Formula
desc "Standalone version of arguments parsing functions from GLIBC"
homepage "https://www.lysator.liu.se/~nisse/misc/"
url "https://www.lysator.liu.se/~nisse/misc/argp-standalone-1.3.tar.gz"
sha256 "dec79694da1319acd2238ce95df57f3680fea2482096e483323fddf3d818d8be"
# Watch the homepage index for new argp-standalone tarballs.
livecheck do
url :homepage
regex(/href=.*?argp-standalone[._-]v?(\d+(?:\.\d+)+)\.t/i)
end
bottle do
cellar :any_skip_relocation
rebuild 1
sha256 "560e1829dce7113479d755380e0e65318a996f0d04d8e0761f24810e9e019e7d" => :catalina
sha256 "fb60d10ba68efda61d1dfdb161bcf9bfa0474c82b03e0579517cb10608aa9aea" => :mojave
sha256 "92532fafd8c2cc86b33de0f347496746d8049bb4d1a6ce0948148e0f3c4bca5a" => :high_sierra
sha256 "10627e72c0e0eb66cbd03a2beb767c06b8edad4bef01914de7f7c6c1be33a356" => :sierra
sha256 "798e6ddb78957f9ad33662287b5971aaf3a43f3646e84691d56b3b85ca06d47f" => :el_capitan
sha256 "c926ac0ad3b8dbb8c3e08299ade556470f81d3a88eb51dc60e7cfe107da533e8" => :yosemite
sha256 "789a73a54793c058ee419824d76d603562d56fe6c2bce37c6b5b47f8f0ddce2a" => :mavericks
end
# This patch fixes compilation with Clang.
patch :p0 do
url "https://raw.githubusercontent.com/Homebrew/formula-patches/b5f0ad3/argp-standalone/patch-argp-fmtstream.h"
sha256 "5656273f622fdb7ca7cf1f98c0c9529bed461d23718bc2a6a85986e4f8ed1cb8"
end
def install
system "./configure", "--prefix=#{prefix}"
system "make", "install"
# Manually install the static library and public header.
lib.install "libargp.a"
include.install "argp.h"
end
# Compile and run a tiny program that links against libargp.
test do
(testpath/"test.c").write <<~EOS
#include <stdio.h>
#include <argp.h>
int main(int argc, char ** argv)
{
return argp_parse(0, argc, argv, 0, 0, 0);
}
EOS
system ENV.cc, "test.c", "-L#{lib}", "-largp", "-o", "test"
system "./test"
end
end
| jabenninghoff/homebrew-core | Formula/argp-standalone.rb | Ruby | bsd-2-clause | 1,832 |
// Copyright (C) 2016 the V8 project authors. All rights reserved.
// This code is governed by the BSD license found in the LICENSE file.
/*---
esid: sec-html-like-comments
description: SingleLineHTMLOpenComment
info: |
Comment ::
MultiLineComment
SingleLineComment
SingleLineHTMLOpenComment
SingleLineHTMLCloseComment
SingleLineDelimitedComment
SingleLineHTMLOpenComment ::
<!--SingleLineCommentCharsopt
negative:
phase: runtime
type: Test262Error
---*/
// Each `<!--` below starts a SingleLineHTMLOpenComment: the remainder of that
// physical line is discarded. Therefore only four of the `counter += 1;`
// statements execute, and `x = -1 <!--x;` parses as `x = -1` plus a comment.
var counter = 0;
<!--
counter += 1;
<!--the comment extends to these characters
counter += 1;
counter += 1;<!--the comment extends to these characters
counter += 1;
var x = 0;
x = -1 <!--x;
// Because this test concerns the interpretation of non-executable character
// sequences within ECMAScript source code, special care must be taken to
// ensure that executable code is evaluated as expected.
//
// Express the intended behavior by intentionally throwing an error; this
// guarantees that test runners will only consider the test "passing" if
// executable sequences are correctly interpreted as such.
if (counter === 4 && x === -1) {
throw new Test262Error();
}
| sebastienros/jint | Jint.Tests.Test262/test/annexB/language/comments/single-line-html-open.js | JavaScript | bsd-2-clause | 1,179 |
class SwaggerCodegenAT2 < Formula
desc "Generate clients, server stubs, and docs from an OpenAPI spec"
homepage "https://swagger.io/swagger-codegen/"
url "https://github.com/swagger-api/swagger-codegen/archive/v2.4.12.tar.gz"
sha256 "52a61b28a95a250fdbe7ef38ef9ea1116878b6271e768e054404336ab19f1c14"
bottle do
cellar :any_skip_relocation
sha256 "76cfaccf0ba7e190381d04b08078e14e27dcb46d572d85f6f4097d78563c6113" => :catalina
sha256 "38d11eaecb8e3d0f555b8fdac370df7f5b09c41eacbc1ba70db7f51bf00cc9c9" => :mojave
sha256 "a7a408013e8775c8cd959a716e0266f2c61bd595011135ee9d605c1d05765858" => :high_sierra
end
keg_only :versioned_formula
depends_on "maven" => :build
depends_on :java => "1.8"
def install
# Need to set JAVA_HOME manually since maven overrides 1.8 with 1.7+
cmd = Language::Java.java_home_cmd("1.8")
ENV["JAVA_HOME"] = Utils.popen_read(cmd).chomp
system "mvn", "clean", "package"
# Ship the CLI jar and wrap it in a launcher script on PATH.
libexec.install "modules/swagger-codegen-cli/target/swagger-codegen-cli.jar"
bin.write_jar_script libexec/"swagger-codegen-cli.jar", "swagger-codegen"
end
test do
# A minimal OpenAPI 2.0 spec is enough to exercise the html2 generator.
(testpath/"minimal.yaml").write <<~EOS
---
swagger: '2.0'
info:
version: 0.0.0
title: Simple API
paths:
/:
get:
responses:
200:
description: OK
EOS
system "#{bin}/swagger-codegen", "generate", "-i", "minimal.yaml", "-l", "html2"
assert_includes File.read(testpath/"index.html"), "<h1>Simple API</h1>"
end
end
| BrewTestBot/homebrew-core | Formula/swagger-codegen@2.rb | Ruby | bsd-2-clause | 1,550 |
// Copyright (C) 2016 the V8 project authors. All rights reserved.
// This code is governed by the BSD license found in the LICENSE file.
/*---
esid: sec-%typedarray%.prototype.copywithin
description: >
Set values with out of bounds negative target argument.
info: |
22.2.3.5 %TypedArray%.prototype.copyWithin (target, start [ , end ] )
%TypedArray%.prototype.copyWithin is a distinct function that implements the
same algorithm as Array.prototype.copyWithin as defined in 22.1.3.3 except
that the this object's [[ArrayLength]] internal slot is accessed in place of
performing a [[Get]] of "length" and the actual copying of values in step 12
must be performed in a manner that preserves the bit-level encoding of the
source data.
...
22.1.3.3 Array.prototype.copyWithin (target, start [ , end ] )
...
4. If relativeTarget < 0, let to be max((len + relativeTarget), 0); else let
to be min(relativeTarget, len).
...
includes: [compareArray.js, testBigIntTypedArray.js]
features: [BigInt, TypedArray]
---*/
// Per step 4 quoted above, a relativeTarget below -len (including -Infinity)
// clamps the copy destination `to` to index 0.
testWithBigIntTypedArrayConstructors(function(TA) {
  assert(
    compareArray(
      new TA([0n, 1n, 2n, 3n]).copyWithin(-10, 0),
      [0n, 1n, 2n, 3n]
    ),
    '[0, 1, 2, 3].copyWithin(-10, 0) -> [0, 1, 2, 3]'
  );
  assert(
    compareArray(
      new TA([1n, 2n, 3n, 4n, 5n]).copyWithin(-Infinity, 0),
      [1n, 2n, 3n, 4n, 5n]
    ),
    '[1, 2, 3, 4, 5].copyWithin(-Infinity, 0) -> [1, 2, 3, 4, 5]'
  );
  assert(
    compareArray(
      new TA([0n, 1n, 2n, 3n, 4n]).copyWithin(-10, 2),
      [2n, 3n, 4n, 3n, 4n]
    ),
    '[0, 1, 2, 3, 4].copyWithin(-10, 2) -> [2, 3, 4, 3, 4]'
  );
  assert(
    compareArray(
      new TA([1n, 2n, 3n, 4n, 5n]).copyWithin(-Infinity, 2),
      [3n, 4n, 5n, 4n, 5n]
    ),
    '[1, 2, 3, 4, 5].copyWithin(-Infinity, 2) -> [3, 4, 5, 4, 5]'
  );
});
| sebastienros/jint | Jint.Tests.Test262/test/built-ins/TypedArray/prototype/copyWithin/BigInt/negative-out-of-bounds-target.js | JavaScript | bsd-2-clause | 1,835 |
// -*- mode: C++ -*-
//
// Copyright (c) 2007, 2008, 2009, 2010, 2011 The University of Utah
// All rights reserved.
//
// This file is part of `csmith', a random generator of C programs.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// * Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// * Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
#ifndef STATEMENT_BREAK_H
#define STATEMENT_BREAK_H
///////////////////////////////////////////////////////////////////////////////
#include <ostream>
#include "Statement.h"
class Expression;
class Block;
class CGContext;
/*
*
*/
/*
 * A generated `break` statement associated with a test expression and the
 * loop block it exits.
 */
class StatementBreak : public Statement
{
public:
	// Factory method: creates a random break statement for the given context.
	static StatementBreak *make_random(CGContext &cg_context);

	StatementBreak(Block* parent, const Expression &test, const Block& b);
	StatementBreak(const StatementBreak &sc);
	virtual ~StatementBreak(void);

	// Control-flow/introspection hooks inherited from Statement.
	virtual bool must_jump(void) const;
	// A break statement contains no nested blocks.
	// (Fixed: stray ';' after the inline empty body removed.)
	virtual void get_blocks(std::vector<const Block*>& /* blks */) const {}
	// The only embedded expression is the guard test.
	virtual void get_exprs(std::vector<const Expression*>& exps) const {exps.push_back(&test);}
	virtual bool visit_facts(vector<const Fact*>& inputs, CGContext& cg_context) const;
	virtual void Output(std::ostream &out, FactMgr* fm, int indent = 0) const;

	// Expression associated with this break — presumably its guard condition;
	// confirm in StatementBreak's Output() implementation.
	const Expression &test;
	// The loop block this break exits.
	const Block& loop_blk;
};
///////////////////////////////////////////////////////////////////////////////
#endif // STATEMENT_BREAK_H
// Local Variables:
// c-basic-offset: 4
// tab-width: 4
// End:
// End of file.
| DmitryOlshansky/dsmith | src/StatementBreak.h | C | bsd-2-clause | 2,629 |
class Travis < Formula
desc "Command-line client for Travis CI"
homepage "https://github.com/travis-ci/travis.rb/"
url "https://github.com/travis-ci/travis.rb/archive/v1.8.9.tar.gz"
sha256 "7a143bd0eb90e825370c808d38b70cca8c399c68bea8138442f40f09b6bbafc4"
revision 3
bottle do
sha256 "d695085c0886f6db8d5a1afeb6cae27e61d7fcc41d2e069c16ce0909048dba64" => :mojave
sha256 "32634e86fdc04e7ca0df1834cf1ee6e8cdd3bfb0f89c0e4ed36a0d55b08483b3" => :high_sierra
sha256 "1d21554de55885fd08e4433772663410044f088cf346b63533142019a2d865f5" => :sierra
end
depends_on "ruby" if MacOS.version <= :sierra
resource "addressable" do
url "https://rubygems.org/gems/addressable-2.4.0.gem"
sha256 "7abfff765571b0a73549c9a9d2f7e143979cd0c252f7fa4c81e7102a973ef656"
end
resource "backports" do
url "https://rubygems.org/gems/backports-3.11.3.gem"
sha256 "57b04d4e2806c199bff3663d810db25e019cf88c42cacc0edbb36d3038d6a5ab"
end
resource "ethon" do
url "https://rubygems.org/gems/ethon-0.11.0.gem"
sha256 "88ec7960a8e00f76afc96ed15dcc8be0cb515f963fe3bb1d4e0b5c51f9d7e078"
end
resource "faraday" do
url "https://rubygems.org/gems/faraday-0.15.2.gem"
sha256 "affa23f5e5ee27170cbb5045c580af9b396bac525516c6583661c2bb08038f92"
end
resource "faraday_middleware" do
url "https://rubygems.org/gems/faraday_middleware-0.12.2.gem"
sha256 "2d90093c18c23e7f5a6f602ed3114d2c62abc3f7f959dd3046745b24a863f1dc"
end
resource "ffi" do
url "https://rubygems.org/gems/ffi-1.9.25.gem"
sha256 "f854f08f08190fec772a12e863f33761d02ad3efea3c3afcdeffc8a06313f54a"
end
resource "gh" do
url "https://rubygems.org/gems/gh-0.15.1.gem"
sha256 "ef733f81c17846f217f5ad9616105e9adc337775d41de1cc330133ad25708d3c"
end
resource "highline" do
url "https://rubygems.org/gems/highline-1.7.10.gem"
sha256 "1e147d5d20f1ad5b0e23357070d1e6d0904ae9f71c3c49e0234cf682ae3c2b06"
end
if MacOS.version <= :sierra
resource "json" do
url "https://rubygems.org/gems/json-2.1.0.gem"
sha256 "b76fd09b881088c6c64a12721a1528f2f747a1c2ee52fab4c1f60db8af946607"
end
end
resource "launchy" do
url "https://rubygems.org/gems/launchy-2.4.3.gem"
sha256 "42f52ce12c6fe079bac8a804c66522a0eefe176b845a62df829defe0e37214a4"
end
resource "multi_json" do
url "https://rubygems.org/gems/multi_json-1.13.1.gem"
sha256 "db8613c039b9501e6b2fb85efe4feabb02f55c3365bae52bba35381b89c780e6"
end
resource "multipart-post" do
url "https://rubygems.org/gems/multipart-post-2.0.0.gem"
sha256 "3dc44e50d3df3d42da2b86272c568fd7b75c928d8af3cc5f9834e2e5d9586026"
end
resource "net-http-persistent" do
url "https://rubygems.org/gems/net-http-persistent-2.9.4.gem"
sha256 "24274d207ffe66222ef70c78a052c7ea6e66b4ff21e2e8a99e3335d095822ef9"
end
resource "net-http-pipeline" do
url "https://rubygems.org/gems/net-http-pipeline-1.0.1.gem"
sha256 "6923ce2f28bfde589a9f385e999395eead48ccfe4376d4a85d9a77e8c7f0b22f"
end
resource "pusher-client" do
url "https://rubygems.org/gems/pusher-client-0.6.2.gem"
sha256 "c405c931090e126c056d99f6b69a01b1bcb6cbfdde02389c93e7d547c6efd5a3"
end
resource "typhoeus" do
url "https://rubygems.org/gems/typhoeus-0.8.0.gem"
sha256 "28b7cf3c7d915a06d412bddab445df94ab725252009aa409f5ea41ab6577a30f"
end
resource "websocket" do
url "https://rubygems.org/gems/websocket-1.2.8.gem"
sha256 "1d8155c1cdaab8e8e72587a60e08423c9dd84ee44e4e827358ce3d4c2ccb2138"
end
def install
# Install every vendored gem resource into the formula's private GEM_HOME.
ENV["GEM_HOME"] = libexec
resources.each do |r|
r.verify_download_integrity(r.fetch)
system "gem", "install", r.cached_download, "--ignore-dependencies",
"--no-document", "--install-dir", libexec
end
# Build and install the travis gem itself against those vendored dependencies.
system "gem", "build", "travis.gemspec"
system "gem", "install", "--ignore-dependencies", "travis-#{version}.gem"
bin.install libexec/"bin/travis"
# Wrap the executables so they always run with the private GEM_HOME set.
bin.env_script_all_files(libexec/"bin", :GEM_HOME => ENV["GEM_HOME"])
end
# Lint a valid config, then confirm `travis init` exits 1 with the expected
# error when the GitHub repo cannot be determined.
test do
(testpath/".travis.yml").write <<~EOS
language: ruby
sudo: true
matrix:
include:
- os: osx
rvm: system
EOS
output = shell_output("#{bin}/travis lint #{testpath}/.travis.yml")
assert_match "valid", output
output = shell_output("#{bin}/travis init 2>&1", 1)
assert_match "Can't figure out GitHub repo name", output
end
end
| adamliter/homebrew-core | Formula/travis.rb | Ruby | bsd-2-clause | 4,429 |
cask 'eclipse-installer' do
# Upstream serves a rolling "latest" build, so there is no pinned version
# or verifiable checksum for this cask.
version :latest
sha256 :no_check
url 'http://eclipse.org/downloads/download.php?file=/oomph/products/eclipse-inst-mac64.tar.gz&r=1'
name 'Eclipse Installer'
homepage 'http://eclipse.org/'
license :eclipse
app 'Eclipse Installer.app'
depends_on :macos => '>= :leopard'
depends_on :arch => :x86_64
caveats <<-EOS.undent
#{token} requires Java. You can install the latest version with
brew cask install java
EOS
end
| corbt/homebrew-cask | Casks/eclipse-installer.rb | Ruby | bsd-2-clause | 477 |
# Sample code from Programing Ruby, page 18
line = 'abc'
# Non-bang gsub returns the substituted copy without modifying `line`;
# the result is discarded here — the example only demonstrates the call form.
line.gsub(/Perl|Python/, 'Ruby')
| wkoszek/book-programming-ruby | src/ex0035.rb | Ruby | bsd-2-clause | 93 |
// Copyright (c) 2012 Ecma International. All rights reserved.
// This code is governed by the BSD license found in the LICENSE file.
/*---
es5id: 15.2.3.6-3-114
description: >
Object.defineProperty - 'configurable' property in 'Attributes' is
a Boolean object (8.10.5 step 4.b)
---*/
var obj = {};
// `configurable` is a Boolean *object*; ToBoolean of any object is true, so the
// property is created configurable (8.10.5 step 4.b) and can be deleted.
Object.defineProperty(obj, "property", {
configurable: new Boolean(true)
});
var beforeDeleted = obj.hasOwnProperty("property");
delete obj.property;
var afterDeleted = obj.hasOwnProperty("property");
assert.sameValue(beforeDeleted, true, 'beforeDeleted');
assert.sameValue(afterDeleted, false, 'afterDeleted');
| sebastienros/jint | Jint.Tests.Test262/test/built-ins/Object/defineProperty/15.2.3.6-3-114.js | JavaScript | bsd-2-clause | 629 |
cask :v1 => 'jenkins' do
  version '1.639'
  sha256 'be0637aa4b0078d3998d12d1e51ae9a79343d948d21c803a1d33855e94dade36'
  url "http://mirrors.jenkins-ci.org/osx/jenkins-#{version}.pkg"
  name 'Jenkins'
  homepage 'https://jenkins-ci.org/'
  license :mit
  pkg "jenkins-#{version}.pkg"
  # Expose the runner script installed by the pkg as `jenkins-runner`.
  binary '/Library/Application Support/Jenkins/jenkins-runner.sh', :target => 'jenkins-runner'
  uninstall :script => '/Library/Application Support/Jenkins/Uninstall.command',
            :pkgutil => 'org.jenkins-ci.*pkg',
            :launchctl => 'org.jenkins-ci'
  zap :delete => '/Library/Preferences/org.jenkins-ci.plist'
  # The cask and the formulae install the same service and would collide.
  conflicts_with :formula => %w{
                   jenkins
                   homebrew/versions/jenkins-lts
                 },
                 :cask => 'caskroom/versions/jenkins-lts'
  caveats <<-EOS.undent
#{token} requires Java. You can install the latest version with
brew cask install java
You can change the launch parameters for #{token} using "defaults",
as described in
https://wiki.jenkins-ci.org/display/JENKINS/Thanks+for+using+OSX+Installer
Alternatively, you can directly run #{token} with custom parameters, eg
java -jar /Applications/Jenkins/jenkins.war -XX:PermSize=$MIN_PERM_GEN --httpPort=$HTTP_PORT
For more options, see
https://wiki.jenkins-ci.org/display/JENKINS/Starting+and+Accessing+Jenkins
EOS
end
| brianshumate/homebrew-cask | Casks/jenkins.rb | Ruby | bsd-2-clause | 1,441 |
# Copyright (c) 2016 Ruslan Baratov
# All rights reserved.
# !!! DO NOT PLACE HEADER GUARDS HERE !!!
include(hunter_add_version)
include(hunter_cacheable)
include(hunter_download)
include(hunter_pick_scheme)
# Hunter package definition for libdrm (Direct Rendering Manager userspace
# library).  Upstream releases: https://dri.freedesktop.org/libdrm/
hunter_add_version(
    PACKAGE_NAME
    drm
    VERSION
    "2.4.67"
    URL
    "https://dri.freedesktop.org/libdrm/libdrm-2.4.67.tar.bz2"
    SHA1
    21d43437219ddd1e409fb4b7d77254cd129e8075
)
hunter_pick_scheme(DEFAULT drm)
# Build results may be shared between projects through the Hunter cache.
hunter_cacheable(drm)
# The listed .la / .pc files embed absolute install paths, so they must be
# re-patched when cached binaries are relocated to a new root.
hunter_download(
    PACKAGE_NAME drm
    PACKAGE_UNRELOCATABLE_TEXT_FILES
    "lib/libdrm.la"
    "lib/libdrm_amdgpu.la"
    "lib/libdrm_nouveau.la"
    "lib/libdrm_radeon.la"
    "lib/libkms.la"
    "lib/pkgconfig/libdrm.pc"
    "lib/pkgconfig/libdrm_amdgpu.pc"
    "lib/pkgconfig/libdrm_intel.pc"
    "lib/pkgconfig/libdrm_nouveau.pc"
    "lib/pkgconfig/libdrm_radeon.pc"
    "lib/pkgconfig/libkms.pc"
)
| daminetreg/hunter | cmake/projects/drm/hunter.cmake | CMake | bsd-2-clause | 908 |
// Copyright (c) 2012 Ecma International. All rights reserved.
// This code is governed by the BSD license found in the LICENSE file.
/*---
es5id: 15.2.3.3-2-25
description: >
    Object.getOwnPropertyDescriptor - argument 'P' is a number that
    converts to a string (value is 1e-7)
---*/
var obj = {
// ToString(1e-7) is the exponential form "1e-7", so this key matches the
// numeric argument passed below.
"1e-7": 1
};
var desc = Object.getOwnPropertyDescriptor(obj, 1e-7);
assert.sameValue(desc.value, 1, 'desc.value');
| sebastienros/jint | Jint.Tests.Test262/test/built-ins/Object/getOwnPropertyDescriptor/15.2.3.3-2-25.js | JavaScript | bsd-2-clause | 425 |
/*
** Copyright 2001, Travis Geiselbrecht. All rights reserved.
** Distributed under the terms of the NewOS License.
*/
#include <boot/stage2.h>
#include <boot/shared/openfirmware.h>
#include <kernel/kernel.h>
#include <arch/cpu.h>
#include <libc/string.h>
#include "stage2_priv.h"
static unsigned int primary_hash(unsigned int vsid, unsigned int vaddr);
static unsigned int secondary_hash(unsigned int primary_hash);
static struct ppc_pteg *ptable = 0;
static int ptable_size = 0;
static unsigned int ptable_hash_mask = 0;
static unsigned long total_ram_size = 0;
static void print_pte(struct ppc_pte *e);
/*
 * Return true when the closed intervals [base1, base1 + len1] and
 * [base2, base2 + len2] overlap in any way: either range starting or
 * ending inside the other counts as an intersection.
 */
static bool does_intersect(unsigned long base1, unsigned long len1, unsigned long base2, unsigned long len2)
{
	unsigned long last1 = base1 + len1;
	unsigned long last2 = base2 + len2;

	return (base2 >= base1 && base2 <= last1)	/* second range starts inside first */
	    || (last2 >= base1 && last2 <= last1)	/* second range ends inside first */
	    || (base1 >= base2 && base1 <= last2)	/* first range starts inside second */
	    || (last1 >= base2 && last1 <= last2);	/* first range ends inside second */
}
/*
 * Query Open Firmware for the machine's physical memory layout and copy it
 * into ka->phys_mem_range[], merging physically adjacent regions.  Also
 * accumulates the file-global total_ram_size, used later to size the HTAB.
 */
static void find_phys_memory_map(kernel_args *ka)
{
	int handle;
	unsigned int i;
	struct mem_region {
		unsigned long pa;
		int len;
	} mem_regions[32];
	unsigned int mem_regions_len = 0;

	// get the physical memory map of the system
	handle = of_finddevice("/chosen");
	of_getprop(handle, "memory", &handle, sizeof(handle));
	handle = of_instance_to_package(handle);
	memset(mem_regions, 0, sizeof(mem_regions));
	// of_getprop returns the number of bytes copied; convert to entry count
	mem_regions_len = of_getprop(handle, "reg", mem_regions, sizeof(mem_regions));
	mem_regions_len /= sizeof(struct mem_region);

	printf("num mem regions %d\n", mem_regions_len);

	// copy these regions over to the kernel args structure
	ka->num_phys_mem_ranges = 0;
	for(i=0; i<mem_regions_len; i++) {
		if(mem_regions[i].len > 0) {
			total_ram_size += mem_regions[i].len;
			if(ka->num_phys_mem_ranges > 0) {
				if(mem_regions[i].pa == ka->phys_mem_range[ka->num_phys_mem_ranges-1].start + ka->phys_mem_range[ka->num_phys_mem_ranges-1].size) {
					// this range just extends the old one
					ka->phys_mem_range[ka->num_phys_mem_ranges-1].size += mem_regions[i].len;
					continue;
				}
			}
			ka->phys_mem_range[ka->num_phys_mem_ranges].start = mem_regions[i].pa;
			ka->phys_mem_range[ka->num_phys_mem_ranges].size = mem_regions[i].len;
			ka->num_phys_mem_ranges++;
			if(ka->num_phys_mem_ranges == MAX_PHYS_MEM_ADDR_RANGE) {
				printf("too many physical memory maps, increase MAX_PHYS_MEM_ADDR_RANGE\n");
				for(;;);	// unrecoverable this early in boot; hang
			}
		}
	}

	printf("num phys mem regions %d\n", ka->num_phys_mem_ranges);
	for(i=0; i<ka->num_phys_mem_ranges; i++) {
		printf(" phys map %d: pa 0x%lx, len 0x%lx\n", i, ka->phys_mem_range[i].start, ka->phys_mem_range[i].size);
	}
}
/*
 * Return true when addr lies within any known physical memory range.
 * Implemented as an intersection test against a zero-length range, which
 * makes the interval closed: addr == start + size also counts as inside.
 */
static bool is_in_phys_mem(kernel_args *ka, unsigned long addr)
{
	unsigned int i;

	for(i = 0; i < ka->num_phys_mem_ranges; i++) {
		if(does_intersect(ka->phys_mem_range[i].start, ka->phys_mem_range[i].size, addr, 0))
			return true;
	}
	return false;
}
/*
 * Record the physical pages covering [base, base+len) as allocated in
 * ka->phys_alloc_range[].  Works page by page: each page either extends an
 * existing run at its end, prepends to a run, is skipped when a run already
 * covers it, or starts a brand-new run.
 */
static void mark_used_phys_mem_range(kernel_args *ka, unsigned long base, unsigned long len)
{
	unsigned int i;
	unsigned long start;

	// normalize the request to whole pages
	base = ROUNDOWN(base, PAGE_SIZE);
	len = ROUNDUP(len, PAGE_SIZE);

	start = base;
	while(start < base + len){
		// cycle through the list of physical runs of used pages,
		// seeing if start will intersect one of them
		for(i = 0; i < ka->num_phys_alloc_ranges; i++) {
			if(start == ka->phys_alloc_range[i].start + ka->phys_alloc_range[i].size) {
				// it will extend it
				ka->phys_alloc_range[i].size += PAGE_SIZE;
				goto next_page;
			}
			if(start + PAGE_SIZE == ka->phys_alloc_range[i].start) {
				// it will prepend it
				ka->phys_alloc_range[i].start = start;
				ka->phys_alloc_range[i].size += PAGE_SIZE;
				goto next_page;
			}
			if(does_intersect(ka->phys_alloc_range[i].start, ka->phys_alloc_range[i].size, start, PAGE_SIZE)) {
				// it's off in the middle of this range, skip it
				goto next_page;
			}
		}
		// didn't find it in one of the existing ranges, must need to start a new one
		if(ka->num_phys_alloc_ranges >= MAX_PHYS_ALLOC_ADDR_RANGE) {
			printf("mark_used_phys_mem_range: MAX_PHYS_ALLOC_ADDR_RANGE (%d) too small\n", MAX_PHYS_ALLOC_ADDR_RANGE);
			for(;;);	// fatal during early boot; hang
		}

		// create a new allocated range
		ka->phys_alloc_range[ka->num_phys_alloc_ranges].start = start;
		ka->phys_alloc_range[ka->num_phys_alloc_ranges].size = PAGE_SIZE;
		ka->num_phys_alloc_ranges++;
next_page:
		start += PAGE_SIZE;
	}
}
/*
 * Ask Open Firmware for its current virtual->physical translations and mark
 * the physical pages behind them as allocated.  Only translations whose
 * virtual address falls inside physical RAM are counted -- presumably these
 * are identity mappings of the loaded stage1/bootdir images (TODO confirm).
 */
static void find_used_phys_memory_map(kernel_args *ka)
{
	int handle;
	unsigned int i;
	struct translation_map {
		unsigned long va;
		int len;
		unsigned long pa;
		int mode;
	} memmap[64];
	unsigned int translation_map_len = 0;

	printf("looking for current translations...\n");

	ka->num_phys_alloc_ranges = 0;

	// get the current translation map of the system,
	// to find how much memory was mapped to load the stage1 and bootdir
	handle = of_finddevice("/chosen");
	of_getprop(handle, "mmu", &handle, sizeof(handle));
	handle = of_instance_to_package(handle);
	memset(memmap, 0, sizeof(memmap));
	translation_map_len = of_getprop(handle, "translations", memmap, sizeof(memmap));
	translation_map_len /= sizeof(struct translation_map);

	printf("found %d translations\n", translation_map_len);
	for(i=0; i<translation_map_len; i++) {
		printf("package loaded at pa 0x%lx va 0x%lx, len 0x%x\n", memmap[i].pa, memmap[i].va, memmap[i].len);
		if(is_in_phys_mem(ka, memmap[i].va)) {
			// we found the translation that covers the loaded package. Save this.
			mark_used_phys_mem_range(ka, memmap[i].pa, memmap[i].len);
		}
	}

	for(i=0; i<ka->num_phys_alloc_ranges; i++) {
		printf("phys alloc map %d: pa 0x%lx, len 0x%lx\n", i, ka->phys_alloc_range[i].start, ka->phys_alloc_range[i].size);
	}
}
/*
 * Virtual-address twin of mark_used_phys_mem_range(): record the pages
 * covering [base, base+len) as allocated in ka->virt_alloc_range[], merging
 * into adjacent runs, skipping pages already covered, or starting a new run.
 */
static void mark_used_virt_mem_range(kernel_args *ka, unsigned long base, unsigned long len)
{
	unsigned int i;
	unsigned long start;

	// normalize the request to whole pages
	base = ROUNDOWN(base, PAGE_SIZE);
	len = ROUNDUP(len, PAGE_SIZE);

	start = base;
	while(start < base + len) {
		// cycle through the list of virtual runs of used pages,
		// seeing if start will intersect one of them
		for(i = 0; i < ka->num_virt_alloc_ranges; i++) {
			if(start == ka->virt_alloc_range[i].start + ka->virt_alloc_range[i].size) {
				// it will extend it
				ka->virt_alloc_range[i].size += PAGE_SIZE;
				goto next_page;
			}
			if(start + PAGE_SIZE == ka->virt_alloc_range[i].start) {
				// it will prepend it
				ka->virt_alloc_range[i].start = start;
				ka->virt_alloc_range[i].size += PAGE_SIZE;
				goto next_page;
			}
			if(does_intersect(ka->virt_alloc_range[i].start, ka->virt_alloc_range[i].size, start, PAGE_SIZE)) {
				// it's off in the middle of this range, skip it
				goto next_page;
			}
		}
		// didn't find it in one of the existing ranges, must need to start a new one
		if(ka->num_virt_alloc_ranges >= MAX_VIRT_ALLOC_ADDR_RANGE) {
			printf("mark_used_virt_mem_range: MAX_VIRT_ALLOC_ADDR_RANGE (%d) too small\n", MAX_VIRT_ALLOC_ADDR_RANGE);
			for(;;);	// fatal during early boot; hang
		}

		// create a new allocated range
		ka->virt_alloc_range[ka->num_virt_alloc_ranges].start = start;
		ka->virt_alloc_range[ka->num_virt_alloc_ranges].size = PAGE_SIZE;
		ka->num_virt_alloc_ranges++;
next_page:
		start += PAGE_SIZE;
	}
}
/*
 * Hand out one physical page for early boot use and return its address.
 * Pages are carved off the end of the first allocated physical run (or a
 * run is started at the base of physical memory if none exists yet).
 * NOTE(review): assumes the first run can always grow without colliding
 * with another run or running past the memory range -- see XXX below.
 */
unsigned long mmu_allocate_page(kernel_args *ka)
{
	unsigned long page;

	if(ka->num_phys_alloc_ranges == 0) {
		// no physical allocated ranges, start one
		page = ka->phys_mem_range[0].start;
		mark_used_phys_mem_range(ka, page, PAGE_SIZE);
		return page;
	}

	// allocate from the first allocated physical range
	page = ka->phys_alloc_range[0].start + ka->phys_alloc_range[0].size;
	ka->phys_alloc_range[0].size += PAGE_SIZE;

	// XXX check for validity better
	return page;
}
/*
 * Software "tlbia": invalidate TLB entries for each 4KB page in the range
 * 0..0x40000 and broadcast the invalidations with tlbsync.  On classic
 * PowerPC parts tlbie indexes the TLB by low effective-address bits, so the
 * 64 page indexes touched here presumably cover every TLB congruence class,
 * flushing the whole TLB -- TODO confirm against the target core's manual.
 *
 * Fixed: the definition used an empty parameter list `()`, which in C
 * declares unspecified arguments; `(void)` declares the intended prototype.
 */
static void tlbia(void)
{
	unsigned long i;

	asm volatile("sync");	// complete all prior storage accesses first
	for(i=0; i< 0x40000; i += 0x1000) {
		asm volatile("tlbie %0" :: "r" (i));
		asm volatile("eieio");
		asm volatile("sync");
	}
	asm volatile("tlbsync");	// wait for invalidates on other processors
	asm volatile("sync");
}
#define CACHELINE 32

/*
 * Make instruction fetches coherent with freshly written code: flush the
 * data cache (dcbst) for every cache line covering [address, address+len),
 * then invalidate the matching instruction cache lines (icbi).
 */
void syncicache(void *address, int len)
{
	int l, off;
	char *p;

	// align the starting pointer down to a cache-line boundary and widen
	// len so the loop still covers the whole requested region
	off = (unsigned int)address & (CACHELINE - 1);
	len += off;
	l = len;
	p = (char *)address - off;

	// push modified data cache lines out to memory
	do {
		asm volatile ("dcbst 0,%0" :: "r"(p));
		p += CACHELINE;
	} while((l -= CACHELINE) > 0);
	asm volatile ("sync");	// wait for the flushes to finish

	// discard any stale instruction cache lines for the same region
	p = (char *)address - off;
	do {
		asm volatile ("icbi 0,%0" :: "r"(p));
		p += CACHELINE;
	} while((len -= CACHELINE) > 0);
	asm volatile ("sync");
	asm volatile ("isync");	// drop instructions prefetched from stale lines
}
/*
 * Main stage2 MMU setup: discover physical memory, size and allocate a new
 * hashed page table (HTAB), program identity-mapped BAT registers, set up
 * the 16 segment registers, and point SDR1 at the new table.
 * Always returns 0.
 */
int s2_mmu_init(kernel_args *ka)
{
	unsigned int ibats[8];
	unsigned int dbats[8];
	unsigned long top_ram = 0;	// only referenced by the disabled block below
	int i;

	ka->num_virt_alloc_ranges = 0;

	// figure out where physical memory is and what is being used
	find_phys_memory_map(ka);
	find_used_phys_memory_map(ka);

#if 0
	// find the largest address of physical memory, but with a max of 256 MB,
	// so it'll be within our 256 MB BAT window
	for(i=0; i<ka->num_phys_mem_ranges; i++) {
		if(ka->phys_mem_range[i].start + ka->phys_mem_range[i].size > top_ram) {
			if(ka->phys_mem_range[i].start + ka->phys_mem_range[i].size > 256*1024*1024) {
				if(ka->phys_mem_range[i].start < 256*1024*1024) {
					top_ram = 256*1024*1024;
					break;
				}
			}
			top_ram = ka->phys_mem_range[i].start + ka->phys_mem_range[i].size;
		}
	}
	printf("top of ram (but under 256MB) is 0x%x\n", top_ram);
#endif

	// figure the size of the new pagetable, as recommended by Motorola
	if(total_ram_size <= 8*1024*1024) {
		ptable_size = 64*1024;
	} else if(total_ram_size <= 16*1024*1024) {
		ptable_size = 128*1024;
	} else if(total_ram_size <= 32*1024*1024) {
		ptable_size = 256*1024;
	} else if(total_ram_size <= 64*1024*1024) {
		ptable_size = 512*1024;
	} else if(total_ram_size <= 128*1024*1024) {
		ptable_size = 1024*1024;
	} else if(total_ram_size <= 256*1024*1024) {
		ptable_size = 2*1024*1024;
	} else if(total_ram_size <= 512*1024*1024) {
		ptable_size = 4*1024*1024;
	} else if(total_ram_size <= 1024*1024*1024) {
		ptable_size = 8*1024*1024;
	} else if(total_ram_size <= 2*1024*1024*1024UL) {
		ptable_size = 16*1024*1024;
	} else {
		ptable_size = 32*1024*1024;
	}

	// look at the old page table (SDR1 encodes base | size mask)
	printf("old page table at 0x%x, size 0x%x\n", (addr_t)getsdr1() & 0xffff0000,
		(((addr_t)getsdr1() & 0x1ff) + 1) << 16);

	// figure out where to put the page table
	printf("allocating a page table using claim\n");
	ptable_hash_mask = (ptable_size >> 6) - 1;	// one PTEG is 64 bytes
	ptable = (struct ppc_pteg *)of_claim(0, ptable_size, ptable_size);
	printf("ptable at pa 0x%x, size 0x%x\n", ptable, ptable_size);
	printf("mask = 0x%x\n", ptable_hash_mask);

	// mark it used
	mark_used_phys_mem_range(ka, (unsigned long)ptable, ptable_size);

	// save its new location in the kernel args
	ka->arch_args.page_table.start = (unsigned long)ptable;
	ka->arch_args.page_table.size = ptable_size;
	ka->arch_args.page_table_mask = ptable_hash_mask;

#if 0
	{
		struct ppc_pteg *old_ptable;
		int j;

		printf("sdr1 = 0x%x\n", getsdr1());
		old_ptable = (struct ppc_pteg *)((unsigned int)getsdr1() & 0xffff0000);
		printf("old_ptable %p\n", old_ptable);
		for(i=0; i< (64*1024) >> 6 ; i++) {
			for(j=0; j< 8; j++)
				if(old_ptable[i].pte[j].v && old_ptable[i].pte[j].vsid == 0)
					print_pte(&old_ptable[i].pte[j]);
		}
	}
#endif
	unsigned int sp;
	asm volatile("mr %0,1" : "=r"(sp));	// read the stack pointer (r1), debug only
	printf("sp = 0x%x\n", sp);

	/* set up the new BATs */
	getibats(ibats);
	getdbats(dbats);
	for(i=0; i<8; i++) {
		ibats[i] = 0;
		dbats[i] = 0;
	}
	// identity map the first 256MB of RAM
	dbats[0] = ibats[0] = BATU_LEN_256M | BATU_VS;
	dbats[1] = ibats[1] = BATL_CI | BATL_PP_RW;
	// XXX fix
	// cache-inhibited identity mappings of the 0x8000_0000, 0x9000_0000 and
	// 0xf000_0000 windows -- presumably device/I/O space (TODO confirm)
	dbats[2] = ibats[2] = 0x80000000 | BATU_LEN_256M | BATU_VS;
	dbats[3] = ibats[3] = 0x80000000 | BATL_CI | BATL_PP_RW;
	dbats[4] = ibats[4] = 0x90000000 | BATU_LEN_256M | BATU_VS;
	dbats[5] = ibats[5] = 0x90000000 | BATL_CI | BATL_PP_RW;
	dbats[6] = ibats[6] = 0xf0000000 | BATU_LEN_256M | BATU_VS;
	dbats[7] = ibats[7] = 0xf0000000 | BATL_CI | BATL_PP_RW;

#if 0
	// map the framebuffer using a BAT to 256MB
	{
		unsigned int framebuffer_phys = ka->fb.mapping.start & ~((16*1024*1024) - 1);

		dbats[2] = ibats[2] = 0x10000000 | BATU_LEN_16M | BATU_VS;
		dbats[3] = ibats[3] = framebuffer_phys | BATL_CI | BATL_PP_RW;
		printf("remapping framebuffer at pa 0x%x to va 0x%x using BAT\n",
			ka->fb.mapping.start, 0x10000000 + ka->fb.mapping.start - framebuffer_phys);
		s2_change_framebuffer_addr(ka, 0x10000000 + ka->fb.mapping.start - framebuffer_phys);
	}
#endif

	setibats(ibats);
	setdbats(dbats);
	tlbia();

	printf("unsetting the old page table\n");
	setsdr1(0);
	tlbia();

	printf("memsetting new pagetable\n");
	memset(ptable, 0, ptable_size);
	printf("done\n");

	printf("setting up the 16 segment registers\n");
	// set up the segment registers
	for(i=0; i<16; i++) {
		setsr(i * 0x10000000, i);
	}

	printf("done, setting sdr1\n");
	setsdr1(((unsigned int)ptable & 0xffff0000) | (ptable_hash_mask >> 10));
	tlbia();
	printf("sdr1 = 0x%x\n", getsdr1());

#if 0
	mmu_map_page(0x96008000, 0x96008000);
	mmu_map_page(0x96009000, 0x96009000);
	mmu_map_page(0x9600a000, 0x9600a000);
	mmu_map_page(0x96008000, 0x30000000);

	printf("testing...\n");
	printf("hello\n");
	printf("%d\n", *(int *)0x30000000);
	printf("%d\n", *(int *)0x96008000);
	*(int *)0x30000000 = 0x99;
	printf("%d\n", *(int *)0x30000000);
	printf("%d\n", *(int *)0x96008000);
	printf("hello2\n");
#endif
	printf("done\n");

	return 0;
}
/*
 * Map the physical page table into kernel virtual space, one page at a
 * time, recording the new virtual location in ka->arch_args.page_table_virt.
 * Returns NO_ERROR.
 *
 * Fixed: the function is declared int but previously ended without a return
 * statement, so reading its result was undefined behavior.
 */
int s2_mmu_remap_pagetable(kernel_args *ka)
{
	unsigned long i;
	unsigned long new_ptable;

	// find a new spot to allocate the page table
	// XXX make better
	new_ptable = ka->virt_alloc_range[0].start + ka->virt_alloc_range[0].size;

	// map every physical page of the table at its new virtual home (cached)
	for(i = 0; i < ptable_size; i += PAGE_SIZE) {
		mmu_map_page(ka, ka->arch_args.page_table.start + i, new_ptable + i, true);
	}

	ka->arch_args.page_table_virt.start = new_ptable;
	ka->arch_args.page_table_virt.size = ka->arch_args.page_table.size;

	return NO_ERROR;
}
/*
 * Clear BAT pair 2/3 (previously used for the framebuffer mapping) in both
 * the instruction and data BAT register sets.  Returns NO_ERROR.
 */
int s2_mmu_remove_fb_bat_entries(kernel_args *ka)
{
	unsigned int ibat[8];
	unsigned int dbat[8];

	// zero out the 2nd bat entry, used to map the framebuffer
	getibats(ibat);
	getdbats(dbat);
	ibat[2] = ibat[3] = dbat[2] = dbat[3] = 0;
	setibats(ibat);
	setdbats(dbat);

	return NO_ERROR;
}
/* Debug dump of one page table entry; field details only for valid entries. */
static void print_pte(struct ppc_pte *e)
{
	printf("entry %p: ", e);
	printf("v %d ", e->v);
	if(e->v) {
		printf("vsid 0x%x ", e->vsid);
		printf("hash %d ", e->hash);
		printf("api 0x%x ", e->api);
		printf("ppn 0x%x ", e->ppn);
		printf("r %d ", e->r);
		printf("c %d ", e->c);
		printf("wimg 0x%x ", e->wimg);
		printf("pp 0x%x ", e->pp);
	}
	printf("\n");
}
/*
 * Enter a virtual->physical translation for one page into the hashed page
 * table.  Only the primary hash bucket (PTEG) is searched; if all 8 slots
 * are occupied the mapping is silently dropped.
 * NOTE(review): the secondary hash is never tried -- confirm that is
 * acceptable for the small number of mappings stage2 creates.
 */
void mmu_map_page(kernel_args *ka, unsigned long pa, unsigned long va, bool cached)
{
	unsigned int hash;
	struct ppc_pteg *pteg;
	int i;
	unsigned int vsid;

	// mark it used if this is in the kernel area
	if(va >= KERNEL_BASE) {
		mark_used_virt_mem_range(ka, va, PAGE_SIZE);
	}

	// lookup the vsid based off the va (the segment register holds it)
	vsid = getsr(va) & 0xffffff;

//	printf("mmu_map_page: vsid %d, pa 0x%x, va 0x%x\n", vsid, pa, va);

	hash = primary_hash(vsid, va);
//	printf("hash = 0x%x\n", hash);

	pteg = &ptable[hash];
//	printf("pteg @ 0x%x\n", pteg);

	// search for the first free slot for this pte
	for(i=0; i<8; i++) {
//		printf("trying pteg[%i]\n", i);
		if(pteg->pte[i].v == 0) {
			// upper word
			pteg->pte[i].ppn = pa / PAGE_SIZE;
			pteg->pte[i].unused = 0;
			pteg->pte[i].r = 0;
			pteg->pte[i].c = 0;
			// cache attribute: 0 when cached, top WIMG field bit when not
			// (exact WIMG bit meaning depends on the ppc_pte bitfield layout)
			pteg->pte[i].wimg = cached ? 0 : (1 << 3);
			pteg->pte[i].unused1 = 0;
			pteg->pte[i].pp = 0x2; // RW
			asm volatile("eieio");	// order the word updates before setting v
			// lower word; v is set last so the entry becomes visible whole
			pteg->pte[i].vsid = vsid;
			pteg->pte[i].hash = 0; // primary
			pteg->pte[i].api = (va >> 22) & 0x3f;
			pteg->pte[i].v = 1;
			tlbia();
//			printf("set pteg to ");
//			print_pte(&pteg->pte[i]);
//			printf("set pteg to 0x%x 0x%x\n", *((int *)&pteg->pte[i]), *(((int *)&pteg->pte[i])+1));
			return;
		}
	}
}
/*
 * Classic PowerPC primary HTAB hash: XOR the low 19 bits of the VSID with
 * the 16-bit page index of the address, then mask to the table size.
 * The result indexes ptable[] directly (one entry per PTEG).
 */
static unsigned int primary_hash(unsigned int vsid, unsigned int vaddr)
{
	unsigned int page_index;

	vsid &= 0x7ffff;
	page_index = (vaddr >> 12) & 0xffff;	// virtual page number within the segment

	return (vsid ^ page_index) & ptable_hash_mask;
}
/*
 * PowerPC secondary HTAB hash: ones-complement of the primary hash; a
 * caller must mask it with ptable_hash_mask before indexing ptable[].
 * Currently unused here -- mmu_map_page() only fills the primary bucket.
 */
static unsigned int secondary_hash(unsigned int primary_hash)
{
	return ~primary_hash;
}
| dioptre/newos | boot/ppc/stage2_mmu.c | C | bsd-3-clause | 16,217 |
// Copyright Neil Groves 2009. Use, modification and
// distribution is subject to the Boost Software License, Version
// 1.0. (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
//
//
// For more information, see http://www.boost.org/libs/range/
//
#ifndef BOOST_RANGE_DETAIL_RANGE_RETURN_HPP_INCLUDED
#define BOOST_RANGE_DETAIL_RANGE_RETURN_HPP_INCLUDED
#include <boost/range/begin.hpp>
#include <boost/range/end.hpp>
#include <boost/range/iterator_range.hpp>
namespace pdalboost
{
    // Policy tags selecting what a range algorithm should hand back: either
    // a single iterator positioned relative to the "found" element, or a
    // subrange anchored at begin()/end() of the searched range.
    enum range_return_value
    {
        // (*) indicates the most common values
        return_found, // only the found resulting iterator (*)
        return_next, // next(found) iterator
        return_prior, // prior(found) iterator
        return_begin_found, // [begin, found) range (*)
        return_begin_next, // [begin, next(found)) range
        return_begin_prior, // [begin, prior(found)) range
        return_found_end, // [found, end) range (*)
        return_next_end, // [next(found), end) range
        return_prior_end, // [prior(found), end) range
        return_begin_end // [begin, end) range
    };

    // Primary template (every enumerator below is specialized; this
    // fallback packs [found, end) like return_found_end).
    template< class SinglePassRange, range_return_value >
    struct range_return
    {
        typedef pdalboost::iterator_range<
            BOOST_DEDUCED_TYPENAME range_iterator<SinglePassRange>::type > type;

        static type pack(BOOST_DEDUCED_TYPENAME range_iterator<SinglePassRange>::type found,
                         SinglePassRange& rng)
        {
            return type(found, pdalboost::end(rng));
        }
    };

    // return_found: just the found iterator itself.
    template< class SinglePassRange >
    struct range_return< SinglePassRange, return_found >
    {
        typedef BOOST_DEDUCED_TYPENAME range_iterator<SinglePassRange>::type type;

        static type pack(type found, SinglePassRange&)
        {
            return found;
        }
    };

    // return_next: next(found), clamped so end() is never advanced past.
    template< class SinglePassRange >
    struct range_return< SinglePassRange, return_next >
    {
        typedef BOOST_DEDUCED_TYPENAME range_iterator<SinglePassRange>::type type;

        static type pack(type found, SinglePassRange& rng)
        {
            return found == pdalboost::end(rng)
                ? found
                : pdalboost::next(found);
        }
    };

    // return_prior: prior(found), clamped at begin(); needs bidirectional.
    template< class BidirectionalRange >
    struct range_return< BidirectionalRange, return_prior >
    {
        typedef BOOST_DEDUCED_TYPENAME range_iterator<BidirectionalRange>::type type;

        static type pack(type found, BidirectionalRange& rng)
        {
            return found == pdalboost::begin(rng)
                ? found
                : pdalboost::prior(found);
        }
    };

    // return_begin_found: the half-open subrange [begin, found).
    template< class SinglePassRange >
    struct range_return< SinglePassRange, return_begin_found >
    {
        typedef pdalboost::iterator_range<
            BOOST_DEDUCED_TYPENAME range_iterator<SinglePassRange>::type > type;

        static type pack(BOOST_DEDUCED_TYPENAME range_iterator<SinglePassRange>::type found,
                         SinglePassRange& rng)
        {
            return type(pdalboost::begin(rng), found);
        }
    };

    // return_begin_next: [begin, next(found)), with next clamped at end().
    template< class SinglePassRange >
    struct range_return< SinglePassRange, return_begin_next >
    {
        typedef pdalboost::iterator_range<
            BOOST_DEDUCED_TYPENAME range_iterator<SinglePassRange>::type > type;

        static type pack(BOOST_DEDUCED_TYPENAME range_iterator<SinglePassRange>::type found,
                         SinglePassRange& rng)
        {
            return type( pdalboost::begin(rng),
                         found == pdalboost::end(rng) ? found : pdalboost::next(found) );
        }
    };

    // return_begin_prior: [begin, prior(found)), with prior clamped at begin().
    template< class BidirectionalRange >
    struct range_return< BidirectionalRange, return_begin_prior >
    {
        typedef pdalboost::iterator_range<
            BOOST_DEDUCED_TYPENAME range_iterator<BidirectionalRange>::type > type;

        static type pack(BOOST_DEDUCED_TYPENAME range_iterator<BidirectionalRange>::type found,
                         BidirectionalRange& rng)
        {
            return type( pdalboost::begin(rng),
                         found == pdalboost::begin(rng) ? found : pdalboost::prior(found) );
        }
    };

    // return_found_end: [found, end).
    template< class SinglePassRange >
    struct range_return< SinglePassRange, return_found_end >
    {
        typedef pdalboost::iterator_range<
            BOOST_DEDUCED_TYPENAME range_iterator<SinglePassRange>::type > type;

        static type pack(BOOST_DEDUCED_TYPENAME range_iterator<SinglePassRange>::type found,
                         SinglePassRange& rng)
        {
            return type(found, pdalboost::end(rng));
        }
    };

    // return_next_end: [next(found), end), with next clamped at end().
    template< class SinglePassRange >
    struct range_return< SinglePassRange, return_next_end >
    {
        typedef pdalboost::iterator_range<
            BOOST_DEDUCED_TYPENAME range_iterator<SinglePassRange>::type > type;

        static type pack(BOOST_DEDUCED_TYPENAME range_iterator<SinglePassRange>::type found,
                         SinglePassRange& rng)
        {
            return type( found == pdalboost::end(rng) ? found : pdalboost::next(found),
                         pdalboost::end(rng) );
        }
    };

    // return_prior_end: [prior(found), end), with prior clamped at begin().
    template< class BidirectionalRange >
    struct range_return< BidirectionalRange, return_prior_end >
    {
        typedef pdalboost::iterator_range<
            BOOST_DEDUCED_TYPENAME range_iterator<BidirectionalRange>::type > type;

        static type pack(BOOST_DEDUCED_TYPENAME range_iterator<BidirectionalRange>::type found,
                         BidirectionalRange& rng)
        {
            return type( found == pdalboost::begin(rng) ? found : pdalboost::prior(found),
                         pdalboost::end(rng) );
        }
    };

    // return_begin_end: the entire input range; the found iterator is ignored.
    template< class SinglePassRange >
    struct range_return< SinglePassRange, return_begin_end >
    {
        typedef pdalboost::iterator_range<
            BOOST_DEDUCED_TYPENAME range_iterator<SinglePassRange>::type > type;

        static type pack(BOOST_DEDUCED_TYPENAME range_iterator<SinglePassRange>::type,
                         SinglePassRange& rng)
        {
            return type(pdalboost::begin(rng), pdalboost::end(rng));
        }
    };
}
#endif // include guard
| lucadelu/PDAL | vendor/pdalboost/boost/range/detail/range_return.hpp | C++ | bsd-3-clause | 6,270 |
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "webkit/media/webmediaplayer_params.h"
#include "media/base/audio_renderer_sink.h"
#include "media/base/media_log.h"
namespace webkit_media {
// Bundles the collaborators a WebMediaPlayer needs at construction time.
WebMediaPlayerParams::WebMediaPlayerParams(
    const scoped_refptr<media::AudioRendererSink>& audio_renderer_sink,
    const scoped_refptr<media::GpuVideoDecoder::Factories>& gpu_factories,
    const scoped_refptr<media::MediaLog>& media_log)
    : audio_renderer_sink_(audio_renderer_sink),
      gpu_factories_(gpu_factories),
      media_log_(media_log) {
  // A media log is mandatory.  NOTE(review): the sink and GPU factories are
  // not checked here, so they are presumably allowed to be null -- confirm
  // at the call sites.
  DCHECK(media_log_);
}

WebMediaPlayerParams::~WebMediaPlayerParams() {}
} // namespace webkit_media
| codenote/chromium-test | webkit/media/webmediaplayer_params.cc | C++ | bsd-3-clause | 788 |
from .. utils import TranspileTestCase, BuiltinFunctionTestCase
class PrintTests(TranspileTestCase):
    # Each case hands a program to assertCodeExecution, which runs it under
    # both CPython and the transpiler and compares the outputs.  The embedded
    # source strings are part of the test contract and must not be altered.

    def test_fileobj(self):
        # print(..., file=obj) must route output through obj.write().
        self.assertCodeExecution("""
            class FileLikeObject:
                def __init__(self):
                    self.buffer = ''
                def write(self, content):
                    self.buffer = self.buffer + (content * 2)
            out = FileLikeObject()
            print('hello', 'world', file=out)
            print('goodbye', 'world', file=out)
            print()
            """)

    def test_sep(self):
        # The sep= keyword replaces the default single-space separator.
        self.assertCodeExecution("""
            print('hello world', 'goodbye world', sep='-')
            print()
            """)

    def test_end(self):
        # The end= keyword replaces the default trailing newline.
        self.assertCodeExecution("""
            print('hello world', 'goodbye world', end='-')
            print()
            """)

    def test_flush(self):
        # flush=True must be accepted (and forwarded to the stream if it
        # provides a flush() method).
        self.assertCodeExecution("""
            print('hello world', 'goodbye world', flush=True)
            print()
            """)

    def test_combined(self):
        # All keyword arguments together: sep, end, file and flush.
        self.assertCodeExecution("""
            class FileLikeObject:
                def __init__(self):
                    self.buffer = ''
                def write(self, content):
                    self.buffer = self.buffer + (content * 2)
                def flush(self):
                    self.buffer = self.buffer + '<<<'
            out = FileLikeObject()
            print('hello', 'world', sep='*', end='-', file=out, flush=True)
            print('goodbye', 'world', file=out, sep='-', end='*')
            print()
            """)
class BuiltinPrintFunctionTests(BuiltinFunctionTestCase, TranspileTestCase):
    # Auto-generated argument-type checks for the builtin under test.
    functions = ["print"]

    # Known failing generated cases; remove entries as support lands.
    not_implemented = [
        'test_class',
        'test_frozenset',
        'test_slice',
    ]
| pombredanne/voc | tests/builtins/test_print.py | Python | bsd-3-clause | 1,816 |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01//EN">
<html>
<head>
<title>Forms :: submit input : float alongside inline content</title>
<style type="text/css">
form * {
font-family: Ahem;
font-size: 1em;
line-height: 1em;
}
fieldset, div {
color: #3366CC;
background-color: red;
padding: 0;
margin: 0;
border: none;
width: 22em;
}
input, span {
color: black;
padding: 0;
margin: 0;
border: none;
width: 1em;
height: 1em;
float: left;
}
h4 {clear: both;}
</style>
</head>
<body>
<form action="">
<fieldset>
<input type="submit" value="x">xxxxxxxxxx<input type="submit" value="x">xxxxxxxxxx
</fieldset>
<div>
<input type="submit" value="x">xxxxxxxxxx<input type="submit" value="x">xxxxxxxxxx
</div>
<div>
<span>x</span>xxxxxxxxxx<span>x</span>xxxxxxxxxx
</div>
</form>
<h4>Ahem font required for this test</h4>
<p>you should see an unbroken black and blue bar with no red</p>
</body>
</html> | frivoal/presto-testo | core/standards/forms/input-submit-float-inline.html | HTML | bsd-3-clause | 920 |
/*
* Copyright (c) 2019, The OpenThread Authors.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. Neither the name of the copyright holder nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
/**
* @file
* This file includes k32w061 compile-time configuration constants
* for OpenThread.
*/
#ifndef OPENTHREAD_CORE_K32W061_CONFIG_H_
#define OPENTHREAD_CORE_K32W061_CONFIG_H_
/**
* @def OPENTHREAD_CONFIG_LOG_OUTPUT
*
 * The k32w061 platform provides an otPlatLog() function.
*/
#ifndef OPENTHREAD_CONFIG_LOG_OUTPUT /* allow command line override */
#define OPENTHREAD_CONFIG_LOG_OUTPUT OPENTHREAD_CONFIG_LOG_OUTPUT_PLATFORM_DEFINED
#endif
/**
* @def OPENTHREAD_CONFIG_PLATFORM_INFO
*
* The platform-specific string to insert into the OpenThread version string.
*
*/
#define OPENTHREAD_CONFIG_PLATFORM_INFO "K32W061"
/**
* @def SETTINGS_CONFIG_BASE_ADDRESS
*
* The base address of settings.
*
*/
#define SETTINGS_CONFIG_BASE_ADDRESS 0
/**
* @def SETTINGS_CONFIG_PAGE_SIZE
*
* The page size of settings.
*
*/
#define SETTINGS_CONFIG_PAGE_SIZE 0x200
/**
* @def SETTINGS_CONFIG_PAGE_NUM
*
* The page number of settings.
*
*/
#define SETTINGS_CONFIG_PAGE_NUM 64
/**
* @def RADIO_CONFIG_SRC_MATCH_ENTRY_NUM
*
* The number of source address table entries.
*
*/
#define RADIO_CONFIG_SRC_MATCH_ENTRY_NUM 128
/**
* @def OPENTHREAD_CONFIG_ENABLE_SOFTWARE_RETRANSMIT
*
* Define to 1 if you want to enable software retransmission logic.
*
*/
/* TODO */
/**
* @def OPENTHREAD_CONFIG_ENABLE_SOFTWARE_CSMA_BACKOFF
*
* Define to 1 if you want to enable software CSMA-CA backoff logic.
*
*/
/* TODO */
/**
* @def OPENTHREAD_CONFIG_NCP_UART_ENABLE
*
* Define to 1 to enable NCP UART support.
*
*/
#define OPENTHREAD_CONFIG_NCP_UART_ENABLE 1
/**
* @def OPENTHREAD_SETTINGS_RAM
*
* Define to 1 if you want to use K32W061 Flash implementation.
*
*/
#define OPENTHREAD_SETTINGS_RAM 0
/**
* @def OPENTHREAD_CONFIG_NCP_TX_BUFFER_SIZE
*
* The size of NCP message buffer in bytes.
*
*/
#define OPENTHREAD_CONFIG_NCP_TX_BUFFER_SIZE 1024
/**
* @def OPENTHREAD_CONFIG_HEAP_INTERNAL_SIZE
*
* The size of heap buffer when DTLS is enabled.
*
*/
#ifndef OPENTHREAD_CONFIG_HEAP_INTERNAL_SIZE
#define OPENTHREAD_CONFIG_HEAP_INTERNAL_SIZE (2048 * sizeof(void *))
#endif
/**
* @def OPENTHREAD_CONFIG_COAP_API_ENABLE
*
* Define to 1 to enable the CoAP API.
*
*/
#define OPENTHREAD_CONFIG_COAP_API_ENABLE 1
/**
* @def OPENTHREAD_CONFIG_JOINER_ENABLE
*
* Define to 1 to enable Joiner support.
*
*/
#define OPENTHREAD_CONFIG_JOINER_ENABLE 1
/**
* @def OPENTHREAD_CONFIG_COMMISSIONER_ENABLE
*
* Define to 1 to enable Commissioner support.
*
*/
#define OPENTHREAD_CONFIG_COMMISSIONER_ENABLE 1
/**
* @def OPENTHREAD_CONFIG_UDP_FORWARD_ENABLE
*
* Define to 1 to enable UDP forward support.
*
*/
#define OPENTHREAD_CONFIG_UDP_FORWARD_ENABLE 1
/**
* @def OPENTHREAD_CONFIG_BORDER_ROUTER_ENABLE
*
* Define to 1 to enable the Border Router service.
*
*/
#define OPENTHREAD_CONFIG_BORDER_ROUTER_ENABLE 1
/**
* @def OPENTHREAD_CONFIG_DHCP6_CLIENT_ENABLE
*
* Define to 1 to enable the DHCP CLIENT service.
*
*/
#define OPENTHREAD_CONFIG_DHCP6_CLIENT_ENABLE 1
/**
* @def OPENTHREAD_CONFIG_DHCP6_SERVER_ENABLE
*
* Define to 1 to enable the DHCP SERVER service.
*
*/
#define OPENTHREAD_CONFIG_DHCP6_SERVER_ENABLE 1
/**
* @def OPENTHREAD_CONFIG_TIME_SYNC_ENABLE
*
* Define as 1 to enable the time synchronization service feature.
*
*/
#ifndef OPENTHREAD_CONFIG_TIME_SYNC_ENABLE
#define OPENTHREAD_CONFIG_TIME_SYNC_ENABLE 0
#endif
/**
* @def OPENTHREAD_CONFIG_DIAG_ENABLE
*
* Define as 1 to enable the diag feature.
*
*/
#ifndef OPENTHREAD_CONFIG_DIAG_ENABLE
#define OPENTHREAD_CONFIG_DIAG_ENABLE 0
#endif
#endif // OPENTHREAD_CORE_K32W061_CONFIG_H_
| chshu/openthread | examples/platforms/k32w/k32w061/openthread-core-k32w061-config.h | C | bsd-3-clause | 5,242 |
<html>
<head>
<title>GB18030 lead 823191</title>
<meta http-equiv='content-type' content='text/html;charset=GB18030'>
<link rel='stylesheet' href='tests.css'>
</head>
<body>
<table>
<caption>Four-byte lead 823191</caption>
<tr><th colspan=2>GB18030<th colspan=3>Unicode
<tr><td>=82319130<td> 10 <td>U+3A37<td>㨷<td class=u>to wipe; to cleanse, (interchangeable &#232;) difficult; distress; harship
<tr><td>=82319131<td> 11 <td>U+3A38<td>㨸<td class=u>to hit; to strike; to beat
<tr><td>=82319132<td> 12 <td>U+3A39<td>㨹<td class=u>to crack; to break; to rip open, to split or divide up, to hang up; to suspend, to sweep clean; to eliminate
<tr><td>=82319133<td> 13 <td>U+3A3A<td>㨺<td class=u>to ornament; to polish; to decorate, (same as &#230;) to feel; to touch with hands; to hold, to search (in one's pocket, etc.)
<tr><td>=82319134<td> 14 <td>U+3A3B<td>㨻<td class=u>to cut; to kill; to behead, gradually; little by little; by degrees, to lift; to raise, to strike; to beat, to throw; to pitch; to deliver
<tr><td>=82319135<td> 15 <td>U+3A3C<td>㨼<td class=u>(non-classical form of U+63A0 &#230;) to take by force, to throw aside
<tr><td>=82319136<td> 16 <td>U+3A3D<td>㨽<td class=u>to cut or rip open
<tr><td>=82319137<td> 17 <td>U+3A3E<td>㨾<td class=u>a type; a model; a mode; a style
<tr><td>=82319138<td> 18 <td>U+3A3F<td>㨿<td class=u>(a variant of &#230;) to receive, as communications from a subordinate, to rely on, to lean on, evidence; proof, according to; whereas
<tr><td>=82319139<td> 19 <td>U+3A40<td>㩀<td class=u>(a variant of &#230;) to receive, as communications from a subordinate, to rely on, to lean on, evidence; proof, according to; whereas
</table>
<p><a href='charset/GB18030.html'>Return</a></p>
</body>
</html>
| frivoal/presto-testo | imported/peter/unicode/comparative/GB18030-823191.html | HTML | bsd-3-clause | 1,896 |
<!doctype html>
<title>textarea resize | negative resize</title>
<style>
textarea {
width: 100px;
height: 100px;
}
</style>
<p>There should be a textarea below, with a resize handle visible in
the bottom right corner.</p>
<textarea></textarea>
<script src="resize.js"></script>
<script>
window.onload = function()
{
resize_textarea(200, 200, function(){
resize_textarea(-100, -100);
});
}
</script>
| frivoal/presto-testo | css/resize/textarea-6.html | HTML | bsd-3-clause | 436 |
/*
* FFV1 encoder template
*
* Copyright (c) 2003-2016 Michael Niedermayer <michaelni@gmx.at>
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
static av_always_inline int RENAME(encode_line)(FFV1Context *s, int w,
TYPE *sample[3],
int plane_index, int bits)
{
PlaneContext *const p = &s->plane[plane_index];
RangeCoder *const c = &s->c;
int x;
int run_index = s->run_index;
int run_count = 0;
int run_mode = 0;
if (s->ac != AC_GOLOMB_RICE) {
if (c->bytestream_end - c->bytestream < w * 35) {
av_log(s->avctx, AV_LOG_ERROR, "encoded frame too large\n");
return AVERROR_INVALIDDATA;
}
} else {
if (s->pb.buf_end - s->pb.buf - (put_bits_count(&s->pb) >> 3) < w * 4) {
av_log(s->avctx, AV_LOG_ERROR, "encoded frame too large\n");
return AVERROR_INVALIDDATA;
}
}
if (s->slice_coding_mode == 1) {
for (x = 0; x < w; x++) {
int i;
int v = sample[0][x];
for (i = bits-1; i>=0; i--) {
uint8_t state = 128;
put_rac(c, &state, (v>>i) & 1);
}
}
return 0;
}
for (x = 0; x < w; x++) {
int diff, context;
context = RENAME(get_context)(p, sample[0] + x, sample[1] + x, sample[2] + x);
diff = sample[0][x] - RENAME(predict)(sample[0] + x, sample[1] + x);
if (context < 0) {
context = -context;
diff = -diff;
}
diff = fold(diff, bits);
if (s->ac != AC_GOLOMB_RICE) {
if (s->flags & AV_CODEC_FLAG_PASS1) {
put_symbol_inline(c, p->state[context], diff, 1, s->rc_stat,
s->rc_stat2[p->quant_table_index][context]);
} else {
put_symbol_inline(c, p->state[context], diff, 1, NULL, NULL);
}
} else {
if (context == 0)
run_mode = 1;
if (run_mode) {
if (diff) {
while (run_count >= 1 << ff_log2_run[run_index]) {
run_count -= 1 << ff_log2_run[run_index];
run_index++;
put_bits(&s->pb, 1, 1);
}
put_bits(&s->pb, 1 + ff_log2_run[run_index], run_count);
if (run_index)
run_index--;
run_count = 0;
run_mode = 0;
if (diff > 0)
diff--;
} else {
run_count++;
}
}
ff_dlog(s->avctx, "count:%d index:%d, mode:%d, x:%d pos:%d\n",
run_count, run_index, run_mode, x,
(int)put_bits_count(&s->pb));
if (run_mode == 0)
put_vlc_symbol(&s->pb, &p->vlc_state[context], diff, bits);
}
}
if (run_mode) {
while (run_count >= 1 << ff_log2_run[run_index]) {
run_count -= 1 << ff_log2_run[run_index];
run_index++;
put_bits(&s->pb, 1, 1);
}
if (run_count)
put_bits(&s->pb, 1, 1);
}
s->run_index = run_index;
return 0;
}
static int RENAME(encode_rgb_frame)(FFV1Context *s, const uint8_t *src[4],
int w, int h, const int stride[4])
{
int x, y, p, i;
const int ring_size = s->context_model ? 3 : 2;
TYPE *sample[4][3];
int lbd = s->bits_per_raw_sample <= 8;
int packed = !src[1];
int bits = s->bits_per_raw_sample > 0 ? s->bits_per_raw_sample : 8;
int offset = 1 << bits;
int transparency = s->transparency;
int packed_size = (3 + transparency)*2;
s->run_index = 0;
memset(RENAME(s->sample_buffer), 0, ring_size * MAX_PLANES *
(w + 6) * sizeof(*RENAME(s->sample_buffer)));
for (y = 0; y < h; y++) {
for (i = 0; i < ring_size; i++)
for (p = 0; p < MAX_PLANES; p++)
sample[p][i]= RENAME(s->sample_buffer) + p*ring_size*(w+6) + ((h+i-y)%ring_size)*(w+6) + 3;
for (x = 0; x < w; x++) {
int b, g, r, av_uninit(a);
if (lbd) {
unsigned v = *((const uint32_t*)(src[0] + x*4 + stride[0]*y));
b = v & 0xFF;
g = (v >> 8) & 0xFF;
r = (v >> 16) & 0xFF;
a = v >> 24;
} else if (packed) {
const uint16_t *p = ((const uint16_t*)(src[0] + x*packed_size + stride[0]*y));
r = p[0];
g = p[1];
b = p[2];
if (transparency)
a = p[3];
} else if (sizeof(TYPE) == 4 || transparency) {
g = *((const uint16_t *)(src[0] + x*2 + stride[0]*y));
b = *((const uint16_t *)(src[1] + x*2 + stride[1]*y));
r = *((const uint16_t *)(src[2] + x*2 + stride[2]*y));
if (transparency)
a = *((const uint16_t *)(src[3] + x*2 + stride[3]*y));
} else {
b = *((const uint16_t *)(src[0] + x*2 + stride[0]*y));
g = *((const uint16_t *)(src[1] + x*2 + stride[1]*y));
r = *((const uint16_t *)(src[2] + x*2 + stride[2]*y));
}
if (s->slice_coding_mode != 1) {
b -= g;
r -= g;
g += (b * s->slice_rct_by_coef + r * s->slice_rct_ry_coef) >> 2;
b += offset;
r += offset;
}
sample[0][0][x] = g;
sample[1][0][x] = b;
sample[2][0][x] = r;
sample[3][0][x] = a;
}
for (p = 0; p < 3 + transparency; p++) {
int ret;
sample[p][0][-1] = sample[p][1][0 ];
sample[p][1][ w] = sample[p][1][w-1];
if (lbd && s->slice_coding_mode == 0)
ret = RENAME(encode_line)(s, w, sample[p], (p + 1) / 2, 9);
else
ret = RENAME(encode_line)(s, w, sample[p], (p + 1) / 2, bits + (s->slice_coding_mode != 1));
if (ret < 0)
return ret;
}
}
return 0;
}
| endlessm/chromium-browser | third_party/ffmpeg/libavcodec/ffv1enc_template.c | C | bsd-3-clause | 7,075 |
// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package runtime
import "unsafe"
// The calls to nop are to keep these functions from being inlined.
// If they are inlined we have no guarantee that later rewrites of the
// code by optimizers will preserve the relative order of memory accesses.
//go:nosplit
func atomicload(ptr *uint32) uint32 {
nop()
return *ptr
}
//go:nosplit
func atomicloadp(ptr unsafe.Pointer) unsafe.Pointer {
nop()
return *(*unsafe.Pointer)(ptr)
}
//go:nosplit
func xadd64(ptr *uint64, delta int64) uint64 {
for {
old := *ptr
if cas64(ptr, old, old+uint64(delta)) {
return old + uint64(delta)
}
}
}
//go:nosplit
func xchg64(ptr *uint64, new uint64) uint64 {
for {
old := *ptr
if cas64(ptr, old, new) {
return old
}
}
}
//go:noescape
func xadd(ptr *uint32, delta int32) uint32
//go:noescape
func xchg(ptr *uint32, new uint32) uint32
// NO go:noescape annotation; see atomic_pointer.go.
func xchgp1(ptr unsafe.Pointer, new unsafe.Pointer) unsafe.Pointer
//go:noescape
func xchguintptr(ptr *uintptr, new uintptr) uintptr
//go:noescape
func atomicload64(ptr *uint64) uint64
//go:noescape
func atomicor8(ptr *uint8, val uint8)
//go:noescape
func cas64(ptr *uint64, old, new uint64) bool
//go:noescape
func atomicstore(ptr *uint32, val uint32)
//go:noescape
func atomicstore64(ptr *uint64, val uint64)
// NO go:noescape annotation; see atomic_pointer.go.
func atomicstorep1(ptr unsafe.Pointer, val unsafe.Pointer)
| frobware/go | src/runtime/atomic_386.go | GO | bsd-3-clause | 1,585 |
/**
* License and Terms of Use
*
* Copyright (c) 2011 SignpostMarv
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
(function(window, undefined){
var
Array = window['Array'],
EventTarget = window['EventTarget'],
mapapi = window['mapapi'],
gridPoint = mapapi['gridPoint'],
bounds = mapapi['bounds'],
ctype_digit = mapapi['utils']['ctype_digit']
;
if(mapapi == undefined){
throw 'mapapi.js is not loaded.';
}else if(EventTarget == undefined){
throw 'EventTarget is not loaded';
}
function extend(a,b){
a.prototype = new b;
a.prototype['constructor'] = a;
}
function shape(options){
EventTarget['call'](this);
this['opts'] = {};
for(var i in this['defaultOpts']){
this['opts'][i] = this['defaultOpts'][i];
}
if(options != undefined){
this['options'](options);
}
}
extend(shape, EventTarget);
shape.prototype['defaultOpts'] = {'fillStyle':'rgba(255,255,255,0.5)', 'strokeStyle':'rgb(255,255,255)', 'lineWidth':0};
shape.prototype['options'] = function(options){
options = options || {};
for(var i in options){
this['opts'] = options[i];
}
}
shape.prototype['withinShape'] = function(pos){
if(pos instanceof gridPoint){
return true;
}
return false;
}
shape.prototype['coords'] = function(value){
if(value != undefined){
this['options']({'coords':value});
}
var
coords = this['opts']['coords']
;
return coords != undefined ? coords : [];
}
shape.prototype['clickable'] = function(value){
if(value != undefined){
this['options']({'clickable':!!value});
}
var
clickable = this['opts']['clickable'];
;
return clickable != undefined ? clickable : false;
}
shape.prototype['strokeStyle'] = function(value){
if(typeof value == 'string'){
this['options']({'strokeStyle':value});
}
return this['opts']['strokeStyle'];
}
shape.prototype['lineWidth'] = function(value){
if(typeof value == 'number'){
this['options']({'lineWidth':Math.max(0,value)});
}
return Math.max(0, this['opts']['lineWidth']);
}
shape.prototype['intersects'] = function(value){
if(value instanceof bounds && this['bounds'] instanceof bounds){
return this['bounds']['intersects'](value);
}
return false;
}
mapapi['shape'] = shape;
function shapeManager(){
Array['call'](this);
}
extend(shapeManager, Array);
shapeManager.prototype['push'] = function(){
for(var i=0;i<arguments['length'];++i){
if(!(arguments[i] instanceof shape)){
throw 'Arguments of mapapi.shapeManager::push() should be instances of mapapi.shape';
}
}
Array.prototype['push']['apply'](this, arguments);
}
shapeManager.prototype['intersects'] = function(value){
if(value instanceof bounds){
var
shpmngr = new this['constructor']
;
for(var i=0;i<this['length'];++i){
if(this[i]['intersects'](value)){
shpmngr['push'](this[i]);
}
}
return shpmngr;
}else{
throw 'Intersection argument must be an instance of mapapi.bounds';
}
}
shapeManager.prototype['click'] = function(value){
var
value = gridPoint['fuzzy'](value),
ret
;
for(var i=0;i<this['length'];++i){
if(this[i]['clickable']() && this[i]['withinShape'](value)){
ret = this[i]['fire']('click',{'pos':value});
if(ret != undefined && ret == false){
break;
}
}
}
}
mapapi['shapeManager'] = shapeManager;
function poly(options){
shape['call'](this, options);
}
extend(poly, shape);
poly.prototype['options'] = function(options){
var
options = options || {},
coords = options['coords'],
fillStyle = options['fillStyle'],
strokeStyle = options['strokeStyle'],
lineWidth = options['lineWidth']
;
if(options['coords'] != undefined){
if(coords instanceof Array){
for(var i=0;i<coords['length'];++i){
coords[i] = gridPoint['fuzzy'](coords[i]);
}
var
swx = coords[0]['x'],
swy = coords[0]['y'],
nex = coords[0]['x'],
ney = coords[0]['y']
;
for(var i=1;i<coords['length'];++i){
swx = (coords[i]['x'] < swx) ? coords[i]['x'] : swx;
swy = (coords[i]['y'] < swy) ? coords[i]['y'] : swy;
nex = (coords[i]['x'] > nex) ? coords[i]['x'] : nex;
ney = (coords[i]['y'] > ney) ? coords[i]['y'] : ney;
}
this['bounds'] = new bounds(new gridPoint(swx, swy), new gridPoint(nex, ney));
this['opts']['coords'] = coords;
this['fire']('changedcoords');
}else{
throw 'coords must be array';
}
}
if(typeof fillStyle == 'string'){
var diff = this['opts']['fillStyle'] != fillStyle;
this['opts']['fillStyle'] = fillStyle;
if(diff){
this['fire']('changedfillstyle');
}
}
if(typeof strokeStyle == 'string'){
var diff = this['opts']['strokeStyle'] != strokeStyle;
this['opts']['strokeStyle'] = strokeStyle;
if(diff){
this['fire']('changedstrokestyle');
}
}
if(typeof lineWidth == 'number'){
var diff = this['opts']['lineWidth'] != Math.max(0,lineWidth);
this['opts']['lineWidth'] = Math.max(0,lineWidth);
if(diff){
this['fire']('changedlinewidth');
}
}
if(options['clickable'] != undefined){
this['opts']['clickable'] = !!options['clickable'];
}
}
poly.prototype['fillStyle'] = function(value){
if(value != undefined){
this['options']({'fillStyle':value});
}
return this['opts']['fillStyle'];
}
shape['polygon'] = poly;
function rectangle(options){
poly['call'](this, options);
}
extend(rectangle, poly);
rectangle.prototype['options'] = function(options){
var
options = options || {},
coords = options['coords']
;
if(coords != undefined){
if(coords instanceof Array){
if(coords['length'] == 2){
for(var i=0;i<coords['length'];++i){
coords[i] = gridPoint['fuzzy'](coords[i]);
}
var
sw = coords[0],
ne = coords[1],
foo,bar
;
if(ne['y'] > sw['y']){
foo = new gridPoint(ne['x'], sw['y']);
bar = new gridPoint(sw['x'], ne['y']);
ne = foo;
sw = bar;
}
if(sw['x'] > ne['x']){
foo = new gridPoint(ne['x'], sw['y']);
bar = new gridPoint(sw['x'], ne['y']);
sw = foo;
ne = bar;
}
options['coords'] = [sw, ne];
}else{
throw 'When supplying mapapi.shape.rectangle::options with an Array for the coordinates, there should only be two entries';
}
}else{
throw 'something other than array was given to mapapi.shape.rectangle::options';
}
}
poly.prototype['options']['call'](this, options);
}
rectangle.prototype['withinShape'] = function(value){
if(value == undefined){
throw 'Must specify an instance of mapapi.gridPoint';
}else if(!(this['bounds'] instanceof bounds)){
throw 'Coordinates not set';
}
value = gridPoint['fuzzy'](value);
return this['bounds']['isWithin'](value);
}
shape['rectangle'] = rectangle;
function square(options){
rectangle['call'](this, options);
}
extend(square, rectangle);
square.prototype['options'] = function(options){
options = options || {};
var
coords = options['coords']
;
if(coords instanceof Array && coords['length'] <= 2){
var
sw = coords[0],
ne = coords[1]
;
if(Math.abs(ne['x'] - sw['x']) != Math.abs(ne['y'] - sw['y'])){
throw 'coordinates should form a square';
}
}
rectangle.prototype['options']['call'](this, options);
}
shape['square'] = square;
function line(options){
shape['call'](this, options);
}
extend(line, shape);
line.prototype['defaultOpts'] = {'strokeStyle':'rgb(255,255,255)', 'lineWidth':1};
line.prototype['options'] = function(options){
var
options = options || {},
coords = options['coords'],
strokeStyle = options['strokeStyle'],
lineWidth = options['lineWidth']
;
if(options['coords'] != undefined){
if(coords instanceof Array){
if(coords['length'] >= 2){
for(var i=0;i<coords['length'];++i){
coords[i] = gridPoint['fuzzy'](coords[i]);
}
this['opts']['coords'] = coords;
this['fire']('changedcoords');
}else{
throw 'mapapi.shape.line requires two or more coordinates';
}
}else{
throw 'mapapi.shape.line requires coordinates be passed as an array';
}
}
if(typeof strokeStyle == 'string'){
var diff = this['opts']['strokeStyle'] != strokeStyle;
this['opts']['strokeStyle'] = strokeStyle;
if(diff){
this['fire']('changedstrokestyle');
}
}
if(ctype_digit(lineWidth)){
lineWidth = Math.max(0,lineWidth * 1);
var diff = this['opts']['lineWidth'] != lineWidth;
this['opts']['lineWidth'] = lineWidth;
if(diff){
this['fire']('changedlinewidth');
}
}
if(options['clickable'] != undefined){
this['opts']['clickable'] = !!options['clickable'];
}
}
line.prototype['intersects'] = function(value){
if(value instanceof bounds){
var
coords = this['coords']()
;
for(var i=0;i<coords['length'];++i){
if(value['isWithin'](coords[i])){
return true;
}
}
}
return false;
}
shape['line'] = line;
function circle(options){
shape['call'](this, options);
}
extend(circle, shape);
circle.prototype['options'] = function(options){
var
opts = this['opts'],
options = options || {},
coords = options['coords'],
radius = options['radius'],
strokeStyle = options['strokeStyle'],
lineWidth = options['lineWidth'],
diffPos=false,diffRadius=false,diff
;
if(coords != undefined){
coords[0] = gridPoint['fuzzy'](coords[0]);
diffPos = opts['coords'] == undefined || !pos['equals'](opts['coords'][0]);
opts['coords'] = [coords[0]];
}
if(radius != undefined){
if(typeof radius != 'number'){
throw 'radius should be specified as a number';
}else if(radius <= 0){
throw 'radius should be greater than zero';
}
diffRadius = radius != opts['radius'];
opts['radius'] = radius;
}
if(diffPos || diffRadius){
this['fire']('changedcoords');
}
if(typeof fillStyle == 'string'){
var diff = this['opts']['fillStyle'] != fillStyle;
this['opts']['fillStyle'] = fillStyle;
if(diff){
this['fire']('changedfillstyle');
}
}
if(typeof strokeStyle == 'string'){
var diff = this['opts']['strokeStyle'] != strokeStyle;
this['opts']['strokeStyle'] = strokeStyle;
if(diff){
this['fire']('changedstrokestyle');
}
}
if(typeof lineWidth == 'number'){
var diff = this['opts']['lineWidth'] != Math.max(0,lineWidth);
this['opts']['lineWidth'] = Math.max(0,lineWidth);
if(diff){
this['fire']('changedlinewidth');
}
}
if(options['clickable'] != undefined){
this['opts']['clickable'] = !!options['clickable'];
}
}
circle.prototype['radius'] = function(value){
if(value != undefined){
this['options']({'radius':value});
}
return this['opts']['radius'];
}
circle.prototype['fillStyle'] = function(value){
if(value != undefined){
this['options']({'fillStyle':value});
}
return this['opts']['fillStyle'];
}
circle.prototype['withinShape'] = function(pos){
pos = gridPoint['fuzzy'](pos);
return (this['coords']()[0] instanceof gridPoint && typeof this['radius']() == 'number') && (this['coords']()[0]['distance'](pos) <= this['radius']());
}
circle.prototype['intersects'] = function(value){
if(value instanceof bounds && this['coords']()[0] instanceof gridPoint){
if(value['isWithin'](this['coords']()[0])){
return true;
}else if(typeof this['radius']() == 'number'){
var
sw = value['sw'],
ne = value['ne'],
distanceTests = [sw,ne,{'x':sw['x'], 'y':ne['y']}, {'x':ne['x'], 'y':sw['y']}]
;
for(var i=0;i<distanceTests.length;++i){
if(this['withinShape'](distanceTests[i])){
return true;
}
}
}
}
return false;
}
shape['circle'] = circle;
})(window); | aurora-sim/Aurora-WebUI | www/worldmap/javascripts/mapapi.shape.js | JavaScript | bsd-3-clause | 12,790 |
{% extends "customer/baseaccountpage.html" %}
{% load i18n %}
{% block extra_breadcrumbs %}
<li>
<a href="{% url 'customer:notifications-inbox' %}">{% trans 'Notifications inbox' %}</a>
<span class="divider">/</span>
</li>
{% endblock %}
{% block tabcontent %}
<table class="table table-striped table-bordered">
{% if notification.sender %}
<tr>
<th>{% trans 'Sender' %}</th>
<td>{{ notification.sender }}</td>
</tr>
{% endif %}
<tr>
<th>{% trans 'Date sent' %}</th>
<td>{{ notification.date_sent }}</td>
</tr>
{% if notification.body %}
<tr>
<th>{% trans 'Subject' %}</th>
<td>{{ notification.subject|safe }}</td>
</tr>
<tr>
<th>{% trans 'Body' %}</th>
<td>{{ notification.body }}</td>
</tr>
{% else %}
<tr>
<th>{% trans 'Message' %}</th>
<th>{{ notification.subject|safe }}</th>
</tr>
{% endif %}
</table>
<div class="form-actions"><a href="{% url 'customer:notifications-inbox' %}" class="btn">{% trans 'Return to notifications inbox' %}</a></div>
{% endblock tabcontent %}
| marcoantoniooliveira/labweb | oscar/templates/oscar/customer/notifications/detail.html | HTML | bsd-3-clause | 1,304 |
// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef CONTENT_PUBLIC_RENDERER_RENDER_FRAME_OBSERVER_H_
#define CONTENT_PUBLIC_RENDERER_RENDER_FRAME_OBSERVER_H_
#include "base/basictypes.h"
#include "base/compiler_specific.h"
#include "base/strings/string16.h"
#include "content/common/content_export.h"
#include "ipc/ipc_listener.h"
#include "ipc/ipc_sender.h"
#include "third_party/WebKit/public/platform/WebVector.h"
#include "v8/include/v8.h"
namespace blink {
class WebFormElement;
class WebFrame;
class WebNode;
class WebString;
struct WebURLError;
}
namespace content {
class RendererPpapiHost;
class RenderFrame;
class RenderFrameImpl;
// Base class for objects that want to filter incoming IPCs, and also get
// notified of changes to the frame.
class CONTENT_EXPORT RenderFrameObserver : public IPC::Listener,
public IPC::Sender {
public:
// By default, observers will be deleted when the RenderFrame goes away. If
// they want to outlive it, they can override this function.
virtual void OnDestruct();
// Called when a Pepper plugin is created.
virtual void DidCreatePepperPlugin(RendererPpapiHost* host) {}
// Called when a load is explicitly stopped by the user or browser.
virtual void OnStop() {}
// Called when the RenderFrame visiblity is changed.
virtual void WasHidden() {}
virtual void WasShown() {}
// Called when associated widget is about to close.
virtual void WidgetWillClose() {}
// These match the Blink API notifications
virtual void DidCreateNewDocument() {}
virtual void DidCreateDocumentElement() {}
virtual void DidCommitProvisionalLoad(bool is_new_navigation,
bool is_same_page_navigation) {}
virtual void DidStartProvisionalLoad() {}
virtual void DidFailProvisionalLoad(const blink::WebURLError& error) {}
virtual void DidFinishLoad() {}
virtual void DidFinishDocumentLoad() {}
virtual void DidCreateScriptContext(v8::Local<v8::Context> context,
int extension_group,
int world_id) {}
virtual void WillReleaseScriptContext(v8::Local<v8::Context> context,
int world_id) {}
virtual void DidClearWindowObject() {}
virtual void DidChangeManifest() {}
virtual void DidChangeScrollOffset() {}
virtual void WillSendSubmitEvent(const blink::WebFormElement& form) {}
virtual void WillSubmitForm(const blink::WebFormElement& form) {}
virtual void DidMatchCSS(
const blink::WebVector<blink::WebString>& newly_matching_selectors,
const blink::WebVector<blink::WebString>& stopped_matching_selectors) {}
// Called before FrameWillClose, when this frame has been detached from the
// view, but has not been closed yet. This *will* be called when parent frames
// are closing. Since the frame is already detached from the DOM at this time
// it should not be inspected.
virtual void FrameDetached() {}
// Called when the frame will soon be closed. This is the last opportunity to
// send messages to the host (e.g., for clean-up, shutdown, etc.). This is
// *not* called on child frames when parent frames are being closed.
virtual void FrameWillClose() {}
// Called when we receive a console message from Blink for which we requested
// extra details (like the stack trace). |message| is the error message,
// |source| is the Blink-reported source of the error (either external or
// internal), and |stack_trace| is the stack trace of the error in a
// human-readable format (each frame is formatted as
// "\n at function_name (source:line_number:column_number)").
virtual void DetailedConsoleMessageAdded(const base::string16& message,
const base::string16& source,
const base::string16& stack_trace,
int32 line_number,
int32 severity_level) {}
// Called when a compositor frame has committed.
virtual void DidCommitCompositorFrame() {}
// Called when the focused node has changed to |node|.
virtual void FocusedNodeChanged(const blink::WebNode& node) {}
// IPC::Listener implementation.
bool OnMessageReceived(const IPC::Message& message) override;
// IPC::Sender implementation.
bool Send(IPC::Message* message) override;
RenderFrame* render_frame() const;
int routing_id() const { return routing_id_; }
protected:
explicit RenderFrameObserver(RenderFrame* render_frame);
~RenderFrameObserver() override;
private:
friend class RenderFrameImpl;
// This is called by the RenderFrame when it's going away so that this object
// can null out its pointer.
void RenderFrameGone();
RenderFrame* render_frame_;
// The routing ID of the associated RenderFrame.
int routing_id_;
DISALLOW_COPY_AND_ASSIGN(RenderFrameObserver);
};
} // namespace content
#endif // CONTENT_PUBLIC_RENDERER_RENDER_FRAME_OBSERVER_H_
| Pluto-tv/chromium-crosswalk | content/public/renderer/render_frame_observer.h | C | bsd-3-clause | 5,187 |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01//EN">
<html>
<head>
<title>Forms :: text input : float</title>
<style type="text/css">
form * {
font-family: Ahem;
font-size: 1em;
line-height: 1em;
}
fieldset, div {
background-color: red;
padding: 0;
margin: 1em;
border: none;
width: 2em;
height: 1em;
}
input, span {
font-size: 1em;
line-height: 1em;
color: lime;
padding: 0;
margin: 0;
border: none;
width: 1em;
height: 1em;
float: left;
}
</style>
</head>
<body>
<form action="">
<fieldset>
<input type="text" value="x">
<input type="text" value="x">
</fieldset>
<div>
<input type="text" value="x">
<input type="text" value="x">
</div>
<div>
<span>x</span>
<span>x</span>
</div>
</form>
<h4>Ahem font required for this test</h4>
<p>you should see three identical, green rectangles, and no red</p>
</body>
</html> | frivoal/presto-testo | core/standards/forms/input-text-float.html | HTML | bsd-3-clause | 852 |
<html>
<head>
<script>
function log(str)
{
var li = document.createElement("li");
li.appendChild(document.createTextNode(str));
var console = document.getElementById("console");
console.appendChild(li);
}
function assertEqual(message, actual, expected)
{
if (actual != expected)
log("\n" + message + ": Failure, actual: " + actual + "; expected: " + expected);
else
log("\n" + message + ": Success");
}
function runTests() {
if (window.testRunner) {
testRunner.dumpAsText();
}
assertEqual("stringify", JSON.stringify(window.location), '{"ancestorOrigins":{},"origin":"http://127.0.0.1:8000","hash":"","search":"","pathname":"/dom/location-stringify.html","port":"8000","hostname":"127.0.0.1","host":"127.0.0.1:8000","protocol":"http:","href":"http://127.0.0.1:8000/dom/location-stringify.html"}');
}
</script>
</head>
<body onload="runTests();">
<p>
Tests that Location interface is [Unforgeable], thus JSON.stringify() works well for Location objects.
</p>
<ul id="console" dir=ltr></ul>
</body>
</html>
| smishenk/blink-crosswalk | LayoutTests/http/tests/dom/location-stringify.html | HTML | bsd-3-clause | 1,068 |
using System;
using FluentNHibernate.Conventions;
using FluentNHibernate.Conventions.AcceptanceCriteria;
using FluentNHibernate.Conventions.Instances;
using FluentNHibernate.Conventions.Inspections;
using FluentNHibernate.Mapping;
using NUnit.Framework;
namespace FluentNHibernate.Testing.ConventionFinderTests
{
[TestFixture]
public class AddingTypeTests
{
private DefaultConventionFinder finder;
[SetUp]
public void CreateFinder()
{
finder = new DefaultConventionFinder();
}
[Test]
public void AddingSingleShouldntThrowIfHasParameterlessConstructor()
{
var ex = Catch.Exception(() => finder.Add<ConventionWithParameterlessConstructor>());
ex.ShouldBeNull();
}
[Test]
public void AddingSingleShouldntThrowIfHasIConventionFinderConstructor()
{
var ex = Catch.Exception(() => finder.Add<ConventionWithIConventionFinderConstructor>());
ex.ShouldBeNull();
}
[Test]
public void AddingSingleShouldThrowIfNoParameterlessConstructor()
{
var ex = Catch.Exception(() => finder.Add<ConventionWithoutValidConstructor>());
ex.ShouldBeOfType<MissingConstructorException>();
ex.ShouldNotBeNull();
}
[Test]
public void AddingSingleShouldThrowIfNoIConventionFinderConstructor()
{
var ex = Catch.Exception(() => finder.Add<ConventionWithoutValidConstructor>());
ex.ShouldBeOfType<MissingConstructorException>();
ex.ShouldNotBeNull();
}
[Test]
public void AddingAssemblyShouldntThrowIfNoIConventionFinderConstructor()
{
var ex = Catch.Exception(() => finder.AddAssembly(typeof(ConventionWithoutValidConstructor).Assembly));
ex.ShouldBeNull();
}
}
public class ConventionWithParameterlessConstructor : IClassConvention
{
public ConventionWithParameterlessConstructor()
{ }
public void Apply(IClassInstance instance) {}
}
public class ConventionWithIConventionFinderConstructor : IClassConvention
{
public ConventionWithIConventionFinderConstructor(IConventionFinder conventionFinder)
{ }
public void Apply(IClassInstance instance) {}
}
public class ConventionWithoutValidConstructor : IClassConvention
{
public ConventionWithoutValidConstructor(int someParameter)
{ }
public void Apply(IClassInstance instance) {}
}
} | mtscout6/fluent-nhibernate | src/FluentNHibernate.Testing/ConventionFinderTests/AddingTypeTests.cs | C# | bsd-3-clause | 2,683 |
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*
* @format
* @flow strict-local
*/
'use strict';
import type {Node} from 'React';
import {ActivityIndicator, StyleSheet, View} from 'react-native';
import React, {Component} from 'react';
type State = {|animating: boolean|};
type Props = $ReadOnly<{||}>;
type Timer = TimeoutID;
class ToggleAnimatingActivityIndicator extends Component<Props, State> {
_timer: Timer;
constructor(props: Props) {
super(props);
this.state = {
animating: true,
};
}
componentDidMount() {
this.setToggleTimeout();
}
componentWillUnmount() {
clearTimeout(this._timer);
}
setToggleTimeout() {
this._timer = setTimeout(() => {
this.setState({animating: !this.state.animating});
this.setToggleTimeout();
}, 2000);
}
render(): Node {
return (
<ActivityIndicator
animating={this.state.animating}
style={[styles.centering, {height: 80}]}
size="large"
/>
);
}
}
// Shared layout styles for the examples below.
const styles = StyleSheet.create({
  // Center the spinner within its container and give it some breathing room.
  centering: {
    alignItems: 'center',
    justifyContent: 'center',
    padding: 8,
  },
  // Light-gray background so the default white spinner stays visible.
  gray: {
    backgroundColor: '#cccccc',
  },
  // Lay several spinners out in a single evenly-spaced row.
  horizontal: {
    flexDirection: 'row',
    justifyContent: 'space-around',
    padding: 8,
  },
});
// RNTester page metadata.
exports.displayName = (undefined: ?string);
exports.category = 'UI';
exports.framework = 'React';
exports.title = 'ActivityIndicator';
exports.documentationURL = 'https://reactnative.dev/docs/activityindicator';
exports.description = 'Animated loading indicators.';
// Example registry: each entry renders one ActivityIndicator variant.
exports.examples = [
  {
    title: 'Default (small, white)',
    render(): Node {
      return (
        <ActivityIndicator
          style={[styles.centering, styles.gray]}
          color="white"
        />
      );
    },
  },
  {
    title: 'Gray',
    render(): Node {
      return (
        <View>
          <ActivityIndicator style={[styles.centering]} />
          <ActivityIndicator style={[styles.centering, styles.gray]} />
        </View>
      );
    },
  },
  {
    title: 'Custom colors',
    render(): Node {
      return (
        <View style={styles.horizontal}>
          <ActivityIndicator color="#0000ff" />
          <ActivityIndicator color="#aa00aa" />
          <ActivityIndicator color="#aa3300" />
          <ActivityIndicator color="#00aa00" />
        </View>
      );
    },
  },
  {
    title: 'Large',
    render(): Node {
      return (
        <ActivityIndicator
          style={[styles.centering, styles.gray]}
          size="large"
          color="white"
        />
      );
    },
  },
  {
    title: 'Large, custom colors',
    render(): Node {
      return (
        <View style={styles.horizontal}>
          <ActivityIndicator size="large" color="#0000ff" />
          <ActivityIndicator size="large" color="#aa00aa" />
          <ActivityIndicator size="large" color="#aa3300" />
          <ActivityIndicator size="large" color="#00aa00" />
        </View>
      );
    },
  },
  {
    // Uses the self-toggling demo component defined above.
    title: 'Start/stop',
    render(): Node {
      return <ToggleAnimatingActivityIndicator />;
    },
  },
  {
    // Scaling via transform works on every platform.
    title: 'Custom size',
    render(): Node {
      return (
        <ActivityIndicator
          style={[styles.centering, {transform: [{scale: 1.5}]}]}
          size="large"
        />
      );
    },
  },
  {
    // Numeric `size` values are only supported on Android.
    platform: 'android',
    title: 'Custom size (size: 75)',
    render(): Node {
      return <ActivityIndicator style={styles.centering} size={75} />;
    },
  },
];
| hoangpham95/react-native | packages/rn-tester/js/examples/ActivityIndicator/ActivityIndicatorExample.js | JavaScript | bsd-3-clause | 3,613 |
# Narrow the query input down to fields that are public but not final;
# everything else that is a Field gets dropped from the tuple set.
for ast_tuple in Query.input.tuples('ast'):
    node = ast_tuple.ast
    if type(node) is not Field:
        continue
    flags = node.modifiers
    is_public = flags.isSet(Modifier.ModifierFlag.Public)
    is_final = flags.isSet(Modifier.ModifierFlag.Final)
    if not (is_public and not is_final):
        Query.input.remove(ast_tuple)
Query.result = Query.input
"""
This module contains some assorted functions used in tests
"""
from __future__ import absolute_import
import os
from importlib import import_module
from twisted.trial.unittest import SkipTest
from scrapy.exceptions import NotConfigured
from scrapy.utils.boto import is_botocore
def assert_aws_environ():
    """Skip the current test (via twisted's SkipTest) unless boto support is
    available and AWS credentials are present in the environment.
    """
    skip_if_no_boto()
    if os.environ.get('AWS_ACCESS_KEY_ID') is None:
        raise SkipTest("AWS keys not found")
def assert_gcs_environ():
    """Skip the current test unless a Google Cloud Storage project is configured."""
    if os.environ.get('GCS_PROJECT_ID') is None:
        raise SkipTest("GCS_PROJECT_ID not found")
def skip_if_no_boto():
    """Skip the current test when boto/botocore support is not configured.

    ``is_botocore()`` raises ``NotConfigured`` when neither library is
    importable; we convert that into trial's ``SkipTest`` so the test is
    reported as skipped rather than errored.
    """
    try:
        is_botocore()
    except NotConfigured as e:
        raise SkipTest(e)
def get_s3_content_and_delete(bucket, path, with_key=False):
    """ Get content from s3 key, and delete key afterwards.

    Uses botocore when available, otherwise falls back to legacy boto.
    Returns the object body as bytes; when ``with_key`` is true, returns a
    ``(content, key)`` tuple instead (the key object differs between the two
    backends).
    """
    if is_botocore():
        import botocore.session
        session = botocore.session.get_session()
        client = session.create_client('s3')
        key = client.get_object(Bucket=bucket, Key=path)
        content = key['Body'].read()
        client.delete_object(Bucket=bucket, Key=path)
    else:
        import boto
        # assuming boto=2.2.2
        # validate=False avoids an extra round-trip checking bucket existence.
        bucket = boto.connect_s3().get_bucket(bucket, validate=False)
        key = bucket.get_key(path)
        content = key.get_contents_as_string()
        bucket.delete_key(path)
    return (content, key) if with_key else content
def get_gcs_content_and_delete(bucket, path):
    """Download a blob from Google Cloud Storage, delete it, and return a
    ``(content, blob)`` tuple. The project is taken from ``GCS_PROJECT_ID``.
    """
    from google.cloud import storage
    client = storage.Client(project=os.environ.get('GCS_PROJECT_ID'))
    bucket = client.get_bucket(bucket)
    blob = bucket.get_blob(path)
    content = blob.download_as_string()
    bucket.delete_blob(path)
    return content, blob
def get_crawler(spidercls=None, settings_dict=None):
    """Return an unconfigured Crawler object. If settings_dict is given, it
    will be used to populate the crawler settings with a project level
    priority.

    Defaults to the base ``Spider`` class when no spider class is supplied.
    """
    # Imported locally to avoid a circular import at module load time.
    from scrapy.crawler import CrawlerRunner
    from scrapy.spiders import Spider
    runner = CrawlerRunner(settings_dict)
    return runner.create_crawler(spidercls or Spider)
def get_pythonpath():
    """Return a PYTHONPATH value that makes child processes import this
    installation of Scrapy rather than a system-wide one.
    """
    scrapy_dir = os.path.dirname(import_module('scrapy').__path__[0])
    existing = os.environ.get('PYTHONPATH', '')
    return os.pathsep.join([scrapy_dir, existing])
def get_testenv():
    """Return a copy of the OS environment suitable for forking processes that
    must import this installation of Scrapy instead of a system installed one.
    """
    return dict(os.environ, PYTHONPATH=get_pythonpath())
def assert_samelines(testcase, text1, text2, msg=None):
    """Assert that two texts contain the same lines, treating the different
    platform line endings (LF, CRLF, CR) as equivalent.
    """
    lines1 = text1.splitlines()
    lines2 = text2.splitlines()
    testcase.assertEqual(lines1, lines2, msg)
| umrashrf/scrapy | scrapy/utils/test.py | Python | bsd-3-clause | 3,020 |
/*
* Copyright (c) 2015, Texas Instruments Incorporated - http://www.ti.com/
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. Neither the name of the copyright holder nor the names of its
* contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
* OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/*---------------------------------------------------------------------------*/
#include "rf-core/api/mailbox.h"
#include "rf-core/api/common_cmd.h"
#include "rf-core/api/prop_cmd.h"
/*---------------------------------------------------------------------------*/
/* Overrides for CMD_PROP_RADIO_DIV_SETUP */
/*
 * Radio register override list consumed by the radio setup command via
 * pRegOverride. Entries patch analog (ADI_*) and hardware (HW_*) radio
 * registers; the list is terminated by 0xFFFFFFFF. Values originate from a
 * TI SmartRF Studio export — do not edit individual words by hand.
 */
uint32_t overrides[] =
{
  /* override_synth.xml */
  HW32_ARRAY_OVERRIDE(0x6088, 1),
  (uint32_t)0x0000001A,
  ADI_HALFREG_OVERRIDE(0, 61, 0xF, 0xD),
  HW32_ARRAY_OVERRIDE(0x4038, 1),
  (uint32_t)0x0000003A,
  HW_REG_OVERRIDE(0x4020, 0x7F00),
  HW_REG_OVERRIDE(0x4064, 0x0040),
  (uint32_t)0x684A3,
  (uint32_t)0xC0040141,
  (uint32_t)0x0533B107,
  (uint32_t)0xA480583,
  (uint32_t)0x7AB80603,
  ADI_REG_OVERRIDE(1, 4, 0x1F),
  ADI_HALFREG_OVERRIDE(1, 7, 0x4, 0x4),
  HW_REG_OVERRIDE(0x6084, 0x35F1),
  (uint32_t)0x00038883,
  (uint32_t)0x00FB88A3,
  /* TX power override */
  ADI_REG_OVERRIDE(0, 12, 0xF9),
  /* Overrides for CRC16 functionality */
  (uint32_t)0x943,
  (uint32_t)0x963,
  /* End-of-list marker */
  (uint32_t)0xFFFFFFFF,
};
/*---------------------------------------------------------------------------*/
/* CMD_PROP_RADIO_DIV_SETUP */
/*
 * Proprietary-mode radio setup command (divided/sub-GHz band, 868 MHz
 * centre frequency). Configures modulation, symbol rate, RX bandwidth,
 * preamble/sync-word format and applies the override list above.
 */
rfc_CMD_PROP_RADIO_DIV_SETUP_t smartrf_settings_cmd_prop_radio_div_setup =
{
    .commandNo = 0x3807,
    .status = 0x0000,
    .pNextOp = 0,
    .startTime = 0x00000000,
    .startTrigger.triggerType = 0x0,
    .startTrigger.bEnaCmd = 0x0,
    .startTrigger.triggerNo = 0x0,
    .startTrigger.pastTrig = 0x0,
    .condition.rule = 0x1,
    .condition.nSkip = 0x0,
    .modulation.modType = 0x1,
    .modulation.deviation = 0x64,
    .symbolRate.preScale = 0xf,
    .symbolRate.rateWord = 0x8000,
    .rxBw = 0x24,
    .preamConf.nPreamBytes = 0x3,
    .preamConf.preamMode = 0x0,
    .formatConf.nSwBits = 0x18,
    .formatConf.bBitReversal = 0x0,
    .formatConf.bMsbFirst = 0x1,
    .formatConf.fecMode = 0x0,
    /* 7: .4g mode with dynamic whitening and CRC choice */
    .formatConf.whitenMode = 0x7,
    .config.frontEndMode = 0x0, /* Differential mode */
    .config.biasMode = 0x1, /* External bias*/
    .config.bNoFsPowerUp = 0x0,
    .txPower = 0x00, /* Driver sets correct value */
    .pRegOverride = overrides,
    .intFreq = 0x8000,
    .centerFreq = 868,
    .loDivider = 0x05,
};
/*---------------------------------------------------------------------------*/
/* CMD_FS */
/*
 * Frequency-synthesizer programming command: tunes the synth to 868 MHz
 * (integer part only, no fractional component) in RX mode.
 */
rfc_CMD_FS_t smartrf_settings_cmd_fs =
{
    .commandNo = 0x0803,
    .status = 0x0000,
    .pNextOp = 0,
    .startTime = 0x00000000,
    .startTrigger.triggerType = 0x0,
    .startTrigger.bEnaCmd = 0x0,
    .startTrigger.triggerNo = 0x0,
    .startTrigger.pastTrig = 0x0,
    .condition.rule = 0x1,
    .condition.nSkip = 0x0,
    .frequency = 868,
    .fractFreq = 0x0000,
    .synthConf.bTxMode = 0x0,
    .synthConf.refFreq = 0x0,
    .__dummy0 = 0x00,
    .midPrecal = 0x00,
    .ktPrecal = 0x00,
    .tdcPrecal = 0x0000,
};
/*---------------------------------------------------------------------------*/
/* CMD_PROP_TX_ADV */
/*
 * Advanced proprietary-mode TX command configured for IEEE 802.15.4g
 * framing: 16 header bits, CRC appended by hardware (not included in the
 * software-supplied header/payload). pktLen and pPkt are filled in by the
 * driver per transmission.
 */
rfc_CMD_PROP_TX_ADV_t smartrf_settings_cmd_prop_tx_adv =
{
    .commandNo = 0x3803,
    .status = 0x0000,
    .pNextOp = 0,
    .startTime = 0x00000000,
    .startTrigger.triggerType = 0x0,
    .startTrigger.bEnaCmd = 0x0,
    .startTrigger.triggerNo = 0x0,
    .startTrigger.pastTrig = 0x0,
    .condition.rule = 0x1,
    .condition.nSkip = 0x0,
    .pktConf.bFsOff = 0x0,
    .pktConf.bUseCrc = 0x1,
    .pktConf.bCrcIncSw = 0x0, /* .4g mode */
    .pktConf.bCrcIncHdr = 0x0, /* .4g mode */
    .numHdrBits = 0x10 /* 16: .4g mode */,
    .pktLen = 0x0000,
    .startConf.bExtTxTrig = 0x0,
    .startConf.inputMode = 0x0,
    .startConf.source = 0x0,
    .preTrigger.triggerType = TRIG_REL_START,
    .preTrigger.bEnaCmd = 0x0,
    .preTrigger.triggerNo = 0x0,
    .preTrigger.pastTrig = 0x1,
    .preTime = 0x00000000,
    .syncWord = 0x0055904e,
    .pPkt = 0,
};
/*---------------------------------------------------------------------------*/
/* CMD_PROP_RX_ADV */
/*
 * Advanced proprietary-mode RX command for 802.15.4g frames: repeats after
 * both good and bad packets, auto-flushes CRC errors and ignored frames,
 * and appends RSSI + status bytes to each received entry. maxPktLen,
 * pQueue and pOutput are populated by the driver.
 */
rfc_CMD_PROP_RX_ADV_t smartrf_settings_cmd_prop_rx_adv =
{
    .commandNo = 0x3804,
    .status = 0x0000,
    .pNextOp = 0,
    .startTime = 0x00000000,
    .startTrigger.triggerType = 0x0,
    .startTrigger.bEnaCmd = 0x0,
    .startTrigger.triggerNo = 0x0,
    .startTrigger.pastTrig = 0x0,
    .condition.rule = 0x1,
    .condition.nSkip = 0x0,
    .pktConf.bFsOff = 0x0,
    .pktConf.bRepeatOk = 0x1,
    .pktConf.bRepeatNok = 0x1,
    .pktConf.bUseCrc = 0x1,
    .pktConf.bCrcIncSw = 0x0, /* .4g mode */
    .pktConf.bCrcIncHdr = 0x0, /* .4g mode */
    .pktConf.endType = 0x0,
    .pktConf.filterOp = 0x1,
    .rxConf.bAutoFlushIgnored = 0x1,
    .rxConf.bAutoFlushCrcErr = 0x1,
    .rxConf.bIncludeHdr = 0x0,
    .rxConf.bIncludeCrc = 0x0,
    .rxConf.bAppendRssi = 0x1,
    .rxConf.bAppendTimestamp = 0x0,
    .rxConf.bAppendStatus = 0x1,
    .syncWord0 = 0x0055904e,
    .syncWord1 = 0x00000000,
    .maxPktLen = 0x0000, /* To be populated by the driver. */
    .hdrConf.numHdrBits = 0x10, /* 16: .4g mode */
    .hdrConf.lenPos = 0x0, /* .4g mode */
    .hdrConf.numLenBits = 0x0B, /* 11 = 0x0B .4g mode */
    .addrConf.addrType = 0x0,
    .addrConf.addrSize = 0x0,
    .addrConf.addrPos = 0x0,
    .addrConf.numAddr = 0x0,
    .lenOffset = -4, /* .4g mode */
    /* RX runs until explicitly aborted by the driver. */
    .endTrigger.triggerType = TRIG_NEVER,
    .endTrigger.bEnaCmd = 0x0,
    .endTrigger.triggerNo = 0x0,
    .endTrigger.pastTrig = 0x0,
    .endTime = 0x00000000,
    .pAddr = 0,
    .pQueue = 0,
    .pOutput = 0,
};
/*---------------------------------------------------------------------------*/
| MohamedSeliem/contiki | cpu/cc26xx-cc13xx/rf-core/smartrf-settings.c | C | bsd-3-clause | 6,889 |
/**
* Copyright (c) 2015-present, Facebook, Inc.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*
* @providesModule takeSnapshot
* @flow
*/
'use strict';
const UIManager = require('UIManager');
const findNumericNodeHandle = require('findNumericNodeHandle');
/**
* Capture an image of the screen, window or an individual view. The image
* will be stored in a temporary file that will only exist for as long as the
* app is running.
*
* The `view` argument can be the literal string `window` if you want to
* capture the entire window, or it can be a reference to a specific
* React Native component.
*
* The `options` argument may include:
* - width/height (number) - the width and height of the image to capture.
* - format (string) - either 'png' or 'jpeg'. Defaults to 'png'.
* - quality (number) - the quality when using jpeg. 0.0 - 1.0 (default).
*
* Returns a Promise.
* @platform ios
*/
function takeSnapshot(
view?: 'window' | React$Element<any> | number,
options?: {
width?: number,
height?: number,
format?: 'png' | 'jpeg',
quality?: number,
},
): Promise<any> {
if (typeof view !== 'number' && view !== 'window') {
view = findNumericNodeHandle(view) || 'window';
}
// Call the hidden '__takeSnapshot' method; the main one throws an error to
// prevent accidental backwards-incompatible usage.
return UIManager.__takeSnapshot(view, options);
}
module.exports = takeSnapshot;
| yangshun/react | src/renderers/native/takeSnapshot.js | JavaScript | bsd-3-clause | 1,528 |
<html>
<head>
<title>GB18030 lead 8136C8</title>
<meta http-equiv='content-type' content='text/html;charset=GB18030'>
<link rel='stylesheet' href='tests.css'>
</head>
<body>
<table>
<caption>Four-byte lead 8136C8</caption>
<tr><th colspan=2>GB18030<th colspan=3>Unicode
<tr><td>=8136C830<td> 6È0 <td>U+21A0<td>↠<td class=u>RIGHTWARDS TWO HEADED ARROW
<tr><td>=8136C831<td> 6È1 <td>U+21A1<td>↡<td class=u>DOWNWARDS TWO HEADED ARROW
<tr><td>=8136C832<td> 6È2 <td>U+21A2<td>↢<td class=u>LEFTWARDS ARROW WITH TAIL
<tr><td>=8136C833<td> 6È3 <td>U+21A3<td>↣<td class=u>RIGHTWARDS ARROW WITH TAIL
<tr><td>=8136C834<td> 6È4 <td>U+21A4<td>↤<td class=u>LEFTWARDS ARROW FROM BAR
<tr><td>=8136C835<td> 6È5 <td>U+21A5<td>↥<td class=u>UPWARDS ARROW FROM BAR
<tr><td>=8136C836<td> 6È6 <td>U+21A6<td>↦<td class=u>RIGHTWARDS ARROW FROM BAR
<tr><td>=8136C837<td> 6È7 <td>U+21A7<td>↧<td class=u>DOWNWARDS ARROW FROM BAR
<tr><td>=8136C838<td> 6È8 <td>U+21A8<td>↨<td class=u>UP DOWN ARROW WITH BASE
<tr><td>=8136C839<td> 6È9 <td>U+21A9<td>↩<td class=u>LEFTWARDS ARROW WITH HOOK
</table>
<p><a href='charset/GB18030.html'>Return</a></p>
</body>
</html>
| frivoal/presto-testo | imported/peter/unicode/comparative/GB18030-8136c8.html | HTML | bsd-3-clause | 1,253 |
(function($, window, document) {

  var pluginName = 'fatNav',
    defaults = {};

  /**
   * Fullscreen "fat" navigation plugin.
   * Injects a hamburger toggle button into the body and wires it (plus every
   * link inside the `.fat-nav` element) to open/close the fullscreen overlay.
   * @param {Object} [options] - settings merged over the plugin defaults.
   */
  function Plugin(options) {
    this.settings = $.extend({}, defaults, options);
    this._defaults = defaults;
    this._name = pluginName;
    this.init();
  }

  $.extend(Plugin.prototype, {

    init: function() {
      var self = this;
      var $nav = this.$nav = $('.fat-nav');
      var $hamburger = this.$hamburger = $('<a href="javascript:void(0)" class="hamburger"><div class="hamburger__icon"></div></a>');
      // Remember the page's original overflow so it can be restored on close.
      this._bodyOverflow = $('body').css('overflow');

      // Hack to prevent mobile safari scrolling the whole body when nav is open
      if (navigator.userAgent.match(/(iPad|iPhone|iPod)/g)) {
        $nav.children().css({
          'height': '110%',
          'transform': 'translateY(-5%)'
        });
      }

      $('body').append($hamburger);

      // Both the hamburger and any nav link toggle the overlay.
      $().add($hamburger).add($nav.find('a')).on('click', function(e) {
        self.toggleNav();
      });
    },

    toggleNav: function() {
      var self = this;
      this.$nav.fadeToggle(400);
      self.toggleBodyOverflow();
      $().add(this.$hamburger).add(this.$nav).toggleClass('active');
    },

    toggleBodyOverflow: function() {
      var self = this;
      var $body = $('body');
      $body.toggleClass('no-scroll');
      var isNavOpen = $body.hasClass('no-scroll');
      // Lock body scrolling while the overlay is open; restore the original
      // overflow value once it closes.
      $body.css('overflow', isNavOpen ? 'hidden' : self._bodyOverflow);
    }

  });

  if (typeof $[pluginName] === 'undefined') {
    $[pluginName] = function(options) {
      // Bug fix: this previously called `new Plugin(this, options)`, passing
      // the jQuery object as the `options` argument and silently discarding
      // the caller's options. `Plugin` only accepts an options object.
      return new Plugin(options);
    };
  }

}(jQuery, window, document));
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.