repo_name stringlengths 4 116 | path stringlengths 4 379 | size stringlengths 1 7 | content stringlengths 3 1.05M | license stringclasses 15
values |
|---|---|---|---|---|
tausifmuzaffar/bisApp | node_modules/native-base/src/basic/Tabs/ScrollableTabBar.js | 8873 | const React = require('react');
const ReactNative = require('react-native');
import { connectStyle, StyleProvider } from '@shoutem/theme';
import variable from './../../theme/variables/platform';
import { TabHeading, Text, TabContainer } from './../../index';
import _ from 'lodash';
const {
View,
Animated,
StyleSheet,
ScrollView,
Platform,
Dimensions,
} = ReactNative;
const Button = require('./Button');
const WINDOW_WIDTH = Dimensions.get('window').width;
const ScrollableTabBar = React.createClass({
propTypes: {
goToPage: React.PropTypes.func,
activeTab: React.PropTypes.number,
tabs: React.PropTypes.array,
backgroundColor: React.PropTypes.string,
activeTextColor: React.PropTypes.string,
inactiveTextColor: React.PropTypes.string,
scrollOffset: React.PropTypes.number,
style: View.propTypes.style,
tabStyle: View.propTypes.style,
tabsContainerStyle: View.propTypes.style,
renderTab: React.PropTypes.func,
underlineStyle: View.propTypes.style,
onScroll:React.PropTypes.func,
},
contextTypes: {
theme: React.PropTypes.object,
},
getDefaultProps() {
return {
scrollOffset: 52,
activeTextColor: 'navy',
inactiveTextColor: 'black',
backgroundColor: null,
style: {},
tabStyle: {},
tabsContainerStyle: {},
underlineStyle: {},
};
},
getInitialState() {
this._tabsMeasurements = [];
return {
_leftTabUnderline: new Animated.Value(0),
_widthTabUnderline: new Animated.Value(0),
_containerWidth: null,
};
},
componentDidMount() {
this.props.scrollValue.addListener(this.updateView);
},
updateView(offset) {
const position = Math.floor(offset.value);
const pageOffset = offset.value % 1;
const tabCount = this.props.tabs.length;
const lastTabPosition = tabCount - 1;
if (tabCount === 0 || offset.value < 0 || offset.value > lastTabPosition) {
return;
}
if (this.necessarilyMeasurementsCompleted(position, position === lastTabPosition)) {
this.updateTabPanel(position, pageOffset);
this.updateTabUnderline(position, pageOffset, tabCount);
}
},
necessarilyMeasurementsCompleted(position, isLastTab) {
return this._tabsMeasurements[position] &&
(isLastTab || this._tabsMeasurements[position + 1]) &&
this._tabContainerMeasurements &&
this._containerMeasurements;
},
updateTabPanel(position, pageOffset) {
const containerWidth = this._containerMeasurements.width;
const tabWidth = this._tabsMeasurements[position].width;
const nextTabMeasurements = this._tabsMeasurements[position + 1];
const nextTabWidth = nextTabMeasurements && nextTabMeasurements.width || 0;
const tabOffset = this._tabsMeasurements[position].left;
const absolutePageOffset = pageOffset * tabWidth;
let newScrollX = tabOffset + absolutePageOffset;
// center tab and smooth tab change (for when tabWidth changes a lot between two tabs)
newScrollX -= (containerWidth - (1 - pageOffset) * tabWidth - pageOffset * nextTabWidth) / 2;
newScrollX = newScrollX >= 0 ? newScrollX : 0;
if (Platform.OS === 'android') {
this._scrollView.scrollTo({x: newScrollX, y: 0, animated: false, });
} else {
const rightBoundScroll = this._tabContainerMeasurements.width - (this._containerMeasurements.width);
newScrollX = newScrollX > rightBoundScroll ? rightBoundScroll : newScrollX;
this._scrollView.scrollTo({x: newScrollX, y: 0, animated: false, });
}
},
updateTabUnderline(position, pageOffset, tabCount) {
const lineLeft = this._tabsMeasurements[position].left;
const lineRight = this._tabsMeasurements[position].right;
if (position < tabCount - 1) {
const nextTabLeft = this._tabsMeasurements[position + 1].left;
const nextTabRight = this._tabsMeasurements[position + 1].right;
const newLineLeft = (pageOffset * nextTabLeft + (1 - pageOffset) * lineLeft);
const newLineRight = (pageOffset * nextTabRight + (1 - pageOffset) * lineRight);
this.state._leftTabUnderline.setValue(newLineLeft);
this.state._widthTabUnderline.setValue(newLineRight - newLineLeft);
} else {
this.state._leftTabUnderline.setValue(lineLeft);
this.state._widthTabUnderline.setValue(lineRight - lineLeft);
}
},
renderTab(name, page, isTabActive, onPressHandler, onLayoutHandler, tabStyle, activeTabStyle, textStyle, activeTextStyle, tabHeaderStyle) {
const headerContent = (typeof name!=='string') ? name.props.children : undefined;
const { activeTextColor, inactiveTextColor } = this.props;
const textColor = isTabActive ? activeTextColor : inactiveTextColor;
const fontWeight = isTabActive ? 'bold' : 'normal';
if (typeof name==='string') {
return <Button
key={`${name}_${page}`}
onPress={() => onPressHandler(page)}
onLayout={onLayoutHandler}
>
<TabHeading scrollable style={(isTabActive) ? activeTabStyle : tabStyle} active={isTabActive}>
<Text style={(isTabActive) ? activeTextStyle : textStyle}>
{name}
</Text>
</TabHeading>
</Button>;
}
else {
return <Button
key={_.random(1.2, 5.2)}
onPress={() => onPressHandler(page)}
>
<TabHeading scrollable style={tabHeaderStyle} active={isTabActive}>
{headerContent}
</TabHeading>
</Button>
}
},
measureTab(page, event) {
const { x, width, height, } = event.nativeEvent.layout;
this._tabsMeasurements[page] = {left: x, right: x + width, width, height, };
this.updateView({value: this.props.scrollValue._value, });
},
render() {
const variables = (this.context.theme) ? this.context.theme['@@shoutem.theme/themeStyle'].variables : variable;
const tabUnderlineStyle = {
position: 'absolute',
height: 4,
backgroundColor: variables.topTabBarActiveBorderColor,
bottom: 0
};
const dynamicTabUnderline = {
left: this.state._leftTabUnderline,
width: this.state._widthTabUnderline,
};
return <View
style={[styles.container, {backgroundColor: this.props.backgroundColor, }, this.props.style, ]}
onLayout={this.onContainerLayout}
>
<ScrollView
automaticallyAdjustContentInsets={false}
ref={(scrollView) => { this._scrollView = scrollView; }}
horizontal={true}
showsHorizontalScrollIndicator={false}
showsVerticalScrollIndicator={false}
directionalLockEnabled={true}
onScroll={this.props.onScroll}
bounces={false}
scrollsToTop={false}
>
<View
style={[styles.tabs, {width: this.state._containerWidth, }, this.props.tabsContainerStyle, ]}
ref={'tabContainer'}
onLayout={this.onTabContainerLayout}
>
{this.props.tabs.map((name, page) => {
const isTabActive = this.props.activeTab === page;
const renderTab = this.props.renderTab || this.renderTab;
return renderTab(name, page, isTabActive, this.props.goToPage, this.measureTab.bind(this, page), this.props.tabStyle[page], this.props.activeTabStyle[page], this.props.textStyle[page], this.props.activeTextStyle[page], this.props.tabHeaderStyle[page]);
})}
<Animated.View style={[tabUnderlineStyle, dynamicTabUnderline, this.props.underlineStyle, ]} />
</View>
</ScrollView>
</View>;
},
componentWillReceiveProps(nextProps) {
// If the tabs change, force the width of the tabs container to be recalculated
if (JSON.stringify(this.props.tabs) !== JSON.stringify(nextProps.tabs) && this.state._containerWidth) {
this.setState({ _containerWidth: null, });
}
},
onTabContainerLayout(e) {
this._tabContainerMeasurements = e.nativeEvent.layout;
let width = this._tabContainerMeasurements.width;
if (width < WINDOW_WIDTH) {
width = WINDOW_WIDTH;
}
this.setState({ _containerWidth: width, });
this.updateView({value: this.props.scrollValue._value, });
},
onContainerLayout(e) {
this._containerMeasurements = e.nativeEvent.layout;
this.updateView({value: this.props.scrollValue._value, });
},
});
// module.exports = ScrollableTabBar;
// Wrap the tab bar with the NativeBase theme connector so styles can be
// resolved from the active theme.
// NOTE(review): `mapPropsToStyleNames` is referenced here but is not imported
// anywhere in this file — confirm it is brought into scope (e.g. from
// '../../utils/mapPropsToStyleNames').
const StyledTab = connectStyle('NativeBase.ScrollableTab', {}, mapPropsToStyleNames)(ScrollableTabBar);
export {
  StyledTab as ScrollableTab,
};
// Static styles for the scrollable tab bar.
const styles = StyleSheet.create({
  // A single tab button: fixed height, centred content, horizontal padding.
  tab: {
    height: 49,
    alignItems: 'center',
    justifyContent: 'center',
    paddingLeft: 20,
    paddingRight: 20,
  },
  // Outer bar: fixed height with only a bottom hairline border.
  container: {
    height: 50,
    borderWidth: 1,
    borderTopWidth: 0,
    borderLeftWidth: 0,
    borderRightWidth: 0,
    borderColor: '#ccc',
  },
  // Row that lays the individual tab buttons out horizontally.
  tabs: {
    flexDirection: 'row',
    justifyContent: 'space-around',
  },
});
| apache-2.0 |
srose/keycloak | services/src/main/java/org/keycloak/services/clientpolicy/condition/ClientRolesConditionFactory.java | 2484 | /*
* Copyright 2021 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.services.clientpolicy.condition;
import java.util.ArrayList;
import java.util.List;
import org.keycloak.models.KeycloakSession;
import org.keycloak.provider.ProviderConfigProperty;
/**
 * Factory for the {@code client-roles} client-policy condition. The produced
 * condition makes a policy apply only to clients carrying one of the client
 * roles configured under the {@link #ROLES} property.
 *
 * @author <a href="mailto:takashi.norimatsu.ws@hitachi.com">Takashi Norimatsu</a>
 */
public class ClientRolesConditionFactory extends AbstractClientPolicyConditionProviderFactory {

    public static final String PROVIDER_ID = "client-roles";
    public static final String ROLES = "roles";

    // Shared, effectively immutable configuration metadata for this provider.
    private static final List<ProviderConfigProperty> configProperties = new ArrayList<ProviderConfigProperty>();

    static {
        addCommonConfigProperties(configProperties);
        // Multivalued list of client role names the condition matches against.
        configProperties.add(new ProviderConfigProperty(
                ROLES,
                PROVIDER_ID + ".label",
                PROVIDER_ID + "-condition.tooltip",
                ProviderConfigProperty.MULTIVALUED_STRING_TYPE,
                null));
    }

    @Override
    public ClientPolicyConditionProvider create(KeycloakSession session) {
        return new ClientRolesCondition(session);
    }

    @Override
    public String getId() {
        return PROVIDER_ID;
    }

    @Override
    public String getHelpText() {
        return "The condition checks whether one of the specified client roles exists on the client to determine whether the policy is applied. This effectively allows client administrator to create client role of specified name on the client to make sure that particular client policy will be applied on requests of this client. Condition is checked during most of OpenID Connect requests (Authorization request, token requests, introspection endpoint request etc).";
    }

    @Override
    public List<ProviderConfigProperty> getConfigProperties() {
        return configProperties;
    }
}
| apache-2.0 |
ctripcorp/cat.js | libs/async.js | 1083 | var path=require('path');
var cUtil=require(path.resolve(__dirname,'./cutil.js'));
// Minimal fan-out helper: register any number of async jobs with `add`, kick
// them all off with `start`, and receive a single callback once every job has
// reported back. The callback gets the array of collected errors, or null
// when every job succeeded.
function Async(callback) {
  this._enabled = false;
  this._data = {};
  this._callback = callback;
  this._errors = [];
}

// Register a job. `fn` is invoked as fn(done, ...extraArgs) and must call
// done(err) exactly once when it finishes.
Async.prototype.add = function (fn) {
  var self = this;
  var extraArgs = Array.prototype.slice.call(arguments, 1);
  var id = cUtil.uid();
  self._data[id] = {
    fn: fn,
    args: extraArgs,
    done: function (err) {
      if (err) {
        self._errors.push(err);
      }
      delete self._data[id];
      self._check();
    }
  };
};

// Launch every registered job, then run a completion check immediately so the
// callback still fires when no jobs were added (or all finished synchronously).
Async.prototype.start = function () {
  var self = this;
  self._enabled = true;
  Object.keys(self._data).forEach(function (id) {
    var job = self._data[id];
    // A synchronously-completing job may already have removed itself.
    if (job) {
      job.fn.apply(null, [job.done].concat(job.args));
    }
  });
  self._check();
};

// Fire the final callback once `start` has been called and no jobs remain;
// disarm afterwards so the callback cannot fire twice.
Async.prototype._check = function () {
  if (!this._enabled) {
    return;
  }
  if (Object.keys(this._data).length === 0) {
    if (this._callback) {
      this._callback(this._errors.length ? this._errors : null);
    }
    this._enabled = false;
  }
};
module.exports=Async; | apache-2.0 |
Eyas/TypeScript | tests/baselines/reference/interfaceClassMerging.js | 1913 | //// [interfaceClassMerging.ts]
interface Foo {
method(a: number): string;
optionalMethod?(a: number): string;
property: string;
optionalProperty?: string;
}
class Foo {
additionalProperty: string;
additionalMethod(a: number): string {
return this.method(0);
}
}
class Bar extends Foo {
method(a: number) {
return this.optionalProperty;
}
}
var bar = new Bar();
bar.method(0);
bar.optionalMethod(1);
bar.property;
bar.optionalProperty;
bar.additionalProperty;
bar.additionalMethod(2);
var obj: {
method(a: number): string;
property: string;
additionalProperty: string;
additionalMethod(a: number): string;
};
bar = obj;
obj = bar;
//// [interfaceClassMerging.js]
var __extends = (this && this.__extends) || (function () {
var extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };
return function (d, b) {
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
var Foo = /** @class */ (function () {
function Foo() {
}
Foo.prototype.additionalMethod = function (a) {
return this.method(0);
};
return Foo;
}());
var Bar = /** @class */ (function (_super) {
__extends(Bar, _super);
function Bar() {
return _super !== null && _super.apply(this, arguments) || this;
}
Bar.prototype.method = function (a) {
return this.optionalProperty;
};
return Bar;
}(Foo));
var bar = new Bar();
bar.method(0);
bar.optionalMethod(1);
bar.property;
bar.optionalProperty;
bar.additionalProperty;
bar.additionalMethod(2);
var obj;
bar = obj;
obj = bar;
| apache-2.0 |
data-integrations/anaplan | src/main/java/com/anaplan/client/transport/interceptors/UserAgentInjector.java | 1739 | package com.anaplan.client.transport.interceptors;
import com.anaplan.client.Constants;
import com.anaplan.client.ex.AnaplanAPIException;
import feign.RequestInterceptor;
import feign.RequestTemplate;
/**
 * Feign request interceptor that injects a {@code User-Agent} header on every
 * outbound request, identifying the Anaplan Connect client version, the JVM
 * and the operating system.
 */
public class UserAgentInjector implements RequestInterceptor {

    // Client version parts taken from the shared Constants class.
    private static final int MAJOR_VERSION = Constants.AC_MAJOR;
    private static final int MINOR_VERSION = Constants.AC_MINOR;
    private static final int REVISION_VERSION = Constants.AC_REVISION;
    // commenting it for the Anaplan Connect 1.4.2 release
    //private static final String RELEASE_VERSION = Constants.AC_Release;

    /**
     * Adds the User-Agent header to the outgoing request template.
     *
     * @param template the Feign request being built
     */
    @Override
    public void apply(RequestTemplate template) {
        template.header("User-Agent", buildUserAgentIdentifier());
    }

    /**
     * Generate a suitable value for a User-Agent header, of the form
     * {@code <class>/<major>.<minor>.<revision>; <jvm info>; <os info>)}.
     */
    private String buildUserAgentIdentifier() throws AnaplanAPIException {
        StringBuilder result = new StringBuilder(getClass().getName());
        result.append("/").append(MAJOR_VERSION).append(".")
            .append(MINOR_VERSION);
        result.append(".").append(REVISION_VERSION);
        // JVM identity, e.g. "OpenJDK 64-Bit Server VM (Oracle)/25.0 (1.8.0)"
        String vmIdentifier = System.getProperty("java.vm.name") + " ("
            + System.getProperty("java.vendor") + ")/"
            + System.getProperty("java.vm.version") + " ("
            + System.getProperty("java.version") + ")";
        result.append("; ").append(vmIdentifier);
        String osIdentifier = System.getProperty("os.name") + " ("
            + System.getProperty("os.arch") + ")/"
            + System.getProperty("os.version");
        // NOTE(review): the trailing ')' appended below has no matching '(' in
        // the assembled string — looks unintentional; confirm before changing.
        result.append("; ").append(osIdentifier).append(')');
        return result.toString();
    }
}
| apache-2.0 |
JohnTheodore/cloud-custodian | tools/c7n_mailer/c7n_mailer/sqs_queue_processor.py | 7051 | # Copyright 2017 Capital One Services, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
SQS Message Processing
===============
"""
import base64
import json
import logging
import zlib
import six
from .email_delivery import EmailDelivery
from .sns_delivery import SnsDelivery
DATA_MESSAGE = "maidmsg/1.0"
class MailerSqsQueueIterator(object):
    """Iterator over messages on an SQS queue, fetched in small batches.

    Copied from custodian to avoid a runtime library dependency. Messages are
    buffered locally; iteration raises StopIteration as soon as a receive
    call returns no messages.
    """

    # Message attributes requested from SQS with every receive call.
    msg_attributes = ['sequence_id', 'op', 'ser']

    def __init__(self, aws_sqs, queue_url, logger, limit=0, timeout=10):
        self.aws_sqs = aws_sqs
        self.queue_url = queue_url
        self.limit = limit
        self.logger = logger
        self.timeout = timeout
        self.messages = []

    def __iter__(self):
        # The iterator is its own iterable.
        return self

    def __next__(self):
        # Serve from the local buffer first; otherwise pull another batch.
        if not self.messages:
            response = self.aws_sqs.receive_message(
                QueueUrl=self.queue_url,
                WaitTimeSeconds=self.timeout,
                MaxNumberOfMessages=3,
                MessageAttributeNames=self.msg_attributes)
            batch = response.get('Messages', [])
            self.logger.debug('Messages received %d', len(batch))
            self.messages.extend(batch)
        if self.messages:
            return self.messages.pop(0)
        raise StopIteration()

    next = __next__  # python2.7 compatibility

    def ack(self, m):
        # Delete the message from the queue so it will not be redelivered.
        self.aws_sqs.delete_message(
            QueueUrl=self.queue_url,
            ReceiptHandle=m['ReceiptHandle'])
class MailerSqsQueueProcessor(object):
    """Drains the c7n mailer SQS queue and dispatches each message.

    Each SQS message body is a base64-encoded, zlib-compressed JSON payload
    produced by custodian's notify action; delivery fans out to email
    (SES/SMTP) and SNS.
    """

    def __init__(self, config, session, logger, max_num_processes=16):
        self.config = config
        self.logger = logger
        self.session = session
        self.max_num_processes = max_num_processes
        self.receive_queue = self.config['queue_url']
        if self.config.get('debug', False):
            self.logger.debug('debug logging is turned on from mailer config file.')
            logger.setLevel(logging.DEBUG)

    # Delivery cases handled here:
    # - aws resource is tagged CreatorName: 'milton', ldap_tag_uids has CreatorName,
    #   we do an ldap lookup, get milton's email and send him an email
    # - you put an email in the to: field of the notify of your policy, we send an
    #   email for all resources enforced by that policy
    # - you put an sns topic in the to: field of the notify of your policy, we send
    #   an sns message for all resources enforced by that policy
    # - a lambda enforces a policy based on an event, we look up the event's aws
    #   username, get their ldap email and send them an email about the enforcement
    # - resource-owners has a list of tags (SupportEmail, OwnerEmail); resources
    #   carrying those tags with valid emails get an email
    # - resource-owners has SnSTopic; resources carrying a valid topic value get
    #   an sns message
    def run(self, parallel=False):
        """Receive and process messages until the queue is drained.

        :param parallel: when True, process messages in a multiprocessing pool
            (unavailable under Lambda, hence the False default).
        """
        self.logger.info("Downloading messages from the SQS queue.")
        aws_sqs = self.session.client('sqs')
        sqs_messages = MailerSqsQueueIterator(aws_sqs, self.receive_queue, self.logger)
        sqs_messages.msg_attributes = ['mtype', 'recipient']
        # lambda doesn't support multiprocessing, so we don't instantiate any mp stuff
        # unless it's being run from CLI on a normal system with SHM
        if parallel:
            import multiprocessing
            process_pool = multiprocessing.Pool(processes=self.max_num_processes)
        for sqs_message in sqs_messages:
            self.logger.debug(
                "Message id: %s received %s" % (
                    sqs_message['MessageId'], sqs_message.get('MessageAttributes', '')))
            msg_kind = sqs_message.get('MessageAttributes', {}).get('mtype')
            if msg_kind:
                msg_kind = msg_kind['StringValue']
            if not msg_kind == DATA_MESSAGE:
                warning_msg = 'Unknown sqs_message format %s' % (sqs_message['Body'][:50])
                self.logger.warning(warning_msg)
            if parallel:
                # BUG FIX: `args` must be a tuple of positional arguments.
                # Previously the message dict itself was passed as `args`,
                # which unpacked its keys as separate positional arguments.
                process_pool.apply_async(
                    self.process_sqs_messsage, args=(sqs_message,))
            else:
                self.process_sqs_messsage(sqs_message)
            self.logger.debug('Processed sqs_message')
            # NOTE(review): in parallel mode the message is acked before the
            # worker finishes, so a worker failure loses the message — confirm
            # this at-most-once behavior is intended.
            sqs_messages.ack(sqs_message)
        if parallel:
            process_pool.close()
            process_pool.join()
        self.logger.info('No sqs_messages left on the queue, exiting c7n_mailer.')
        return

    # This function when processing sqs messages will only deliver messages over
    # email or sns. If you explicitly declare which tags are aws_usernames
    # (synonymous with ldap uids) in the ldap_uid_tags section of your mailer.yml,
    # we'll do a lookup of those emails (and their manager if that option is on)
    # and also send emails there.
    def process_sqs_messsage(self, encoded_sqs_message):
        """Decode one raw SQS message and deliver it over email and SNS.

        :param encoded_sqs_message: raw SQS message dict whose 'Body' is a
            base64-encoded, zlib-compressed JSON document.
        """
        sqs_message = json.loads(zlib.decompress(base64.b64decode(encoded_sqs_message['Body'])))
        self.logger.debug("Got account:%s message:%s %s:%d policy:%s recipients:%s" % (
            sqs_message.get('account', 'na'),
            encoded_sqs_message['MessageId'],
            sqs_message['policy']['resource'],
            len(sqs_message['resources']),
            sqs_message['policy']['name'],
            ', '.join(sqs_message['action']['to'])))
        # get the map of email_to_addresses to mimetext messages (with resources
        # baked in) and send any emails (to SES or SMTP) if addresses were found
        email_delivery = EmailDelivery(self.config, self.session, self.logger)
        to_addrs_to_email_messages_map = email_delivery.get_to_addrs_email_messages_map(sqs_message)
        for email_to_addrs, mimetext_msg in six.iteritems(to_addrs_to_email_messages_map):
            email_delivery.send_c7n_email(sqs_message, list(email_to_addrs), mimetext_msg)
        # this section gets the map of sns_to_addresses to rendered_jinja messages
        # (with resources baked in) and delivers the message to each sns topic
        sns_delivery = SnsDelivery(self.config, self.session, self.logger)
        sns_message_packages = sns_delivery.get_sns_message_packages(sqs_message)
        sns_delivery.deliver_sns_messages(sns_message_packages, sqs_message)
| apache-2.0 |
andreyleskov/GridDomain | GridDomain.Node/Actors/CommandPipe/MessageProcessors/SyncProjectionProcessor.cs | 302 | using Akka.Actor;
using GridDomain.Node.Actors.Hadlers;
namespace GridDomain.Node.Actors.CommandPipe.MessageProcessors {
    /// <summary>
    /// Specialization of <see cref="ActorAskMessageProcessor{T}"/> that sends
    /// messages to the given processor actor and expects a
    /// <see cref="HandlerExecuted"/> reply (i.e. the projection is awaited
    /// before the pipeline continues — presumably why it is named "sync";
    /// confirm against the base class semantics).
    /// </summary>
    public class SyncProjectionProcessor : ActorAskMessageProcessor<HandlerExecuted>
    {
        public SyncProjectionProcessor(IActorRef processor) : base(processor) { }
    }
} | apache-2.0 |
jivesoftware/amza | amza-api/src/main/java/com/jivesoftware/os/amza/api/take/TakeCursors.java | 752 | package com.jivesoftware.os.amza.api.take;
import com.jivesoftware.os.amza.api.ring.RingMember;
import java.util.List;
/**
 * Result of a take operation: one cursor per ring member plus a flag telling
 * whether the take consumed everything available ("took to end").
 */
public class TakeCursors {

    // One cursor per ring member that contributed to the take.
    public final List<RingMemberCursor> ringMemberCursors;
    // True when the take reached the end of the available data.
    public final boolean tookToEnd;

    public TakeCursors(List<RingMemberCursor> ringMemberCursors, boolean tookToEnd) {
        this.ringMemberCursors = ringMemberCursors;
        this.tookToEnd = tookToEnd;
    }

    /**
     * Pairs a ring member with its transaction-id cursor position.
     */
    public static class RingMemberCursor {

        public final RingMember ringMember;
        public final long transactionId;

        public RingMemberCursor(RingMember ringMember, long transactionId) {
            this.ringMember = ringMember;
            this.transactionId = transactionId;
        }
    }
}
| apache-2.0 |
ua-eas/ua-rice-2.1.9 | kns/src/main/java/org/kuali/rice/kns/web/struts/action/KualiMultipleValueLookupAction.java | 31616 | /**
* Copyright 2005-2014 The Kuali Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ecl2.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kuali.rice.kns.web.struts.action;
import org.apache.commons.beanutils.BeanComparator;
import org.apache.commons.lang.StringUtils;
import org.apache.struts.action.ActionForm;
import org.apache.struts.action.ActionForward;
import org.apache.struts.action.ActionMapping;
import org.kuali.rice.core.api.util.RiceConstants;
import org.kuali.rice.kns.lookup.HtmlData;
import org.kuali.rice.kns.lookup.LookupResultsService;
import org.kuali.rice.kns.lookup.LookupUtils;
import org.kuali.rice.kns.lookup.Lookupable;
import org.kuali.rice.kns.service.KNSServiceLocator;
import org.kuali.rice.kns.web.struts.form.MultipleValueLookupForm;
import org.kuali.rice.kns.web.ui.Column;
import org.kuali.rice.kns.web.ui.ResultRow;
import org.kuali.rice.krad.lookup.CollectionIncomplete;
import org.kuali.rice.krad.service.KRADServiceLocator;
import org.kuali.rice.krad.service.SequenceAccessorService;
import org.kuali.rice.krad.util.GlobalVariables;
import org.kuali.rice.krad.util.KRADConstants;
import org.kuali.rice.krad.util.UrlFactory;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
/**
* This class serves as the struts action for implementing multiple value lookups
*/
public class KualiMultipleValueLookupAction extends KualiLookupAction implements KualiTableRenderAction {
    private static final org.apache.log4j.Logger LOG = org.apache.log4j.Logger.getLogger(KualiMultipleValueLookupAction.class);

    /**
     * If there is no app param defined for the # rows/page, then this value
     * will be used for the default.
     *
     * @see KualiMultipleValueLookupAction#getMaxRowsPerPage(MultipleValueLookupForm)
     */
    public static final int DEFAULT_MAX_ROWS_PER_PAGE = 50;
/**
* This method performs the search, and will be responsible for persisting the results via the LookupResultsService.
* This overrides the superclass's search action method b/c of the differences in how the results are generated and it populates
* certain attributes that are specific to MultipleValueLookupForm
*
* @param mapping
* @param form must be an instance of MultipleValueLookupForm
* @param request
* @param response
*/
@Override
public ActionForward search(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception {
MultipleValueLookupForm multipleValueLookupForm = (MultipleValueLookupForm) form;
// If this is a new search, clear out the old search results.
String methodToCall = findMethodToCall(form, request);
if (methodToCall.equalsIgnoreCase("search")) {
GlobalVariables.getUserSession().removeObjectsByPrefix(KRADConstants.SEARCH_METHOD);
}
Lookupable kualiLookupable = multipleValueLookupForm.getLookupable();
if (kualiLookupable == null) {
LOG.error("Lookupable is null.");
throw new RuntimeException("Lookupable is null.");
}
Collection displayList = new ArrayList();
ArrayList<ResultRow> resultTable = new ArrayList<ResultRow>();
// validate search parameters
kualiLookupable.validateSearchParameters(multipleValueLookupForm.getFields());
boolean bounded = true;
displayList = performMultipleValueLookup(multipleValueLookupForm, resultTable, getMaxRowsPerPage(multipleValueLookupForm), bounded);
if (kualiLookupable.isSearchUsingOnlyPrimaryKeyValues()) {
multipleValueLookupForm.setSearchUsingOnlyPrimaryKeyValues(true);
multipleValueLookupForm.setPrimaryKeyFieldLabels(kualiLookupable.getPrimaryKeyFieldLabels());
}
else {
multipleValueLookupForm.setSearchUsingOnlyPrimaryKeyValues(false);
multipleValueLookupForm.setPrimaryKeyFieldLabels(KRADConstants.EMPTY_STRING);
}
//request.setAttribute("reqSearchResultsActualSize", ((CollectionIncomplete) displayList).getActualSizeIfTruncated());
if ( displayList instanceof CollectionIncomplete ){
request.setAttribute("reqSearchResultsActualSize", ((CollectionIncomplete) displayList).getActualSizeIfTruncated());
} else {
request.setAttribute("reqSearchResultsActualSize", displayList.size() );
}
request.setAttribute("reqSearchResults", resultTable);
//multipleValueLookupForm.setResultsActualSize((int) ((CollectionIncomplete) displayList).getActualSizeIfTruncated().longValue());
if ( displayList instanceof CollectionIncomplete ){
multipleValueLookupForm.setResultsActualSize((int) ((CollectionIncomplete) displayList).getActualSizeIfTruncated().longValue());
} else {
multipleValueLookupForm.setResultsActualSize(displayList.size());
}
multipleValueLookupForm.setResultsLimitedSize(resultTable.size());
if (request.getParameter(KRADConstants.SEARCH_LIST_REQUEST_KEY) != null) {
GlobalVariables.getUserSession().removeObject(request.getParameter(KRADConstants.SEARCH_LIST_REQUEST_KEY));
}
request.setAttribute(KRADConstants.SEARCH_LIST_REQUEST_KEY, GlobalVariables.getUserSession().addObjectWithGeneratedKey(resultTable, KRADConstants.SEARCH_LIST_KEY_PREFIX));
request.getParameter(KRADConstants.REFRESH_CALLER);
return mapping.findForward(RiceConstants.MAPPING_BASIC);
}
/**
* This method switches to another page on a multi-value lookup
*
* @param mapping
* @param form must be an instance of MultipleValueLookupForm
* @param request
* @param response
* @return
* @throws Exception
*/
@Override
public ActionForward switchToPage(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception {
MultipleValueLookupForm multipleValueLookupForm = (MultipleValueLookupForm) form;
List<ResultRow> resultTable = switchToPage(multipleValueLookupForm, getMaxRowsPerPage(multipleValueLookupForm));
request.setAttribute("reqSearchResults", resultTable);
return mapping.findForward(RiceConstants.MAPPING_BASIC);
}
/**
* This method sorts a column. If the page is currently sorted on a certain column,
* and the same column is selected to be sorted again, then the results will be
* reversed. After the search method is called, it is difficult to determine the sort
* order of the result table, so no column is considered sorted. So, after a search, we were
* to click sort on an already sorted column, it would appear to have no effect. Subsequent clicks
* would tell you
*
* @param mapping
* @param form must be an instance of MultipleValueLookupForm
* @param request
* @param response
* @return
* @throws Exception
*/
@Override
public ActionForward sort(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception {
MultipleValueLookupForm multipleValueLookupForm = (MultipleValueLookupForm) form;
List<ResultRow> resultTable = sort(multipleValueLookupForm, getMaxRowsPerPage(multipleValueLookupForm));
request.setAttribute("reqSearchResults", resultTable);
return mapping.findForward(RiceConstants.MAPPING_BASIC);
}
/**
* This method does the processing necessary to return selected results and sends a redirect back to the lookup caller
*
* @param mapping
* @param form must be an instance of MultipleValueLookupForm
* @param request
* @param response
* @return
* @throws Exception
*/
public ActionForward prepareToReturnSelectedResults(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception {
MultipleValueLookupForm multipleValueLookupForm = (MultipleValueLookupForm) form;
if (StringUtils.isBlank(multipleValueLookupForm.getLookupResultsSequenceNumber())) {
// no search was executed
return prepareToReturnNone(mapping, form, request, response);
}
prepareToReturnSelectedResultBOs(multipleValueLookupForm);
// build the parameters for the refresh url
Properties parameters = new Properties();
parameters.put(KRADConstants.LOOKUP_RESULTS_BO_CLASS_NAME, multipleValueLookupForm.getBusinessObjectClassName());
parameters.put(KRADConstants.LOOKUP_RESULTS_SEQUENCE_NUMBER, multipleValueLookupForm.getLookupResultsSequenceNumber());
parameters.put(KRADConstants.DOC_FORM_KEY, multipleValueLookupForm.getFormKey());
parameters.put(KRADConstants.DISPATCH_REQUEST_PARAMETER, KRADConstants.RETURN_METHOD_TO_CALL);
parameters.put(KRADConstants.REFRESH_CALLER, KRADConstants.MULTIPLE_VALUE);
parameters.put(KRADConstants.ANCHOR, multipleValueLookupForm.getLookupAnchor());
parameters.put(KRADConstants.LOOKED_UP_COLLECTION_NAME, multipleValueLookupForm.getLookedUpCollectionName());
if(multipleValueLookupForm.getDocNum() != null){
parameters.put(KRADConstants.DOC_NUM, multipleValueLookupForm.getDocNum());
}
String backUrl = UrlFactory.parameterizeUrl(multipleValueLookupForm.getBackLocation(), parameters);
return new ActionForward(backUrl, true);
}
/**
 * Marks every result row, across all pages, as selected and re-renders the page.
 *
 * @param mapping
 * @param form must be an instance of MultipleValueLookupForm
 * @param request
 * @param response
 * @return forward to the basic mapping so the page re-renders
 * @throws Exception
 */
public ActionForward selectAll(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception {
    MultipleValueLookupForm mvlForm = (MultipleValueLookupForm) form;
    int maxRows = getMaxRowsPerPage(mvlForm);
    // expose the (now fully-selected) rows to the JSP via the request
    request.setAttribute("reqSearchResults", selectAll(mvlForm, maxRows));
    return mapping.findForward(RiceConstants.MAPPING_BASIC);
}
/**
 * Clears the selection on every result row, across all pages, and re-renders the page.
 *
 * @param mapping
 * @param form must be an instance of MultipleValueLookupForm
 * @param request
 * @param response
 * @return forward to the basic mapping so the page re-renders
 * @throws Exception
 */
public ActionForward unselectAll(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception {
    MultipleValueLookupForm mvlForm = (MultipleValueLookupForm) form;
    int maxRows = getMaxRowsPerPage(mvlForm);
    // expose the (now fully-deselected) rows to the JSP via the request
    request.setAttribute("reqSearchResults", unselectAll(mvlForm, maxRows));
    return mapping.findForward(RiceConstants.MAPPING_BASIC);
}
/**
 * This method overrides the super class cancel method because it is basically equivalent to clicking prepare to return none
 *
 * @see KualiLookupAction#cancel(org.apache.struts.action.ActionMapping, org.apache.struts.action.ActionForm, javax.servlet.http.HttpServletRequest, javax.servlet.http.HttpServletResponse)
 */
@Override
public ActionForward cancel(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception {
    // canceling a multiple value lookup is treated as returning no selections
    return prepareToReturnNone(mapping, form, request, response);
}
/**
 * Returns none of the selected results and redirects back to the lookup caller.
 *
 * @param mapping
 * @param form must be an instance of MultipleValueLookupForm
 * @param request
 * @param response
 * @return a redirecting ActionForward targeting the lookup caller's back location
 * @throws Exception
 */
public ActionForward prepareToReturnNone(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception {
    MultipleValueLookupForm mvlForm = (MultipleValueLookupForm) form;
    // clear any persisted results before redirecting away
    prepareToReturnNone(mvlForm);

    // assemble the refresh parameters for the redirect back to the caller
    Properties redirectParams = new Properties();
    redirectParams.put(KRADConstants.DOC_FORM_KEY, mvlForm.getFormKey());
    redirectParams.put(KRADConstants.DISPATCH_REQUEST_PARAMETER, KRADConstants.RETURN_METHOD_TO_CALL);
    redirectParams.put(KRADConstants.REFRESH_CALLER, KRADConstants.MULTIPLE_VALUE);
    redirectParams.put(KRADConstants.ANCHOR, mvlForm.getLookupAnchor());
    if (mvlForm.getDocNum() != null) {
        // only present when the lookup was launched from a document
        redirectParams.put(KRADConstants.DOC_NUM, mvlForm.getDocNum());
    }
    return new ActionForward(UrlFactory.parameterizeUrl(mvlForm.getBackLocation(), redirectParams), true);
}
/**
 * Prepares the persisted search results for export via display tag. Note: rows selected since the
 * last page view are not captured here, so it is best that exporting opens in a new browser window.
 *
 * @param mapping
 * @param form must be an instance of MultipleValueLookupForm
 * @param request
 * @param response
 * @return forward to the basic mapping so the export renders
 * @throws Exception
 */
public ActionForward export(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception {
    MultipleValueLookupForm mvlForm = (MultipleValueLookupForm) form;
    // expose the retrieved rows to display tag via the request
    request.setAttribute("reqSearchResults", prepareToExport(mvlForm));
    return mapping.findForward(RiceConstants.MAPPING_BASIC);
}
/**
 * This method performs the lookup and returns a collection of lookup items. Also initializes values in the form
 * that will allow the multiple value lookup page to render
 *
 * @param multipleValueLookupForm the form backing the multiple value lookup page
 * @param resultTable a list of result rows (used to generate what's shown in the UI). This list will be modified by this method
 * @param maxRowsPerPage maximum number of result rows rendered per page
 * @param bounded whether the results will be bounded
 * @return the list of result BOs, possibly bounded by size
 */
protected Collection performMultipleValueLookup(MultipleValueLookupForm multipleValueLookupForm, List<ResultRow> resultTable, int maxRowsPerPage, boolean bounded) {
    Lookupable lookupable = multipleValueLookupForm.getLookupable();
    // performLookup populates resultTable as a side effect and returns the matching BOs
    Collection displayList = lookupable.performLookup(multipleValueLookupForm, resultTable, bounded);
    List defaultSortColumns = lookupable.getDefaultSortColumns();
    if (defaultSortColumns != null && !defaultSortColumns.isEmpty() && resultTable != null && !resultTable.isEmpty()) {
        // there's a default sort order, just find the first sort column, and we can't go wrong
        String firstSortColumn = (String) defaultSortColumns.get(0);
        // go thru the first result row to find the index of the column (more efficient than calling lookupable.getColumns since we don't have to recreate column list)
        int firstSortColumnIdx = -1;
        List<Column> columnsForFirstResultRow = resultTable.get(0).getColumns();
        for (int i = 0; i < columnsForFirstResultRow.size(); i++) {
            if (StringUtils.equals(firstSortColumn, columnsForFirstResultRow.get(i).getPropertyName())) {
                firstSortColumnIdx = i;
                break;
            }
        }
        // note: stays -1 if the default sort column is not among the row's columns
        multipleValueLookupForm.setColumnToSortIndex(firstSortColumnIdx);
    }
    else {
        // don't know how results were sorted, so we just say -1
        multipleValueLookupForm.setColumnToSortIndex(-1);
    }
    // we just performed the lookup, so we're on the first page (indexed from 0)
    multipleValueLookupForm.jumpToFirstPage(resultTable.size(), maxRowsPerPage);
    // allocate a fresh sequence number keying this search's persisted rows
    SequenceAccessorService sas = KRADServiceLocator.getSequenceAccessorService();
    Long nextSeq = sas.getNextAvailableSequenceNumber(KRADConstants.LOOKUP_RESULTS_SEQUENCE);
    String lookupResultsSequenceNumber = nextSeq.toString();
    multipleValueLookupForm.setLookupResultsSequenceNumber(lookupResultsSequenceNumber);
    try {
        // persist the rendered rows so paging/sorting can re-read them without re-querying
        LookupResultsService lookupResultsService = KNSServiceLocator.getLookupResultsService();
        lookupResultsService.persistResultsTable(lookupResultsSequenceNumber, resultTable,
                GlobalVariables.getUserSession().getPerson().getPrincipalId());
    }
    catch (Exception e) {
        LOG.error("error occured trying to persist multiple lookup results", e);
        throw new RuntimeException("error occured trying to persist multiple lookup results");
    }
    // since new search, nothing's checked
    multipleValueLookupForm.setCompositeObjectIdMap(new HashMap<String, String>());
    return displayList;
}
/**
 * Performs the operations necessary for a multiple value lookup to switch to another page of
 * results and re-render the page: reloads the persisted rows, jumps to the requested page,
 * restores the sort column, and merges the selection state across pages.
 *
 * @param multipleValueLookupForm the form backing the multiple value lookup page
 * @param maxRowsPerPage maximum number of result rows rendered per page
 * @return a list of result rows, used by the UI to render the page
 */
protected List<ResultRow> switchToPage(MultipleValueLookupForm multipleValueLookupForm, int maxRowsPerPage) {
    String sequenceNumber = multipleValueLookupForm.getLookupResultsSequenceNumber();
    List<ResultRow> rows;
    try {
        rows = KNSServiceLocator.getLookupResultsService().retrieveResultsTable(sequenceNumber, GlobalVariables.getUserSession().getPerson().getPrincipalId());
    }
    catch (Exception e) {
        LOG.error("error occured trying to retrieve multiple lookup results", e);
        throw new RuntimeException("error occured trying to retrieve multiple lookup results");
    }
    multipleValueLookupForm.jumpToPage(multipleValueLookupForm.getSwitchToPageNumber(), rows.size(), maxRowsPerPage);
    // keep the same sort column as before the page switch
    multipleValueLookupForm.setColumnToSortIndex(Integer.parseInt(multipleValueLookupForm.getPreviouslySortedColumnIndex()));
    // merge selections made on the page just left with those remembered from earlier pages
    multipleValueLookupForm.setCompositeObjectIdMap(LookupUtils.generateCompositeSelectedObjectIds(
            multipleValueLookupForm.getPreviouslySelectedObjectIdSet(),
            multipleValueLookupForm.getDisplayedObjectIdSet(),
            multipleValueLookupForm.getSelectedObjectIdSet()));
    return rows;
}
/**
 * This method performs the operations necessary for a multiple value lookup to sort results and rerender the page
 *
 * @param multipleValueLookupForm the form backing the multiple value lookup page
 * @param maxRowsPerPage maximum number of result rows rendered per page
 * @return a list of result rows, used by the UI to render the page
 */
protected List<ResultRow> sort(MultipleValueLookupForm multipleValueLookupForm, int maxRowsPerPage) {
    String lookupResultsSequenceNumber = multipleValueLookupForm.getLookupResultsSequenceNumber();
    LookupResultsService lookupResultsService = KNSServiceLocator.getLookupResultsService();
    List<ResultRow> resultTable = null;
    try {
        // reload the previously-persisted rows rather than re-running the query
        resultTable = lookupResultsService.retrieveResultsTable(lookupResultsSequenceNumber, GlobalVariables.getUserSession().getPerson().getPrincipalId());
    }
    catch (Exception e) {
        LOG.error("error occured trying to retrieve multiple lookup results", e);
        throw new RuntimeException("error occured trying to retrieve multiple lookup results");
    }
    int columnToSortOn = multipleValueLookupForm.getColumnToSortIndex();
    int columnCurrentlySortedOn = Integer.parseInt(multipleValueLookupForm.getPreviouslySortedColumnIndex());
    // if columnCurrentlySortedOn is -1, that means that we don't know which column we were originally sorting on
    // after a search, it's hard to tell which of the columns we're sorted on,
    if (columnToSortOn == columnCurrentlySortedOn) {
        // we're already sorted on the same column that the user clicked on, so we reverse the list
        Collections.reverse(resultTable);
    }
    else {
        // sorting on a different column, so we have to sort
        // HACK ALERT for findBestValueComparatorForColumn, since there's no central place to know
        // which comparator we should use to compare values in a column
        Collections.sort(resultTable, new BeanComparator("columns[" + columnToSortOn + "].propertyValue", LookupUtils.findBestValueComparatorForColumn(resultTable, columnToSortOn)));
    }
    // repersist the list
    try {
        lookupResultsService.persistResultsTable(lookupResultsSequenceNumber, resultTable,
                GlobalVariables.getUserSession().getPerson().getPrincipalId());
    }
    catch (Exception e) {
        LOG.error("error occured trying to persist multiple lookup results", e);
        throw new RuntimeException("error occured trying to persist multiple lookup results");
    }
    // we just performed the sort, so go back to first page
    multipleValueLookupForm.jumpToFirstPage(resultTable.size(), maxRowsPerPage);
    // carry forward the selection state across the sort
    multipleValueLookupForm.setCompositeObjectIdMap(LookupUtils.generateCompositeSelectedObjectIds(multipleValueLookupForm.getPreviouslySelectedObjectIdSet(),
            multipleValueLookupForm.getDisplayedObjectIdSet(), multipleValueLookupForm.getSelectedObjectIdSet()));
    return resultTable;
}
/**
 * Records which results have been selected for return to the calling document by persisting
 * the composite set of selected object ids. Note, this method does not actually requery for
 * the results.
 *
 * @param multipleValueLookupForm the form backing the multiple value lookup page
 */
protected void prepareToReturnSelectedResultBOs(MultipleValueLookupForm multipleValueLookupForm) {
    String sequenceNumber = multipleValueLookupForm.getLookupResultsSequenceNumber();
    if (StringUtils.isBlank(sequenceNumber)) {
        // return was pressed before any search ran; nothing to persist
        return;
    }
    // merge selections made on the current page with those remembered from earlier pages
    Map<String, String> checkedIds = LookupUtils.generateCompositeSelectedObjectIds(
            multipleValueLookupForm.getPreviouslySelectedObjectIdSet(),
            multipleValueLookupForm.getDisplayedObjectIdSet(),
            multipleValueLookupForm.getSelectedObjectIdSet());
    try {
        KNSServiceLocator.getLookupResultsService().persistSelectedObjectIds(
                sequenceNumber, checkedIds.keySet(),
                GlobalVariables.getUserSession().getPerson().getPrincipalId());
    }
    catch (Exception e) {
        LOG.error("error occured trying to retrieve selected multiple lookup results", e);
        throw new RuntimeException("error occured trying to retrieve selected multiple lookup results");
    }
}
/**
 * Performs the operations necessary for a multiple value lookup to return no results to the
 * calling page: the persisted results (if any) are cleared and the sequence number reset.
 *
 * @param multipleValueLookupForm the form backing the multiple value lookup page
 */
protected void prepareToReturnNone(MultipleValueLookupForm multipleValueLookupForm) {
    String sequenceNumber = multipleValueLookupForm.getLookupResultsSequenceNumber();
    try {
        if (StringUtils.isNotBlank(sequenceNumber)) {
            // we're returning nothing, so drop the persisted rows and forget the sequence number
            KNSServiceLocator.getLookupResultsService().clearPersistedLookupResults(sequenceNumber);
            multipleValueLookupForm.setLookupResultsSequenceNumber(null);
        }
    }
    catch (Exception e) {
        // not a big deal, continue on and purge w/ a batch job
        LOG.error("error occured trying to clear lookup results seq nbr " + sequenceNumber, e);
    }
}
/**
 * Performs the operations necessary for a multiple value lookup to export the rows via display tag.
 *
 * Note: this method assumes that the export will be opened in a new browser window, therefore,
 * persisting the selected checkboxes will not be needed.
 *
 * @param multipleValueLookupForm the form backing the multiple value lookup page
 * @return a list of result rows, to be used by display tag to render the results
 */
protected List<ResultRow> prepareToExport(MultipleValueLookupForm multipleValueLookupForm) {
    String sequenceNumber = multipleValueLookupForm.getLookupResultsSequenceNumber();
    List<ResultRow> rows;
    try {
        rows = KNSServiceLocator.getLookupResultsService().retrieveResultsTable(
                sequenceNumber, GlobalVariables.getUserSession().getPerson().getPrincipalId());
    }
    catch (Exception e) {
        LOG.error("error occured trying to export multiple lookup results", e);
        throw new RuntimeException("error occured trying to export multiple lookup results");
    }
    return rows;
}
/**
 * Performs the operations necessary for a multiple value lookup to select all of the results
 * and rerender the page: every row's checkbox is marked checked and every row's object id is
 * recorded in the composite selection map.
 *
 * @param multipleValueLookupForm the form backing the multiple value lookup page
 * @param maxRowsPerPage maximum number of result rows rendered per page
 * @return a list of result rows, used by the UI to render the page
 */
protected List<ResultRow> selectAll(MultipleValueLookupForm multipleValueLookupForm, int maxRowsPerPage) {
    String lookupResultsSequenceNumber = multipleValueLookupForm.getLookupResultsSequenceNumber();
    List<ResultRow> resultTable = null;
    try {
        LookupResultsService lookupResultsService = KNSServiceLocator.getLookupResultsService();
        resultTable = lookupResultsService.retrieveResultsTable(lookupResultsSequenceNumber, GlobalVariables.getUserSession().getPerson().getPrincipalId());
    }
    catch (Exception e) {
        // message previously said "export" (copy-paste); corrected to reflect the select-all path
        LOG.error("error occurred trying to retrieve multiple lookup results for select all", e);
        throw new RuntimeException("error occurred trying to retrieve multiple lookup results for select all");
    }
    Map<String, String> selectedObjectIds = new HashMap<String, String>();
    for (ResultRow row : resultTable) {
        String objId = row.getObjectId();
        // re-render each row's return checkbox in the checked state
        HtmlData.InputHtmlData returnUrl = (HtmlData.InputHtmlData) row.getReturnUrlHtmlData();
        returnUrl.setChecked(HtmlData.InputHtmlData.CHECKBOX_CHECKED_VALUE);
        row.setReturnUrl(returnUrl.constructCompleteHtmlTag());
        if (objId != null) {
            selectedObjectIds.put(objId, objId);
        }
    }
    // stay on the currently-viewed page and keep the current sort column
    multipleValueLookupForm.jumpToPage(multipleValueLookupForm.getViewedPageNumber(), resultTable.size(), maxRowsPerPage);
    multipleValueLookupForm.setColumnToSortIndex(Integer.parseInt(multipleValueLookupForm.getPreviouslySortedColumnIndex()));
    multipleValueLookupForm.setCompositeObjectIdMap(selectedObjectIds);
    return resultTable;
}
/**
 * Clears the search fields; additionally purges any persisted lookup results for this
 * lookup (via prepareToReturnNone) before delegating to the superclass behavior.
 */
@Override
public ActionForward clearValues(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws IOException, ServletException {
    MultipleValueLookupForm multipleValueLookupForm = (MultipleValueLookupForm) form;
    // call the following methods to clear the persisted results
    prepareToReturnNone(multipleValueLookupForm);
    return super.clearValues(mapping, form, request, response);
}
/**
 * Performs the operations necessary for a multiple value lookup to unselect all of the results
 * and rerender the page: the composite selection map is replaced with an empty map.
 *
 * @param multipleValueLookupForm the form backing the multiple value lookup page
 * @param maxRowsPerPage maximum number of result rows rendered per page
 * @return a list of result rows, used by the UI to render the page
 */
protected List<ResultRow> unselectAll(MultipleValueLookupForm multipleValueLookupForm, int maxRowsPerPage) {
    String lookupResultsSequenceNumber = multipleValueLookupForm.getLookupResultsSequenceNumber();
    List<ResultRow> resultTable = null;
    try {
        LookupResultsService lookupResultsService = KNSServiceLocator.getLookupResultsService();
        resultTable = lookupResultsService.retrieveResultsTable(lookupResultsSequenceNumber, GlobalVariables.getUserSession().getPerson().getPrincipalId());
    }
    catch (Exception e) {
        // message previously said "export" (copy-paste); corrected to reflect the unselect-all path
        LOG.error("error occurred trying to retrieve multiple lookup results for unselect all", e);
        throw new RuntimeException("error occurred trying to retrieve multiple lookup results for unselect all");
    }
    Map<String, String> selectedObjectIds = new HashMap<String, String>();
    // keep map empty since we're not selecting anything
    multipleValueLookupForm.jumpToPage(multipleValueLookupForm.getViewedPageNumber(), resultTable.size(), maxRowsPerPage);
    multipleValueLookupForm.setColumnToSortIndex(Integer.parseInt(multipleValueLookupForm.getPreviouslySortedColumnIndex()));
    multipleValueLookupForm.setCompositeObjectIdMap(selectedObjectIds);
    return resultTable;
}
/**
 * This method computes the max number of rows that should be rendered per page for a multiple value lookup.
 *
 * This method first looks for an application parameter in FS_PARM_T, group SYSTEM, multipleValueLookupResultsPerPage
 *
 * if someone wants to implement something where a user can decide how many results to display per page,
 * this method is the place to do it. Make this method read form values to determine the max rows per page based on the user inputs
 *
 * @see org.kuali.rice.krad.util.KRADConstants.SystemGroupParameterNames#MULTIPLE_VALUE_LOOKUP_RESULTS_PER_PAGE
 * @see #DEFAULT_MAX_ROWS_PER_PAGE
 * @param multipleValueLookupForm the form
 * @return the maximum number of rows to render per page
 */
protected int getMaxRowsPerPage(MultipleValueLookupForm multipleValueLookupForm) {
    Integer appMaxRowsPerPage = LookupUtils.getApplicationMaximumSearchResulsPerPageForMultipleValueLookups();
    if (appMaxRowsPerPage == null) {
        LOG.warn("Couldn't find application results per page for MV lookups. Using default of " + DEFAULT_MAX_ROWS_PER_PAGE);
        // Integer.valueOf replaces the deprecated new Integer(int) constructor and can reuse cached instances
        appMaxRowsPerPage = Integer.valueOf(DEFAULT_MAX_ROWS_PER_PAGE);
    }
    return appMaxRowsPerPage;
}
}
| apache-2.0 |
freedomtan/tensorflow | tensorflow/core/kernels/mkl/mkl_fused_ops_test.cc | 65169 | /* Copyright 2018 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#ifdef INTEL_MKL
#include "tensorflow/cc/ops/const_op.h"
#include "tensorflow/cc/ops/image_ops.h"
#include "tensorflow/cc/ops/nn_ops.h"
#include "tensorflow/cc/ops/nn_ops_internal.h"
#include "tensorflow/cc/ops/standard_ops.h"
#include "tensorflow/core/common_runtime/kernel_benchmark_testlib.h"
#include "tensorflow/core/framework/fake_input.h"
#include "tensorflow/core/framework/node_def_builder.h"
#include "tensorflow/core/framework/tensor.h"
#include "tensorflow/core/framework/types.pb.h"
#include "tensorflow/core/graph/mkl_graph_util.h"
#include "tensorflow/core/kernels/conv_ops_gpu.h"
#include "tensorflow/core/kernels/ops_testutil.h"
#include "tensorflow/core/kernels/ops_util.h"
#include "tensorflow/core/platform/test.h"
#include "tensorflow/core/platform/test_benchmark.h"
#include "tensorflow/core/platform/types.h"
#include "tensorflow/core/public/session.h"
namespace tensorflow {
// Helper class for converting MKL tensors to TF tensors and comparing to
// expected values
// Placeholder bytes/shape used as the "Mkl metadata" input for layout-dependent ops.
static const uint8 dummy_tensor[] = {0, 0, 0, 0, 0, 0, 0, 0};
static const TensorShape dummy_shape({8});
// Callable that runs a Conv2D+BiasAdd-style graph and writes the result to `out`.
using BiasAddGraphRunner =
    std::function<void(const Tensor& input_data, const Tensor& filter_data,
                       const Tensor& bias_data, Tensor* out)>;
// Callable like BiasAddGraphRunner but parameterized by the list of fused ops to apply.
using FusedGraphRunner =
    std::function<void(const Tensor& input_data, const Tensor& filter_data,
                       const Tensor& bias_data,
                       const std::vector<string>& fused_ops, Tensor* out)>;
// Shared helpers for the fused-op tests: MKL->TF tensor conversion, graph
// execution, and comparison of fused vs. unfused results.
template <typename T>
class CommonTestUtilities : public OpsTestBase {
 public:
  // Converts an MKL-layout tensor (data + metadata) back to a plain TF tensor
  // by running a _MklToTf node; stores the converted tensor in `output`.
  void PerformConversion(DataType dtype, const Tensor& tensor,
                         const Tensor& mkl_meta_tensor, Tensor* output) {
    // Create an MKL to TF conversion node and execute it
    TF_EXPECT_OK(NodeDefBuilder("mkl_to_tf_op", "_MklToTf")
                     .Input(FakeInput(dtype))     // Input
                     .Input(FakeInput(DT_UINT8))  // Mkl second tensor
                     .Attr("T", dtype)
                     .Attr("_kernel", "MklLayoutDependentOp")
                     .Finalize(node_def()));
    TF_EXPECT_OK(InitOp());
    AddInputFromArray<T>(tensor.shape(), tensor.flat<T>());
    AddInputFromArray<uint8>(mkl_meta_tensor.shape(),
                             mkl_meta_tensor.flat<uint8>());
    TF_ASSERT_OK(RunOpKernel());
    *output = *GetOutput(0);
  }
  // Runs a Tensorflow graph defined by the root scope, and fetches the result
  // of 'fetch' node into the output Tensor.
  static void RunAndFetch(const tensorflow::Scope& root, const string& fetch,
                          Tensor* output) {
    tensorflow::GraphDef graph;
    TF_ASSERT_OK(root.ToGraphDef(&graph));
    std::unique_ptr<tensorflow::Session> session(
        tensorflow::NewSession(tensorflow::SessionOptions()));
    TF_ASSERT_OK(session->Create(graph));
    std::vector<Tensor> unfused_tensors;
    TF_ASSERT_OK(session->Run({}, {fetch}, {}, &unfused_tensors));
    *output = unfused_tensors[0];
  }
  // Converts an MKL tensor to TF layout and checks it is numerically close
  // (tolerance 1e-5) to `expected`.
  void ConvertAndCompare(DataType dtype, const Tensor& tensor,
                         const Tensor& mkl_meta_tensor,
                         const Tensor& expected) {
    Tensor output;
    PerformConversion(dtype, tensor, mkl_meta_tensor, &output);
    test::ExpectTensorNear<T>(expected, output, 1e-5);
  }
  // Like ConvertAndCompare but requires exact equality (for integral types).
  void ConvertAndCompareIntegral(DataType dtype, const Tensor& tensor,
                                 const Tensor& mkl_meta_tensor,
                                 const Tensor& expected) {
    Tensor output;
    PerformConversion(dtype, tensor, mkl_meta_tensor, &output);
    test::ExpectTensorEqual<T>(expected, output);
  }
  // Empty body required because OpsTestBase derives from ::testing::Test.
  void TestBody() {}
  // Generates random image/filter/bias tensors, runs both the unfused and
  // fused Conv2D+BiasAdd graphs, and checks the outputs match.
  static void VerifyBiasAddTensorsClose(int depth, int image_width,
                                        int image_height, int image_batch_count,
                                        int filter_size, int filter_count,
                                        const BiasAddGraphRunner& run_default,
                                        const BiasAddGraphRunner& run_fused) {
    DataType dtype = DataTypeToEnum<T>::v();
    Tensor image(dtype, {image_batch_count, image_height, image_width, depth});
    image.flat<T>() = image.flat<T>().template setRandom<random_gen_>();
    Tensor filter(dtype, {filter_size, filter_size, depth, filter_count});
    filter.flat<T>() = filter.flat<T>().template setRandom<random_gen_>();
    const int bias_size = filter_count;
    Tensor bias(dtype, {bias_size});
    bias.flat<T>() = bias.flat<T>().template setRandom<random_gen_>();
    Tensor conv_2d;
    Tensor fused_conv_2d;
    run_default(image, filter, bias, &conv_2d);
    run_fused(image, filter, bias, &fused_conv_2d);
    ASSERT_EQ(conv_2d.dtype(), fused_conv_2d.dtype());
    ASSERT_EQ(conv_2d.shape(), fused_conv_2d.shape());
    test::ExpectClose(conv_2d, fused_conv_2d, 1e-5);
  }
  // Same as VerifyBiasAddTensorsClose but for an arbitrary list of fused ops
  // and an explicitly-sized bias tensor.
  static void VerifyFusedTensorsClose(int depth, int image_width,
                                      int image_height, int image_batch_count,
                                      int filter_size, int filter_count,
                                      int bias_size,
                                      const std::vector<string>& fused_ops,
                                      const FusedGraphRunner& run_default,
                                      const FusedGraphRunner& run_fused) {
    DataType dtype = DataTypeToEnum<T>::v();
    Tensor image(dtype, {image_batch_count, image_height, image_width, depth});
    image.flat<T>() = image.flat<T>().template setRandom<random_gen_>();
    Tensor filter(dtype, {filter_size, filter_size, depth, filter_count});
    filter.flat<T>() = filter.flat<T>().template setRandom<random_gen_>();
    Tensor bias(dtype, {bias_size});
    bias.flat<T>() = bias.flat<T>().template setRandom<random_gen_>();
    Tensor conv_2d;
    Tensor fused_conv_2d;
    run_default(image, filter, bias, fused_ops, &conv_2d);
    run_fused(image, filter, bias, fused_ops, &fused_conv_2d);
    ASSERT_EQ(conv_2d.dtype(), fused_conv_2d.dtype());
    ASSERT_EQ(conv_2d.shape(), fused_conv_2d.shape());
    test::ExpectClose(conv_2d, fused_conv_2d, 1e-5);
  }
  // 2-D (matmul-style) analogue of VerifyFusedTensorsClose: random
  // input/weight/bias matrices, fused vs. unfused comparison.
  static void VerifyFusedMatrixClose(int depth, int batch, int weight_count,
                                     const std::vector<string>& fused_ops,
                                     const FusedGraphRunner& run_default,
                                     const FusedGraphRunner& run_fused) {
    DataType dtype = DataTypeToEnum<T>::v();
    Tensor input(dtype, {batch, depth});
    input.flat<T>() = input.flat<T>().template setRandom<random_gen_>();
    Tensor weight(dtype, {depth, weight_count});
    weight.flat<T>() = weight.flat<T>().template setRandom<random_gen_>();
    Tensor bias(dtype, {weight_count});
    bias.flat<T>() = bias.flat<T>().template setRandom<random_gen_>();
    Tensor output;
    Tensor fused_output;
    run_default(input, weight, bias, fused_ops, &output);
    run_fused(input, weight, bias, fused_ops, &fused_output);
    ASSERT_EQ(output.dtype(), fused_output.dtype());
    ASSERT_EQ(output.shape(), fused_output.shape());
    test::ExpectClose(output, fused_output, 1e-5);
  }
 private:
  // Normal-distribution generator used to fill the random test tensors.
  using random_gen_ = Eigen::internal::NormalRandomGenerator<T>;
};
// Testing MKL's fused convolution ops
template <typename T>
class MklFusedConv2DOpTest : public OpsTestBase {
 protected:
  // Default test tensor dimensions (NHWC).
  static constexpr int kDepth = 3;
  static constexpr int kImageWidth = 32;
  static constexpr int kImageHeight = 32;
  static constexpr int kImageBatchCount = 8;
  // Builds and runs the reference graph: a plain Conv2D followed by the
  // individual ops named in `fused_ops` (BiasAdd, Add, Relu, Relu6, Elu,
  // LeakyRelu), each as a separate node. The final node's value goes to
  // `output`.
  void RunConv2DUnfused(const Tensor& input_data, const Tensor& filter_data,
                        const Tensor& bias_data,
                        const std::vector<string>& fused_ops, Tensor* output,
                        int stride = 1) {
    auto root = tensorflow::Scope::NewRootScope();
    auto input_data_op =
        ops::Const(root.WithOpName("input"), Input::Initializer(input_data));
    Output next_op = ops::Conv2D(
        root.WithOpName("conv"), input_data_op,
        ops::Const(root.WithOpName("filter"), Input::Initializer(filter_data)),
        {1, stride, stride, 1}, "SAME");
    // `last_op` tracks the name of the most recently appended node so we know
    // which node to fetch at the end.
    string last_op = "";
    if (std::find(fused_ops.begin(), fused_ops.end(), "BiasAdd") !=
        fused_ops.end()) {
      last_op = "with_bias";
      next_op = ops::BiasAdd(
          root.WithOpName(last_op), next_op,
          ops::Const(root.WithOpName("bias"), Input::Initializer(bias_data)));
    }
    if (std::find(fused_ops.begin(), fused_ops.end(), "Add") !=
        fused_ops.end()) {
      // "Add" fuses a residual connection: the conv input is added back in.
      last_op = "with_add";
      next_op = ops::AddN(root.WithOpName("with_add"),
                          std::initializer_list<Input>{next_op, input_data_op});
    }
    if (std::find(fused_ops.begin(), fused_ops.end(), "Relu") !=
        fused_ops.end()) {
      last_op = "with_relu";
      next_op = ops::Relu(root.WithOpName(last_op), next_op);
    }
    if (std::find(fused_ops.begin(), fused_ops.end(), "Relu6") !=
        fused_ops.end()) {
      last_op = "with_relu6";
      next_op = ops::Relu6(root.WithOpName(last_op), next_op);
    }
    if (std::find(fused_ops.begin(), fused_ops.end(), "Elu") !=
        fused_ops.end()) {
      last_op = "with_elu";
      next_op = ops::Elu(root.WithOpName(last_op), next_op);
    }
    if (std::find(fused_ops.begin(), fused_ops.end(), "LeakyRelu") !=
        fused_ops.end()) {
      last_op = "with_leakyrelu";
      next_op = ops::internal::LeakyRelu(root.WithOpName(last_op), next_op);
    }
    CommonTestUtilities<T>::RunAndFetch(root, last_op, output);
  }
  // Runs the single fused kernel (_MklFusedConv2D or _MklNativeFusedConv2D,
  // depending on whether native format is enabled) with the same inputs and
  // fused-op list, converting the MKL-layout output back to TF layout when
  // necessary.
  void RunMklFusedConv2DOp(const Tensor& image, const Tensor& filter,
                           const std::vector<Tensor>& args,
                           const std::vector<string>& fused_ops, Tensor* output,
                           int stride = 1) {
    DataType dtype = DataTypeToEnum<T>::v();
    int num_args = static_cast<int>(args.size());
    if (!NativeFormatEnabled()) {
      // Layout-dependent variant: takes extra uint8 MKL metadata inputs.
      TF_EXPECT_OK(NodeDefBuilder("fused_conv_op", "_MklFusedConv2D")
                       .Input(FakeInput(dtype))
                       .Input(FakeInput(dtype))
                       .Input(FakeInput(num_args, dtype))
                       .Input(FakeInput(DT_UINT8))
                       .Input(FakeInput(DT_UINT8))
                       .Input(FakeInput(num_args, DT_UINT8))
                       .Attr("T", dtype)
                       .Attr("num_args", num_args)
                       .Attr("strides", {1, stride, stride, 1})
                       .Attr("padding", "SAME")
                       .Attr("fused_ops", fused_ops)
                       .Attr("_kernel", "MklLayoutDependentOp")
                       .Finalize(node_def()));
    } else {
      // Native-format variant: plain TF tensors, no metadata inputs.
      TF_EXPECT_OK(NodeDefBuilder("fused_conv_op", "_MklNativeFusedConv2D")
                       .Input(FakeInput(dtype))
                       .Input(FakeInput(dtype))
                       .Input(FakeInput(num_args, dtype))
                       .Attr("T", dtype)
                       .Attr("num_args", num_args)
                       .Attr("strides", {1, stride, stride, 1})
                       .Attr("padding", "SAME")
                       .Attr("fused_ops", fused_ops)
                       .Attr("_kernel", "MklNameChangeOp")
                       .Finalize(node_def()));
    }
    TF_EXPECT_OK(InitOp());
    AddInputFromArray<T>(image.shape(), image.flat<T>());
    AddInputFromArray<T>(filter.shape(), filter.flat<T>());
    for (const Tensor& arg : args)
      AddInputFromArray<T>(arg.shape(), arg.flat<T>());
    if (!NativeFormatEnabled()) {
      // Dummy metadata for every data input, in the same order.
      AddInputFromArray<uint8>(dummy_shape, dummy_tensor);
      AddInputFromArray<uint8>(dummy_shape, dummy_tensor);
      for (const Tensor& arg : args)
        AddInputFromArray<uint8>(dummy_shape, dummy_tensor);
    }
    TF_ASSERT_OK(RunOpKernel());
    // Compare output to expected results
    const Tensor& output_tensor = *GetOutput(0);
    CommonTestUtilities<T> test_util;
    if (!NativeFormatEnabled()) {
      // Index 2 will need to be changed if the number of outputs produced
      // by MklConv2D change.
      const Tensor& output_meta_tensor = *GetOutput(2);
      test_util.PerformConversion(dtype, output_tensor, output_meta_tensor,
                                  output);
    } else {
      *output = output_tensor;
    }
  }
  // Verifies computing unfused ops in a graph is identical to FusedConv2D.
  void VerifyFusedConv2D(int filter_size, int filter_count,
                         const std::vector<string>& fused_ops,
                         int depth = kDepth, int image_width = kImageWidth,
                         int image_height = kImageHeight,
                         int image_batch_count = kImageBatchCount) {
    const FusedGraphRunner run_default =
        [this](const Tensor& input_data, const Tensor& filter_data,
               const Tensor& bias_data, const std::vector<string>& fused_ops,
               Tensor* out) {
          RunConv2DUnfused(input_data, filter_data, bias_data, fused_ops, out);
        };
    const FusedGraphRunner run_fused =
        [this](const Tensor& input_data, const Tensor& filter_data,
               const Tensor& bias_data, const std::vector<string>& fused_ops,
               Tensor* out) {
          std::vector<Tensor> fused_input = {bias_data};
          // The "Add" fusion consumes the conv input as an extra argument.
          if (std::find(fused_ops.begin(), fused_ops.end(), "Add") !=
              fused_ops.end()) {
            fused_input.push_back(input_data);
          }
          RunMklFusedConv2DOp(input_data, filter_data, fused_input, fused_ops,
                              out);
        };
    const int bias_size = filter_count;
    CommonTestUtilities<T>::VerifyFusedTensorsClose(
        depth, image_width, image_height, image_batch_count, filter_size,
        filter_count, bias_size, fused_ops, run_default, run_fused);
  }
};
// Fixture alias for the BiasAdd-based fusion tests; registered as a
// parameterized (typed) test suite below.
template <typename T>
class MklFusedConv2DWithBiasOpTest : public MklFusedConv2DOpTest<T> {};
TYPED_TEST_SUITE_P(MklFusedConv2DWithBiasOpTest);
// -------------------------------------------------------------------------- //
// Conv2D + BiasAdd + {Activation} //
// -------------------------------------------------------------------------- //
// Each test below runs both 1x1 and 3x3 filter variants of a fusion pattern.
TYPED_TEST_P(MklFusedConv2DWithBiasOpTest, OneByOneConvolution) {
  const int kFilterSize = 1;
  const int kFilterCount = 12;
  this->VerifyFusedConv2D(kFilterSize, kFilterCount, {"BiasAdd"});
}
TYPED_TEST_P(MklFusedConv2DWithBiasOpTest, SpatialConvolution) {
  const int kFilterSize = 3;
  const int kFilterCount = 12;
  this->VerifyFusedConv2D(kFilterSize, kFilterCount, {"BiasAdd"});
}
TYPED_TEST_P(MklFusedConv2DWithBiasOpTest, OneByOneConvolutionAndRelu) {
  const int kFilterSize = 1;
  const int kFilterCount = 12;
  this->VerifyFusedConv2D(kFilterSize, kFilterCount, {"BiasAdd", "Relu"});
}
TYPED_TEST_P(MklFusedConv2DWithBiasOpTest, SpatialConvolutionAndRelu) {
  const int kFilterSize = 3;
  const int kFilterCount = 12;
  this->VerifyFusedConv2D(kFilterSize, kFilterCount, {"BiasAdd", "Relu"});
}
TYPED_TEST_P(MklFusedConv2DWithBiasOpTest, OneByOneConvolutionAndRelu6) {
  const int kFilterSize = 1;
  const int kFilterCount = 12;
  this->VerifyFusedConv2D(kFilterSize, kFilterCount, {"BiasAdd", "Relu6"});
}
TYPED_TEST_P(MklFusedConv2DWithBiasOpTest, SpatialConvolutionAndRelu6) {
  const int kFilterSize = 3;
  const int kFilterCount = 12;
  this->VerifyFusedConv2D(kFilterSize, kFilterCount, {"BiasAdd", "Relu6"});
}
TYPED_TEST_P(MklFusedConv2DWithBiasOpTest, OneByOneConvolutionAndElu) {
  const int kFilterSize = 1;
  const int kFilterCount = 12;
  this->VerifyFusedConv2D(kFilterSize, kFilterCount, {"BiasAdd", "Elu"});
}
TYPED_TEST_P(MklFusedConv2DWithBiasOpTest, SpatialConvolutionAndElu) {
  const int kFilterSize = 3;
  const int kFilterCount = 12;
  this->VerifyFusedConv2D(kFilterSize, kFilterCount, {"BiasAdd", "Elu"});
}
TYPED_TEST_P(MklFusedConv2DWithBiasOpTest, OneByOneConvolutionAndLeakyRelu) {
  const int kFilterSize = 1;
  const int kFilterCount = 12;
  this->VerifyFusedConv2D(kFilterSize, kFilterCount, {"BiasAdd", "LeakyRelu"});
}
TYPED_TEST_P(MklFusedConv2DWithBiasOpTest, SpatialConvolutionAndLeakyRelu) {
  const int kFilterSize = 3;
  const int kFilterCount = 12;
  this->VerifyFusedConv2D(kFilterSize, kFilterCount, {"BiasAdd", "LeakyRelu"});
}
// Conv2D(1x1) + BiasAdd + Add. The Add fusion requires the output channel
// count to match the input depth, hence filter_count == 3 below.
TYPED_TEST_P(MklFusedConv2DWithBiasOpTest, OneByOneConvolutionAndAdd) {
  this->VerifyFusedConv2D(/*filter_size=*/1, /*filter_count=*/3,
                          {"BiasAdd", "Add"});
}
// Conv2D(3x3) + BiasAdd + Add.
TYPED_TEST_P(MklFusedConv2DWithBiasOpTest, SpatialConvolutionAndAdd) {
  this->VerifyFusedConv2D(/*filter_size=*/3, /*filter_count=*/3,
                          {"BiasAdd", "Add"});
}
// Conv2D(1x1) + BiasAdd + Add + Relu.
TYPED_TEST_P(MklFusedConv2DWithBiasOpTest, OneByOneConvolutionAndAddRelu) {
  this->VerifyFusedConv2D(/*filter_size=*/1, /*filter_count=*/3,
                          {"BiasAdd", "Add", "Relu"});
}
// Conv2D(3x3) + BiasAdd + Add + Relu.
TYPED_TEST_P(MklFusedConv2DWithBiasOpTest, SpatialConvolutionAndAddRelu) {
  this->VerifyFusedConv2D(/*filter_size=*/3, /*filter_count=*/3,
                          {"BiasAdd", "Add", "Relu"});
}
// Conv2D(1x1) + BiasAdd + Add + Relu6.
TYPED_TEST_P(MklFusedConv2DWithBiasOpTest, OneByOneConvolutionAndAddRelu6) {
  this->VerifyFusedConv2D(/*filter_size=*/1, /*filter_count=*/3,
                          {"BiasAdd", "Add", "Relu6"});
}
// Conv2D(3x3) + BiasAdd + Add + Relu6.
TYPED_TEST_P(MklFusedConv2DWithBiasOpTest, SpatialConvolutionAndAddRelu6) {
  this->VerifyFusedConv2D(/*filter_size=*/3, /*filter_count=*/3,
                          {"BiasAdd", "Add", "Relu6"});
}
// Conv2D(1x1) + BiasAdd + Add + Elu.
TYPED_TEST_P(MklFusedConv2DWithBiasOpTest, OneByOneConvolutionAndAddElu) {
  this->VerifyFusedConv2D(/*filter_size=*/1, /*filter_count=*/3,
                          {"BiasAdd", "Add", "Elu"});
}
// Conv2D(3x3) + BiasAdd + Add + Elu.
TYPED_TEST_P(MklFusedConv2DWithBiasOpTest, SpatialConvolutionAndAddElu) {
  this->VerifyFusedConv2D(/*filter_size=*/3, /*filter_count=*/3,
                          {"BiasAdd", "Add", "Elu"});
}
// Conv2D(1x1) + BiasAdd + Add + LeakyRelu.
TYPED_TEST_P(MklFusedConv2DWithBiasOpTest, OneByOneConvolutionAndAddLeakyRelu) {
  this->VerifyFusedConv2D(/*filter_size=*/1, /*filter_count=*/3,
                          {"BiasAdd", "Add", "LeakyRelu"});
}
// Conv2D(3x3) + BiasAdd + Add + LeakyRelu.
TYPED_TEST_P(MklFusedConv2DWithBiasOpTest, SpatialConvolutionAndAddLeakyRelu) {
  this->VerifyFusedConv2D(/*filter_size=*/3, /*filter_count=*/3,
                          {"BiasAdd", "Add", "LeakyRelu"});
}
// Register every test defined above with the parameterized suite; a test
// missing from this list would silently never run.
REGISTER_TYPED_TEST_SUITE_P(
    MklFusedConv2DWithBiasOpTest, OneByOneConvolution, SpatialConvolution,
    OneByOneConvolutionAndRelu, SpatialConvolutionAndRelu,
    OneByOneConvolutionAndRelu6, SpatialConvolutionAndRelu6,
    OneByOneConvolutionAndElu, SpatialConvolutionAndElu,
    OneByOneConvolutionAndLeakyRelu, SpatialConvolutionAndLeakyRelu,
    OneByOneConvolutionAndAdd, SpatialConvolutionAndAdd,
    OneByOneConvolutionAndAddRelu, SpatialConvolutionAndAddRelu,
    OneByOneConvolutionAndAddRelu6, SpatialConvolutionAndAddRelu6,
    OneByOneConvolutionAndAddElu, SpatialConvolutionAndAddElu,
    OneByOneConvolutionAndAddLeakyRelu, SpatialConvolutionAndAddLeakyRelu);
// Instantiate the suite for float element type only.
using MklFusedBiasAddDataTypes = ::testing::Types<float>;
INSTANTIATE_TYPED_TEST_SUITE_P(Test, MklFusedConv2DWithBiasOpTest,
                               MklFusedBiasAddDataTypes);
// Testing MKL's fused depthwise convolution ops
// Fixture comparing the MKL fused depthwise-convolution kernel against an
// equivalent graph of separate (unfused) TensorFlow ops. T is the element
// type under test.
template <typename T>
class MklFusedDepthwiseConv2DOpTest : public OpsTestBase {
 protected:
  // Default geometry for the generated input image.
  static constexpr int kDepth = 3;
  static constexpr int kImageWidth = 32;
  static constexpr int kImageHeight = 32;
  static constexpr int kImageBatchCount = 8;
  // Builds and runs the reference graph: DepthwiseConv2dNative (SAME padding)
  // followed by the separate ops named in `fused_ops` (recognized here:
  // "BiasAdd", "Relu", "Relu6", "Elu"), fetching the last op's result into
  // `output`.
  void RunDepthwiseConv2DUnfused(const Tensor& input_data,
                                 const Tensor& filter_data,
                                 const Tensor& bias_data,
                                 const std::vector<string>& fused_ops,
                                 Tensor* output, int stride = 1) {
    auto root = tensorflow::Scope::NewRootScope();
    auto input_data_op =
        ops::Const(root.WithOpName("input"), Input::Initializer(input_data));
    Output next_op = ops::DepthwiseConv2dNative(
        root.WithOpName("depthwise_conv"), input_data_op,
        ops::Const(root.WithOpName("filter"), Input::Initializer(filter_data)),
        {1, stride, stride, 1}, "SAME");
    // `last_op` tracks the name of the most recently appended op so the
    // fetch below targets the end of the chain.
    string last_op = "";
    if (std::find(fused_ops.begin(), fused_ops.end(), "BiasAdd") !=
        fused_ops.end()) {
      last_op = "with_bias";
      next_op = ops::BiasAdd(
          root.WithOpName(last_op), next_op,
          ops::Const(root.WithOpName("bias"), Input::Initializer(bias_data)));
    }
    if (std::find(fused_ops.begin(), fused_ops.end(), "Relu") !=
        fused_ops.end()) {
      last_op = "with_relu";
      next_op = ops::Relu(root.WithOpName(last_op), next_op);
    }
    if (std::find(fused_ops.begin(), fused_ops.end(), "Relu6") !=
        fused_ops.end()) {
      last_op = "with_relu6";
      next_op = ops::Relu6(root.WithOpName(last_op), next_op);
    }
    if (std::find(fused_ops.begin(), fused_ops.end(), "Elu") !=
        fused_ops.end()) {
      last_op = "with_elu";
      next_op = ops::Elu(root.WithOpName(last_op), next_op);
    }
    CommonTestUtilities<T>::RunAndFetch(root, last_op, output);
  }
  // Runs the MKL fused kernel. Depending on the build, this constructs either
  // _MklFusedDepthwiseConv2dNative (layout-dependent op, which takes extra
  // uint8 MKL metadata inputs) or _MklNativeFusedDepthwiseConv2dNative.
  void RunMklFusedDepthwiseConv2DOp(const Tensor& image, const Tensor& filter,
                                    const std::vector<Tensor>& args,
                                    const std::vector<string>& fused_ops,
                                    Tensor* output, int stride = 1) {
    DataType dtype = DataTypeToEnum<T>::v();
    int num_args = static_cast<int>(args.size());
    if (!NativeFormatEnabled()) {
      TF_EXPECT_OK(NodeDefBuilder("fused_depthwise_conv_op",
                                  "_MklFusedDepthwiseConv2dNative")
                       .Input(FakeInput(dtype))
                       .Input(FakeInput(dtype))
                       .Input(FakeInput(num_args, dtype))
                       .Input(FakeInput(DT_UINT8))
                       .Input(FakeInput(DT_UINT8))
                       .Input(FakeInput(num_args, DT_UINT8))
                       .Attr("T", dtype)
                       .Attr("num_args", num_args)
                       .Attr("strides", {1, stride, stride, 1})
                       .Attr("padding", "SAME")
                       .Attr("fused_ops", fused_ops)
                       .Attr("_kernel", "MklLayoutDependentOp")
                       .Finalize(node_def()));
    } else {
      TF_EXPECT_OK(NodeDefBuilder("fused_depthwise_conv_op",
                                  "_MklNativeFusedDepthwiseConv2dNative")
                       .Input(FakeInput(dtype))
                       .Input(FakeInput(dtype))
                       .Input(FakeInput(num_args, dtype))
                       .Attr("T", dtype)
                       .Attr("num_args", num_args)
                       .Attr("strides", {1, stride, stride, 1})
                       .Attr("padding", "SAME")
                       .Attr("fused_ops", fused_ops)
                       .Attr("_kernel", "MklNameChangeOp")
                       .Finalize(node_def()));
    }
    TF_EXPECT_OK(InitOp());
    AddInputFromArray<T>(image.shape(), image.flat<T>());
    AddInputFromArray<T>(filter.shape(), filter.flat<T>());
    for (const Tensor& arg : args)
      AddInputFromArray<T>(arg.shape(), arg.flat<T>());
    if (!NativeFormatEnabled()) {
      // Dummy MKL metadata tensors: one each for image and filter, plus one
      // per extra argument, matching the FakeInput list above.
      AddInputFromArray<uint8>(dummy_shape, dummy_tensor);
      AddInputFromArray<uint8>(dummy_shape, dummy_tensor);
      for (const Tensor& arg : args)
        AddInputFromArray<uint8>(dummy_shape, dummy_tensor);
    }
    TF_ASSERT_OK(RunOpKernel());
    // Compare output to expected results
    const Tensor& output_tensor = *GetOutput(0);
    CommonTestUtilities<T> test_util;
    if (!NativeFormatEnabled()) {
      // Index 2 will need to be changed if the number of outputs produced
      // by MklDepthwiseConv2D change.
      const Tensor& output_meta_tensor = *GetOutput(2);
      test_util.PerformConversion(dtype, output_tensor, output_meta_tensor,
                                  output);
    } else {
      *output = output_tensor;
    }
  }
  // Verifies computing unfused ops in a graph is identical to
  // FusedDepthwiseConv2D.
  void VerifyFusedDepthwiseConv2D(int filter_size, int filter_count,
                                  int bias_size,
                                  const std::vector<string>& fused_ops,
                                  int depth = kDepth,
                                  int image_width = kImageWidth,
                                  int image_height = kImageHeight,
                                  int image_batch_count = kImageBatchCount) {
    // Reference path: separate ops.
    const FusedGraphRunner run_default =
        [this](const Tensor& input_data, const Tensor& filter_data,
               const Tensor& bias_data, const std::vector<string>& fused_ops,
               Tensor* out) {
          RunDepthwiseConv2DUnfused(input_data, filter_data, bias_data,
                                    fused_ops, out);
        };
    // Path under test: single fused MKL kernel, bias passed as extra arg.
    const FusedGraphRunner run_fused =
        [this](const Tensor& input_data, const Tensor& filter_data,
               const Tensor& bias_data, const std::vector<string>& fused_ops,
               Tensor* out) {
          std::vector<Tensor> fused_input = {bias_data};
          RunMklFusedDepthwiseConv2DOp(input_data, filter_data, fused_input,
                                       fused_ops, out);
        };
    CommonTestUtilities<T>::VerifyFusedTensorsClose(
        depth, image_width, image_height, image_batch_count, filter_size,
        filter_count, bias_size, fused_ops, run_default, run_fused);
  }
};
// Fixture alias for depthwise-convolution fusion tests that include BiasAdd.
template <typename T>
class MklFusedDepthwiseConv2DWithBiasOpTest
    : public MklFusedDepthwiseConv2DOpTest<T> {};
TYPED_TEST_SUITE_P(MklFusedDepthwiseConv2DWithBiasOpTest);
// -------------------------------------------------------------------------- //
// DepthwiseConv2D + BiasAdd + {Activation}                                   //
// -------------------------------------------------------------------------- //
// DepthwiseConv2D(1x1) + BiasAdd. With channel_multiplier == 1 and an input
// depth of 3, the bias vector has 3 elements.
TYPED_TEST_P(MklFusedDepthwiseConv2DWithBiasOpTest, OneByOneConvolution) {
  this->VerifyFusedDepthwiseConv2D(/*filter_size=*/1, /*filter_count=*/1,
                                   /*bias_size=*/3, {"BiasAdd"});
}
// DepthwiseConv2D(3x3) + BiasAdd.
TYPED_TEST_P(MklFusedDepthwiseConv2DWithBiasOpTest, SpatialConvolution) {
  this->VerifyFusedDepthwiseConv2D(/*filter_size=*/3, /*filter_count=*/1,
                                   /*bias_size=*/3, {"BiasAdd"});
}
// DepthwiseConv2D(1x1) + BiasAdd + Relu.
TYPED_TEST_P(MklFusedDepthwiseConv2DWithBiasOpTest,
             OneByOneConvolutionAndRelu) {
  this->VerifyFusedDepthwiseConv2D(/*filter_size=*/1, /*filter_count=*/1,
                                   /*bias_size=*/3, {"BiasAdd", "Relu"});
}
// DepthwiseConv2D(3x3) + BiasAdd + Relu.
TYPED_TEST_P(MklFusedDepthwiseConv2DWithBiasOpTest, SpatialConvolutionAndRelu) {
  this->VerifyFusedDepthwiseConv2D(/*filter_size=*/3, /*filter_count=*/1,
                                   /*bias_size=*/3, {"BiasAdd", "Relu"});
}
// DepthwiseConv2D(1x1) + BiasAdd + Relu6.
TYPED_TEST_P(MklFusedDepthwiseConv2DWithBiasOpTest,
             OneByOneConvolutionAndRelu6) {
  this->VerifyFusedDepthwiseConv2D(/*filter_size=*/1, /*filter_count=*/1,
                                   /*bias_size=*/3, {"BiasAdd", "Relu6"});
}
// DepthwiseConv2D(3x3) + BiasAdd + Relu6.
TYPED_TEST_P(MklFusedDepthwiseConv2DWithBiasOpTest,
             SpatialConvolutionAndRelu6) {
  this->VerifyFusedDepthwiseConv2D(/*filter_size=*/3, /*filter_count=*/1,
                                   /*bias_size=*/3, {"BiasAdd", "Relu6"});
}
// DepthwiseConv2D(1x1) + BiasAdd + Elu.
TYPED_TEST_P(MklFusedDepthwiseConv2DWithBiasOpTest, OneByOneConvolutionAndElu) {
  this->VerifyFusedDepthwiseConv2D(/*filter_size=*/1, /*filter_count=*/1,
                                   /*bias_size=*/3, {"BiasAdd", "Elu"});
}
// DepthwiseConv2D(3x3) + BiasAdd + Elu.
TYPED_TEST_P(MklFusedDepthwiseConv2DWithBiasOpTest, SpatialConvolutionAndElu) {
  this->VerifyFusedDepthwiseConv2D(/*filter_size=*/3, /*filter_count=*/1,
                                   /*bias_size=*/3, {"BiasAdd", "Elu"});
}
// Register every depthwise fusion test with the parameterized suite.
REGISTER_TYPED_TEST_SUITE_P(
    MklFusedDepthwiseConv2DWithBiasOpTest, OneByOneConvolution,
    SpatialConvolution, OneByOneConvolutionAndRelu, SpatialConvolutionAndRelu,
    OneByOneConvolutionAndRelu6, SpatialConvolutionAndRelu6,
    OneByOneConvolutionAndElu, SpatialConvolutionAndElu);
// Redeclares the alias from the Conv2D suite above with the same target,
// which is legal C++; instantiated for float only.
using MklFusedBiasAddDataTypes = ::testing::Types<float>;
INSTANTIATE_TYPED_TEST_SUITE_P(Test, MklFusedDepthwiseConv2DWithBiasOpTest,
                               MklFusedBiasAddDataTypes);
// Testing fusion of pad and convolution
// Fixture running a fixed 1x3x4x1 image through the MKL fused Pad+Conv2D
// kernel and comparing against hard-coded expected values. `data_format`
// selects NHWC or NCHW tensor layouts.
template <typename T>
class FusedPadConvOpTest : public OpsTestBase {
 public:
  void Run(const string data_format) {
    DataType dtype = DataTypeToEnum<T>::v();
    // Fixed test geometry: a single 3x4 single-channel image.
    const int depth = 1;
    const int image_width = 4;
    const int image_height = 3;
    const int image_batch_count = 1;
    const int stride = 1;
    Tensor image, expected;
    if (data_format == "NHWC") {
      image =
          Tensor(dtype, {image_batch_count, image_height, image_width, depth});
    } else {
      image =
          Tensor(dtype, {image_batch_count, depth, image_height, image_width});
    }
    test::FillValues<T>(&image, {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12});
    // A single 3x3 filter.
    const int kFilterSize = 3;
    const int kFilterCount = 1;
    Tensor filter(dtype, {kFilterSize, kFilterSize, depth, kFilterCount});
    test::FillValues<T>(&filter, {1, 4, 7, 2, 5, 8, 3, 6, 9});
    // Paddings tensor is 4x2: (before, after) per dimension. The height is
    // padded by (3, 4) and the width by (1, 2); batch/channel get no padding,
    // so the spatial rows go in different positions per layout.
    const int padding_height = 4;
    const int padding_width = 2;
    Tensor padding(DT_INT32, {padding_height, padding_width});
    if (data_format == "NHWC") {
      test::FillValues<int32>(&padding, {0, 0, 3, 4, 1, 2, 0, 0});
    } else {
      test::FillValues<int32>(&padding, {0, 0, 0, 0, 3, 4, 1, 2});
    }
    // Padded image is 10x7; VALID 3x3 convolution yields an 8x5 output.
    if (data_format == "NHWC") {
      expected = Tensor(dtype, TensorShape({1, 8, 5, 1}));
    } else {
      expected = Tensor(dtype, TensorShape({1, 1, 8, 5}));
    }
    test::FillValues<T>(
        &expected,
        {0,   0,   0,   0,   0,   24,  42,  60,  33,  12,  105, 150, 183, 95,
         32,  235, 312, 357, 178, 56,  187, 234, 261, 121, 32,  106, 126, 138,
         59,  12,  0,   0,   0,   0,   0,   0,   0,   0,   0,   0});
    // Create a fused pad+conv2d node
    if (!NativeFormatEnabled()) {
      TF_EXPECT_OK(NodeDefBuilder("fused_pad_conv_op", "_MklPadWithConv2D")
                       .Input(FakeInput(dtype))     // Input
                       .Input(FakeInput(dtype))     // Filter
                       .Input(FakeInput(DT_INT32))  // Padding
                       .Input(FakeInput(DT_UINT8))  // MKL second tensor
                       .Input(FakeInput(DT_UINT8))  // MKL second tensor
                       .Input(FakeInput(DT_UINT8))  // MKL second tensor
                       .Attr("padding", "VALID")
                       .Attr("data_format", data_format)
                       .Attr("T", dtype)
                       .Attr("strides", {1, stride, stride, 1})
                       .Attr("_kernel", "MklLayoutDependentOp")
                       .Finalize(node_def()));
    } else {
      TF_EXPECT_OK(
          NodeDefBuilder("fused_pad_conv_op", "_MklNativePadWithConv2D")
              .Input(FakeInput(dtype))     // Input
              .Input(FakeInput(dtype))     // Filter
              .Input(FakeInput(DT_INT32))  // Padding
              .Attr("padding", "VALID")
              .Attr("data_format", data_format)
              .Attr("T", dtype)
              .Attr("strides", {1, stride, stride, 1})
              .Attr("_kernel", "MklNameChangeOp")
              .Finalize(node_def()));
    }
    TF_EXPECT_OK(InitOp());
    // Setting up inputs and execute
    AddInputFromArray<T>(image.shape(), image.flat<T>());
    AddInputFromArray<T>(filter.shape(), filter.flat<T>());
    AddInputFromArray<int32>(padding.shape(), padding.flat<int32>());
    if (!NativeFormatEnabled()) {
      AddInputFromArray<uint8>(dummy_shape, dummy_tensor);
      AddInputFromArray<uint8>(dummy_shape, dummy_tensor);
      AddInputFromArray<uint8>(dummy_shape, dummy_tensor);
    }
    TF_ASSERT_OK(RunOpKernel());
    // Compare output to expected results
    const Tensor& first = *GetOutput(0);
    CommonTestUtilities<T> test_util;
    if (!NativeFormatEnabled()) {
      const Tensor& second = *GetOutput(2);
      test_util.ConvertAndCompareIntegral(dtype, first, second, expected);
    } else {
      test::ExpectTensorEqual<T>(expected, first);
    }
  }
};
TYPED_TEST_SUITE_P(FusedPadConvOpTest);
// Pad+Conv2D fusion with NHWC layout.
TYPED_TEST_P(FusedPadConvOpTest, PaddingConvTest) { this->Run("NHWC"); }
// Pad+Conv2D fusion with NCHW layout.
TYPED_TEST_P(FusedPadConvOpTest, PaddingConvTestNchw) { this->Run("NCHW"); }
REGISTER_TYPED_TEST_SUITE_P(FusedPadConvOpTest, PaddingConvTest,
                            PaddingConvTestNchw);
// bfloat16 coverage only when MKL bfloat16 support is compiled in.
#ifdef ENABLE_INTEL_MKL_BFLOAT16
using FusedPadConvDataTypes = ::testing::Types<float, bfloat16>;
#else
using FusedPadConvDataTypes = ::testing::Types<float>;
#endif
INSTANTIATE_TYPED_TEST_SUITE_P(Test, FusedPadConvOpTest, FusedPadConvDataTypes);
// Fixture exercising MKL Conv2D filter caching: the kernel is run twice with
// the same (constant) filter and both results are checked against `expected`.
class FilterCacheTest : public OpsTestBase {
 public:
  template <typename T>
  void Run(DataType dtype, Tensor& image, Tensor& filter, Tensor& expected,
           const bool is_filter_const) {
    const int stride = 1;
    // Build either the layout-dependent or native-format MKL Conv2D node;
    // `is_filter_const` is what enables the filter-caching code path.
    if (!NativeFormatEnabled()) {
      TF_EXPECT_OK(NodeDefBuilder("conv2d_filter_cache", "_MklConv2D")
                       .Input(FakeInput(dtype))     // Input
                       .Input(FakeInput(dtype))     // Filter
                       .Input(FakeInput(DT_UINT8))  // MKL second tensor
                       .Input(FakeInput(DT_UINT8))  // MKL second tensor
                       .Attr("padding", "VALID")
                       .Attr("data_format", "NHWC")
                       .Attr("is_filter_const", is_filter_const)
                       .Attr("T", dtype)
                       .Attr("strides", {1, stride, stride, 1})
                       .Attr("_kernel", "MklLayoutDependentOp")
                       .Finalize(node_def()));
    } else {
      TF_EXPECT_OK(NodeDefBuilder("conv2d_filter_cache", "_MklNativeConv2D")
                       .Input(FakeInput(dtype))  // Input
                       .Input(FakeInput(dtype))  // Filter
                       .Attr("padding", "VALID")
                       .Attr("data_format", "NHWC")
                       .Attr("is_filter_const", is_filter_const)
                       .Attr("T", dtype)
                       .Attr("strides", {1, stride, stride, 1})
                       .Attr("_kernel", "MklNameChangeOp")
                       .Finalize(node_def()));
    }
    TF_EXPECT_OK(InitOp());
    // Setting up inputs and execute
    AddInputFromArray<T>(image.shape(), image.flat<T>());
    AddInputFromArray<T>(filter.shape(), filter.flat<T>());
    if (!NativeFormatEnabled()) {
      AddInputFromArray<uint8>(dummy_shape, dummy_tensor);
      AddInputFromArray<uint8>(dummy_shape, dummy_tensor);
    }
    TF_ASSERT_OK(RunOpKernel());
    // Compare outputs to expected results
    const Tensor& output = *GetOutput(0);
    CommonTestUtilities<T> conv_comp;
    if (!NativeFormatEnabled()) {
      const Tensor& output_layout = *GetOutput(2);
      conv_comp.ConvertAndCompare(dtype, output, output_layout, expected);
    } else {
      test::ExpectTensorEqual<T>(expected, output);
    }
    // TODO(bhavanis): For now, we rely on internal performance tests to
    // determine if filter data is being cached and reused.
    // However, we still need to add a check here to determine if this is
    // still the case by inspecting the contents of the persistent tensor.
    // Second run: should hit the cached (reordered) filter and still match.
    TF_ASSERT_OK(RunOpKernel());
    // Compare output to expected results
    const Tensor& output_new = *GetOutput(0);
    CommonTestUtilities<T> conv_comp_new;
    if (!NativeFormatEnabled()) {
      const Tensor& output_layout_new = *GetOutput(2);
      conv_comp_new.ConvertAndCompare(dtype, output_new, output_layout_new,
                                      expected);
    } else {
      test::ExpectTensorEqual<T>(expected, output_new);
    }
  }
};
// Runs a float Conv2D twice with a constant filter to exercise the MKL
// filter-caching path; both executions must produce the same output.
TEST_F(FilterCacheTest, Conv2DFilterCacheTest) {
  // 1x3x4x1 NHWC input holding the values 1..12.
  Tensor image(DT_FLOAT, {/*batch=*/1, /*height=*/3, /*width=*/4, /*depth=*/1});
  test::FillValues<float>(&image, {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12});
  // A single 3x3 filter.
  Tensor filter(DT_FLOAT, {3, 3, 1, 1});
  test::FillValues<float>(&filter, {1, 4, 7, 2, 5, 8, 3, 6, 9});
  // VALID padding leaves a 1x2 spatial output.
  Tensor expected(DT_FLOAT, TensorShape({1, 1, 2, 1}));
  test::FillValues<float>(&expected, {312, 357});
  Run<float>(DT_FLOAT, image, filter, expected, /*is_filter_const=*/true);
}
// Testing fusion of MatMul and BiasAdd
// Fixture comparing the MKL fused MatMul kernel against a reference graph of
// separate MatMul + BiasAdd + optional activation ops.
template <typename T>
class MklFusedMatMulOpTest : public OpsTestBase {
 protected:
  // Builds both graphs for a [kBatch x kInputChannel] x
  // [kInputChannel x kOutputChannel] product and checks their outputs match.
  void VerifyFusedMatMul(const int kBatch, const int kInputChannel,
                         const int kOutputChannel,
                         const std::vector<string>& fused_ops) {
    // Reference path: plain MatMul followed by the separate ops named in
    // `fused_ops` (recognized: "BiasAdd", "Relu", "Relu6", "Elu", "Tanh").
    const FusedGraphRunner run_default =
        [this](const Tensor& input, const Tensor& weight, const Tensor& bias,
               const std::vector<string>& fused_ops, Tensor* output) {
          auto root = tensorflow::Scope::NewRootScope();
          auto input_op =
              ops::Const(root.WithOpName("input"), Input::Initializer(input));
          Output next_op = ops::MatMul(root.WithOpName("matmul"), input_op,
                                       ops::Const(root.WithOpName("weight"),
                                                  Input::Initializer(weight)));
          // `last_op` tracks the tail of the chain for fetching below.
          string last_op = "";
          if (std::find(fused_ops.begin(), fused_ops.end(), "BiasAdd") !=
              fused_ops.end()) {
            last_op = "with_bias";
            next_op = ops::BiasAdd(
                root.WithOpName(last_op), next_op,
                ops::Const(root.WithOpName("bias"), Input::Initializer(bias)));
          }
          if (std::find(fused_ops.begin(), fused_ops.end(), "Relu") !=
              fused_ops.end()) {
            last_op = "with_relu";
            next_op = ops::Relu(root.WithOpName(last_op), next_op);
          }
          if (std::find(fused_ops.begin(), fused_ops.end(), "Relu6") !=
              fused_ops.end()) {
            last_op = "with_relu6";
            next_op = ops::Relu6(root.WithOpName(last_op), next_op);
          }
          if (std::find(fused_ops.begin(), fused_ops.end(), "Elu") !=
              fused_ops.end()) {
            last_op = "with_elu";
            next_op = ops::Elu(root.WithOpName(last_op), next_op);
          }
          if (std::find(fused_ops.begin(), fused_ops.end(), "Tanh") !=
              fused_ops.end()) {
            last_op = "with_tanh";
            next_op = ops::Tanh(root.WithOpName(last_op), next_op);
          }
          CommonTestUtilities<T>::RunAndFetch(root, last_op, output);
        };
    // Path under test: a single _Mkl(Native)FusedMatMul kernel with the bias
    // passed as the one extra argument.
    const FusedGraphRunner run_fused =
        [this](const Tensor& input, const Tensor& weight, const Tensor& bias,
               const std::vector<string>& fused_ops, Tensor* output) {
          DataType dtype = DataTypeToEnum<T>::v();
          const int num_args = 1;
          if (!NativeFormatEnabled()) {
            TF_EXPECT_OK(NodeDefBuilder("MklFusedMatMul", "_MklFusedMatMul")
                             .Input(FakeInput(dtype))
                             .Input(FakeInput(dtype))
                             .Input(FakeInput(num_args, dtype))
                             .Input(FakeInput(DT_UINT8))
                             .Input(FakeInput(DT_UINT8))
                             .Input(FakeInput(num_args, DT_UINT8))
                             .Attr("T", dtype)
                             .Attr("transpose_a", false)
                             .Attr("transpose_b", false)
                             .Attr("num_args", num_args)
                             .Attr("fused_ops", fused_ops)
                             .Attr("epsilon", 0.0001)
                             .Attr("_kernel", "MklLayoutDependentOp")
                             .Finalize(node_def()));
          } else {
            TF_EXPECT_OK(
                NodeDefBuilder("MklFusedMatMul", "_MklNativeFusedMatMul")
                    .Input(FakeInput(dtype))
                    .Input(FakeInput(dtype))
                    .Input(FakeInput(num_args, dtype))
                    .Attr("T", dtype)
                    .Attr("transpose_a", false)
                    .Attr("transpose_b", false)
                    .Attr("num_args", num_args)
                    .Attr("fused_ops", fused_ops)
                    .Attr("epsilon", 0.0001)
                    .Attr("_kernel", "MklNameChangeOp")
                    .Finalize(node_def()));
          }
          TF_EXPECT_OK(InitOp());
          AddInputFromArray<T>(input.shape(), input.flat<T>());
          AddInputFromArray<T>(weight.shape(), weight.flat<T>());
          AddInputFromArray<T>(bias.shape(), bias.flat<T>());
          if (!NativeFormatEnabled()) {
            // Add MKL meta input for input, filter and bias.
            AddInputFromArray<uint8>(dummy_shape, dummy_tensor);
            AddInputFromArray<uint8>(dummy_shape, dummy_tensor);
            AddInputFromArray<uint8>(dummy_shape, dummy_tensor);
          }
          TF_ASSERT_OK(RunOpKernel());
          const Tensor& output_tensor = *GetOutput(0);
          if (!NativeFormatEnabled()) {
            const Tensor& output_meta_tensor = *GetOutput(1);
            CommonTestUtilities<T> test_util;
            test_util.PerformConversion(dtype, output_tensor,
                                        output_meta_tensor, output);
          } else {
            *output = output_tensor;
          }
        };
    CommonTestUtilities<T>::VerifyFusedMatrixClose(kInputChannel, kBatch,
                                                   kOutputChannel, fused_ops,
                                                   run_default, run_fused);
  }
};
TYPED_TEST_SUITE_P(MklFusedMatMulOpTest);
// MatMul + BiasAdd.
TYPED_TEST_P(MklFusedMatMulOpTest, WithBias) {
  this->VerifyFusedMatMul(/*batch=*/3, /*input_channel=*/4,
                          /*output_channel=*/5, {"BiasAdd"});
}
// MatMul + BiasAdd + Relu.
TYPED_TEST_P(MklFusedMatMulOpTest, WithBiasAndRelu) {
  this->VerifyFusedMatMul(/*batch=*/3, /*input_channel=*/4,
                          /*output_channel=*/5, {"BiasAdd", "Relu"});
}
// MatMul + BiasAdd + Relu6.
TYPED_TEST_P(MklFusedMatMulOpTest, WithBiasAndRelu6) {
  this->VerifyFusedMatMul(/*batch=*/3, /*input_channel=*/4,
                          /*output_channel=*/5, {"BiasAdd", "Relu6"});
}
// MatMul + BiasAdd + Elu.
TYPED_TEST_P(MklFusedMatMulOpTest, WithBiasAndElu) {
  this->VerifyFusedMatMul(/*batch=*/3, /*input_channel=*/4,
                          /*output_channel=*/5, {"BiasAdd", "Elu"});
}
// MatMul + BiasAdd + Tanh.
TYPED_TEST_P(MklFusedMatMulOpTest, WithBiasAndTanh) {
  this->VerifyFusedMatMul(/*batch=*/3, /*input_channel=*/4,
                          /*output_channel=*/5, {"BiasAdd", "Tanh"});
}
// Register and instantiate the fused-MatMul suite for float only.
REGISTER_TYPED_TEST_SUITE_P(MklFusedMatMulOpTest,  //
                            WithBias,              //
                            WithBiasAndRelu,       //
                            WithBiasAndRelu6,      //
                            WithBiasAndElu,        //
                            WithBiasAndTanh);
using MklFusedMatMulDataTypes = ::testing::Types<float>;
INSTANTIATE_TYPED_TEST_SUITE_P(Test, MklFusedMatMulOpTest,
                               MklFusedMatMulDataTypes);
// Test the performance of MklFusedMatMul weight cache.
// For the first time B matrix will be reordered and cached which will be
// used for subsequent runs
class MklFusedMatMulCacheTest : public OpsTestBase {};
// Runs a fused MatMul+BiasAdd twice; the second run should reuse the cached
// reordered weight matrix and therefore be measurably faster.
TEST_F(MklFusedMatMulCacheTest, WeightCached) {
  const int num_args = 1;
  const std::vector<string>& fused_ops = {"BiasAdd"};
  if (!NativeFormatEnabled()) {
    TF_ASSERT_OK(NodeDefBuilder("MklFusedMatMul", "_MklFusedMatMul")
                     .Input(FakeInput(DT_FLOAT))
                     .Input(FakeInput(DT_FLOAT))
                     .Input(FakeInput(num_args, DT_FLOAT))
                     .Input(FakeInput(DT_UINT8))
                     .Input(FakeInput(DT_UINT8))
                     .Input(FakeInput(num_args, DT_UINT8))
                     .Attr("T", DT_FLOAT)
                     .Attr("transpose_a", false)
                     .Attr("transpose_b", false)
                     .Attr("num_args", num_args)
                     .Attr("fused_ops", fused_ops)
                     .Attr("epsilon", 0.0001)
                     .Attr("_kernel", "MklLayoutDependentOp")
                     .Finalize(node_def()));
  } else {
    TF_ASSERT_OK(NodeDefBuilder("MklFusedMatMul", "_MklNativeFusedMatMul")
                     .Input(FakeInput(DT_FLOAT))
                     .Input(FakeInput(DT_FLOAT))
                     .Input(FakeInput(num_args, DT_FLOAT))
                     .Attr("T", DT_FLOAT)
                     .Attr("transpose_a", false)
                     .Attr("transpose_b", false)
                     .Attr("num_args", num_args)
                     .Attr("fused_ops", fused_ops)
                     .Attr("epsilon", 0.0001)
                     .Attr("_kernel", "MklNameChangeOp")
                     .Finalize(node_def()));
  }
  TF_EXPECT_OK(InitOp());
  // The tensor shape of (1,3) is selected to allow the mkldnn expected
  // weight format to be made as OI rather than IO for BS > 1
  // A matrix is:
  // |  1 |  2 |  3 |
  AddInputFromArray<float>(TensorShape({1, 3}), {1, 2, 3});
  // B matrix is:
  // |  7 |  8 |  9 | 10 |
  // | 11 | 12 | 13 | 14 |
  // | 15 | 16 | 17 | 18 |
  AddInputFromArray<float>(TensorShape({3, 4}),
                           {7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18});
  // Bias vector.
  AddInputFromArray<float>(TensorShape({4}), {1, 2, 3, 4});
  if (!NativeFormatEnabled()) {
    // Add MKL meta input for input, filter and bias.
    AddInputFromArray<uint8>(dummy_shape, dummy_tensor);
    AddInputFromArray<uint8>(dummy_shape, dummy_tensor);
    AddInputFromArray<uint8>(dummy_shape, dummy_tensor);
  }
  // Time the first (uncached) run.
  int64 start_time = Env::Default()->NowMicros();
  TF_ASSERT_OK(RunOpKernel());
  int64 end_time = Env::Default()->NowMicros();
  int64 total_duration_unopt = end_time - start_time;
  // Final result after Bias addition:
  // | 75 | 82 | 89 | 96 |
  Tensor expected(DT_FLOAT, TensorShape({1, 4}));
  test::FillValues<float>(&expected, {75, 82, 89, 96});
  const Tensor& output = *GetOutput(0);
  CommonTestUtilities<float> test_util;
  if (!NativeFormatEnabled()) {
    const Tensor& mkl_shape_tensor = *GetOutput(1);
    test_util.ConvertAndCompare(DT_FLOAT, output, mkl_shape_tensor, expected);
  } else {
    test::ExpectTensorNear<float>(expected, output, 1e-5);
  }
  // Test for the second time to use the cached weight
  start_time = Env::Default()->NowMicros();
  TF_ASSERT_OK(RunOpKernel());
  end_time = Env::Default()->NowMicros();
  int64 total_duration_opt = end_time - start_time;
  LOG(INFO) << " Time taken by first call : " << total_duration_unopt
            << ", Time taken after Caching : " << total_duration_opt;
  // Cached call should be at least 20% faster.
  // NOTE(review): this wall-clock comparison can be flaky on loaded or
  // virtualized machines — consider a larger margin or a retry if it flakes.
  EXPECT_LT(total_duration_opt, total_duration_unopt * 0.8);
  // Compare the result with expected result
  CommonTestUtilities<float> test_util_new;
  const Tensor& output_new = *GetOutput(0);
  if (!NativeFormatEnabled()) {
    const Tensor& mkl_shape_tensor_new = *GetOutput(1);
    test_util_new.ConvertAndCompare(DT_FLOAT, output_new, mkl_shape_tensor_new,
                                    expected);
  } else {
    test::ExpectTensorNear<float>(expected, output_new, 1e-5);
  }
}
// Fixture exercising bias caching in the quantized fused Conv2D kernel
// (_MklQuantizedConv2DWithBiasAndReluAndRequantize): the op runs twice and
// both results must match `expected`.
class BiasCacheTest : public OpsTestBase {
 public:
  template <typename T>
  void Run(DataType dtype, Tensor& image, Tensor& filter, Tensor& bias,
           Tensor& min_input, Tensor& max_input, Tensor& min_filter,
           Tensor& max_filter, Tensor& min_output, Tensor& max_output,
           Tensor& expected, const bool is_filter_const) {
    const int stride = 1;
    // `is_bias_const` is hard-wired true: bias caching is the code path
    // under test here.
    TF_EXPECT_OK(
        NodeDefBuilder("quantized_conv2d_bias_cache",
                       "_MklQuantizedConv2DWithBiasAndReluAndRequantize")
            .Input(FakeInput(dtype))     // Input
            .Input(FakeInput(DT_QINT8))  // Filter
            .Input(FakeInput(DT_FLOAT))  // Bias
            .Input(FakeInput(DT_FLOAT))  // Min-input
            .Input(FakeInput(DT_FLOAT))  // Max-input
            .Input(FakeInput(DT_FLOAT))  // Min-filter
            .Input(FakeInput(DT_FLOAT))  // Max-filter
            .Input(FakeInput(DT_FLOAT))  // Min-output
            .Input(FakeInput(DT_FLOAT))  // Max-output
            .Input(FakeInput(DT_UINT8))  // MKL second tensor
            .Input(FakeInput(DT_UINT8))  // MKL second tensor
            .Input(FakeInput(DT_UINT8))  // MKL second tensor
            .Input(FakeInput(DT_UINT8))  // MKL second tensor
            .Input(FakeInput(DT_UINT8))  // MKL second tensor
            .Input(FakeInput(DT_UINT8))  // MKL second tensor
            .Input(FakeInput(DT_UINT8))  // MKL second tensor
            .Input(FakeInput(DT_UINT8))  // MKL second tensor
            .Input(FakeInput(DT_UINT8))  // MKL second tensor
            .Attr("Tinput", DT_QUINT8)
            .Attr("Tfilter", DT_QINT8)
            .Attr("Tbias", DT_FLOAT)
            .Attr("T", DT_QINT8)
            .Attr("out_type", DT_QUINT8)
            .Attr("data_format", "NHWC")
            .Attr("strides", {1, stride, stride, 1})
            .Attr("is_filter_const", is_filter_const)
            .Attr("is_bias_const", true)
            .Attr("padding", "VALID")
            .Attr("_kernel", "QuantizedMklOp")
            .Finalize(node_def()));
    TF_EXPECT_OK(InitOp());
    // Setting up inputs and execute
    AddInputFromArray<quint8>(image.shape(), image.flat<quint8>());
    AddInputFromArray<qint8>(filter.shape(), filter.flat<qint8>());
    AddInputFromArray<float>(bias.shape(), bias.flat<float>());
    AddInputFromArray<float>(min_input.shape(), min_input.flat<float>());
    AddInputFromArray<float>(max_input.shape(), max_input.flat<float>());
    AddInputFromArray<float>(min_filter.shape(), min_filter.flat<float>());
    AddInputFromArray<float>(max_filter.shape(), max_filter.flat<float>());
    AddInputFromArray<float>(min_output.shape(), min_output.flat<float>());
    AddInputFromArray<float>(max_output.shape(), max_output.flat<float>());
    // Nine dummy MKL metadata tensors, one per data input above.
    AddInputFromArray<uint8>(dummy_shape, dummy_tensor);
    AddInputFromArray<uint8>(dummy_shape, dummy_tensor);
    AddInputFromArray<uint8>(dummy_shape, dummy_tensor);
    AddInputFromArray<uint8>(dummy_shape, dummy_tensor);
    AddInputFromArray<uint8>(dummy_shape, dummy_tensor);
    AddInputFromArray<uint8>(dummy_shape, dummy_tensor);
    AddInputFromArray<uint8>(dummy_shape, dummy_tensor);
    AddInputFromArray<uint8>(dummy_shape, dummy_tensor);
    AddInputFromArray<uint8>(dummy_shape, dummy_tensor);
    TF_ASSERT_OK(RunOpKernel());
    // Compare outputs to expected results
    const Tensor& output = *GetOutput(0);
    const Tensor& output_layout = *GetOutput(3);
    CommonTestUtilities<quint8> conv_comp;
    conv_comp.ConvertAndCompareIntegral(dtype, output, output_layout, expected);
    // TODO(wenxi): For now, we rely on internal performance tests to
    // determine if filter data is being cached and reused.
    // However, we still need to add a check here to determine if this is
    // still the case by inspecting the contents of the persistent tensor.
    TF_ASSERT_OK(RunOpKernel());
    // Compare output to expected results
    const Tensor& output_new = *GetOutput(0);
    const Tensor& output_layout_new = *GetOutput(3);
    CommonTestUtilities<quint8> conv_comp_new;
    conv_comp_new.ConvertAndCompareIntegral(dtype, output_new,
                                            output_layout_new, expected);
  }
};
// Drives BiasCacheTest::Run with a quantized 1x3x4x1 image, one 3x3 qint8
// filter, a unit bias, and degenerate [1, 1] quantization ranges; the
// requantized output saturates to 255 in both positions.
TEST_F(BiasCacheTest, Conv2DBiasCacheTest) {
  Tensor image(DT_QUINT8, {/*batch=*/1, /*height=*/3, /*width=*/4, /*depth=*/1});
  test::FillValues<quint8>(&image, {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12});
  Tensor filter(DT_QINT8, {3, 3, 1, 1});
  test::FillValues<qint8>(&filter, {1, 4, 7, 2, 5, 8, 3, 6, 9});
  Tensor bias(DT_FLOAT, {1});
  test::FillValues<float>(&bias, {1});
  // Every min/max quantization bound is the same scalar value 1.
  auto MakeScalar = [](float value) {
    Tensor t(DT_FLOAT, {1});
    test::FillValues<float>(&t, {value});
    return t;
  };
  Tensor min_input = MakeScalar(1);
  Tensor max_input = MakeScalar(1);
  Tensor min_filter = MakeScalar(1);
  Tensor max_filter = MakeScalar(1);
  Tensor min_output = MakeScalar(1);
  Tensor max_output = MakeScalar(1);
  Tensor expected(DT_QUINT8, TensorShape({1, 1, 2, 1}));
  test::FillValues<quint8>(&expected, {255, 255});
  Run<float>(DT_QUINT8, image, filter, bias, min_input, max_input, min_filter,
             max_filter, min_output, max_output, expected, true);
}
// Tests fusion of Pad with FusedConv2D: verifies that the single fused MKL
// kernel (_MklPadWithFusedConv2D / _MklNativePadWithFusedConv2D) produces the
// same results as the equivalent unfused Pad -> Conv2D -> BiasAdd[/Relu] graph.
template <typename T>
class MklPadWithFusedConv2DOpTest : public OpsTestBase {
 protected:
  // Default input geometry shared by the Verify* helpers below.
  static constexpr int kDepth = 3;
  static constexpr int kImageWidth = 30;
  static constexpr int kImageHeight = 28;
  static constexpr int kImageBatchCount = 8;

  // 0: top pad, 1: bottom pad, 2: left pad, 3: right pad
  int padding_list_[4];

  // Verifies that computing Pad+Conv2D+BiasAdd in a graph is identical to
  // FusedConv2D.
  void VerifyPadAndConv2DWithBias(int filter_size, int filter_count,
                                  int depth = kDepth,
                                  int image_width = kImageWidth,
                                  int image_height = kImageHeight,
                                  int image_batch_count = kImageBatchCount) {
    // Reference path: separate Pad, Conv2D and BiasAdd ops.
    const BiasAddGraphRunner run_default = [this](const Tensor& input_data,
                                                  const Tensor& filter_data,
                                                  const Tensor& bias_data,
                                                  Tensor* out) {
      RunMklPadWithFusedConv2DAndBias(input_data, filter_data, bias_data, out);
    };
    // Fused path: one kernel performing pad + conv + bias.
    const BiasAddGraphRunner run_fused =
        [this](const Tensor& input_data, const Tensor& filter_data,
               const Tensor& bias_data, Tensor* out) {
          RunMklFusedConv2DWithPadOp(input_data, filter_data, {bias_data},
                                     {"BiasAdd"}, out);
        };
    CommonTestUtilities<T>::VerifyBiasAddTensorsClose(
        depth, image_width, image_height, image_batch_count, filter_size,
        filter_count, run_default, run_fused);
  }

  // Verifies that computing Pad+Conv2D+BiasAdd+Relu in a graph is identical to
  // FusedConv2D.
  void VerifyPadAndConv2DWithBiasRelu(
      int filter_size, int filter_count, int depth = kDepth,
      int image_width = kImageWidth, int image_height = kImageHeight,
      int image_batch_count = kImageBatchCount) {
    // Reference path: separate Pad, Conv2D, BiasAdd and Relu ops.
    const BiasAddGraphRunner run_default =
        [this](const Tensor& input_data, const Tensor& filter_data,
               const Tensor& bias_data, Tensor* out) {
          RunMklPadWithFusedConv2DAndBiasRelu(input_data, filter_data,
                                              bias_data, out);
        };
    // Fused path: one kernel performing pad + conv + bias + relu.
    const BiasAddGraphRunner run_fused =
        [this](const Tensor& input_data, const Tensor& filter_data,
               const Tensor& bias_data, Tensor* out) {
          RunMklFusedConv2DWithPadOp(input_data, filter_data, {bias_data},
                                     {"BiasAdd", "Relu"}, out);
        };
    CommonTestUtilities<T>::VerifyBiasAddTensorsClose(
        depth, image_width, image_height, image_batch_count, filter_size,
        filter_count, run_default, run_fused);
  }

  // Builds and runs the unfused reference graph: Pad -> Conv2D -> BiasAdd.
  void RunMklPadWithFusedConv2DAndBias(const Tensor& input_data,
                                       const Tensor& filter_data,
                                       const Tensor& bias_data, Tensor* output,
                                       int stride = 1) {
    auto root = tensorflow::Scope::NewRootScope();
    // FusedConv2D only supports NHWC format so we use NHWC here.
    // Paddings pairs in NHWC order: {batch, height, width, channel}.
    auto padding = ops::Const(root.WithOpName("padding"),
                              {0, 0, padding_list_[0], padding_list_[1],
                               padding_list_[2], padding_list_[3], 0, 0},
                              {4, 2});
    auto pad = ops::Pad(
        root.WithOpName("pad"),
        ops::Const(root.WithOpName("input"), Input::Initializer(input_data)),
        padding);
    // "VALID" here is correct: the explicit Pad above supplies the padding.
    auto conv = ops::Conv2D(
        root.WithOpName("conv"), pad,
        ops::Const(root.WithOpName("filter"), Input::Initializer(filter_data)),
        {1, stride, stride, 1}, "VALID");
    auto with_bias = ops::BiasAdd(
        root.WithOpName("with_bias"), conv,
        ops::Const(root.WithOpName("bias"), Input::Initializer(bias_data)));
    CommonTestUtilities<T>::RunAndFetch(root, "with_bias", output);
  }

  // Builds and runs the unfused reference graph:
  // Pad -> Conv2D -> BiasAdd -> Relu.
  void RunMklPadWithFusedConv2DAndBiasRelu(const Tensor& input_data,
                                           const Tensor& filter_data,
                                           const Tensor& bias_data,
                                           Tensor* output, int stride = 1) {
    auto root = tensorflow::Scope::NewRootScope();
    // FusedConv2D only supports NHWC format so we use NHWC here.
    // Paddings pairs in NHWC order: {batch, height, width, channel}.
    auto padding = ops::Const(root.WithOpName("padding"),
                              {0, 0, padding_list_[0], padding_list_[1],
                               padding_list_[2], padding_list_[3], 0, 0},
                              {4, 2});
    auto pad = ops::Pad(
        root.WithOpName("pad"),
        ops::Const(root.WithOpName("input"), Input::Initializer(input_data)),
        padding);
    auto conv = ops::Conv2D(
        root.WithOpName("conv"), pad,
        ops::Const(root.WithOpName("filter"), Input::Initializer(filter_data)),
        {1, stride, stride, 1}, "VALID");
    auto with_bias = ops::BiasAdd(
        root.WithOpName("with_bias"), conv,
        ops::Const(root.WithOpName("bias"), Input::Initializer(bias_data)));
    auto with_relu = ops::Relu(root.WithOpName("with_relu"), with_bias);
    CommonTestUtilities<T>::RunAndFetch(root, "with_relu", output);
  }

  // Runs the fused kernel directly. Chooses the layout-dependent or native
  // variant depending on NativeFormatEnabled(); the layout-dependent variant
  // additionally requires one DT_UINT8 MKL metadata input per data input.
  void RunMklFusedConv2DWithPadOp(const Tensor& image, const Tensor& filter,
                                  const std::vector<Tensor>& args,
                                  const std::vector<string>& fused_ops,
                                  Tensor* output, int stride = 1) {
    DataType dtype = DataTypeToEnum<T>::v();
    const int num_args = static_cast<int>(args.size());
    // Paddings pairs in NHWC order: {batch, height, width, channel}.
    Tensor padding(DT_INT32, {4, 2});
    test::FillValues<int32>(
        &padding, {0, 0, padding_list_[0], padding_list_[1], padding_list_[2],
                   padding_list_[3], 0, 0});
    if (!NativeFormatEnabled()) {
      TF_EXPECT_OK(NodeDefBuilder("pad_fused_conv_op", "_MklPadWithFusedConv2D")
                       .Input(FakeInput(dtype))
                       .Input(FakeInput(dtype))
                       .Input(FakeInput(num_args, dtype))
                       .Input(FakeInput(DT_INT32))
                       .Input(FakeInput(DT_UINT8))
                       .Input(FakeInput(DT_UINT8))
                       .Input(FakeInput(num_args, DT_UINT8))
                       .Input(FakeInput(DT_UINT8))
                       .Attr("T", dtype)
                       .Attr("num_args", num_args)
                       .Attr("strides", {1, stride, stride, 1})
                       .Attr("padding", "VALID")
                       .Attr("fused_ops", fused_ops)
                       .Attr("_kernel", "MklLayoutDependentOp")
                       .Finalize(node_def()));
    } else {
      TF_EXPECT_OK(
          NodeDefBuilder("pad_fused_conv_op", "_MklNativePadWithFusedConv2D")
              .Input(FakeInput(dtype))
              .Input(FakeInput(dtype))
              .Input(FakeInput(num_args, dtype))
              .Input(FakeInput(DT_INT32))
              .Attr("T", dtype)
              .Attr("num_args", num_args)
              .Attr("strides", {1, stride, stride, 1})
              .Attr("padding", "VALID")
              .Attr("fused_ops", fused_ops)
              .Attr("_kernel", "MklNameChangeOp")
              .Finalize(node_def()));
    }
    TF_EXPECT_OK(InitOp());
    AddInputFromArray<T>(image.shape(), image.flat<T>());
    AddInputFromArray<T>(filter.shape(), filter.flat<T>());
    for (const Tensor& arg : args)
      AddInputFromArray<T>(arg.shape(), arg.flat<T>());
    AddInputFromArray<int32>(padding.shape(), padding.flat<int32>());
    if (!NativeFormatEnabled()) {
      // Add MKL meta input for input, filter, pad and args.
      for (int i = 0; i < args.size() + 3; ++i)
        AddInputFromArray<uint8>(dummy_shape, dummy_tensor);
    }
    TF_ASSERT_OK(RunOpKernel());
    // Compare output to expected results
    const Tensor& output_tensor = *GetOutput(0);
    CommonTestUtilities<T> test_util;
    if (!NativeFormatEnabled()) {
      // Index 2 will need to be changed if the number of outputs produced
      // by MklConv2D change.
      const Tensor& output_meta_tensor = *GetOutput(2);
      test_util.PerformConversion(dtype, output_tensor, output_meta_tensor,
                                  output);
    } else {
      *output = output_tensor;
    }
  }

 public:
  // Sets the explicit spatial padding used by all Run*/Verify* helpers.
  void SetPaddingList(int top, int bottom, int left, int right) {
    padding_list_[0] = top;
    padding_list_[1] = bottom;
    padding_list_[2] = left;
    padding_list_[3] = right;
  }
};
TYPED_TEST_SUITE_P(MklPadWithFusedConv2DOpTest);

// Symmetric padding on each axis (top==bottom, left==right), BiasAdd fusion.
TYPED_TEST_P(MklPadWithFusedConv2DOpTest, WithBiasAndRoundPad) {
  const int kFilterSize = 1;
  const int kFilterCount = 12;
  this->SetPaddingList(2, 2, 1, 1);
  this->VerifyPadAndConv2DWithBias(kFilterSize, kFilterCount);
}

// Padding only on the top and left edges, BiasAdd fusion.
TYPED_TEST_P(MklPadWithFusedConv2DOpTest, WithBiasAndPartialPad) {
  const int kFilterSize = 1;
  const int kFilterCount = 12;
  this->SetPaddingList(4, 0, 2, 0);
  this->VerifyPadAndConv2DWithBias(kFilterSize, kFilterCount);
}

// Symmetric padding with an additional fused Relu.
TYPED_TEST_P(MklPadWithFusedConv2DOpTest, WithBiasReluAndRoundPad) {
  const int kFilterSize = 1;
  const int kFilterCount = 12;
  this->SetPaddingList(2, 2, 1, 1);
  this->VerifyPadAndConv2DWithBiasRelu(kFilterSize, kFilterCount);
}

// Top/left-only padding with an additional fused Relu.
TYPED_TEST_P(MklPadWithFusedConv2DOpTest, WithBiasReluAndPartialPad) {
  const int kFilterSize = 1;
  const int kFilterCount = 12;
  this->SetPaddingList(4, 0, 2, 0);
  this->VerifyPadAndConv2DWithBiasRelu(kFilterSize, kFilterCount);
}

// Every test above must also be listed here or it will silently not run.
REGISTER_TYPED_TEST_SUITE_P(MklPadWithFusedConv2DOpTest,  //
                            WithBiasAndRoundPad,          //
                            WithBiasAndPartialPad,        //
                            WithBiasReluAndRoundPad,      //
                            WithBiasReluAndPartialPad);

// Only float is exercised; extend this list to cover more element types.
using MklPadWithFusedConv2DDataTypes = ::testing::Types<float>;
INSTANTIATE_TYPED_TEST_SUITE_P(Test, MklPadWithFusedConv2DOpTest,
                               MklPadWithFusedConv2DDataTypes);
} // namespace tensorflow
#endif // INTEL_MKL
| apache-2.0 |
shopiz/shopiz | src/app_helper/webroot/index.php | 943 | <?php
error_reporting(E_ALL);
try {
    // Application name
    define('APP_NAME', 'app_helper');
    /**
     * Environment
     * Allowed values: production, develop, test
     */
    // define('ENVIRON', 'develop');
    define("DS", DIRECTORY_SEPARATOR);
    define("APP_PATH", dirname(__DIR__) . DS);
    define("BASE_PATH", dirname(APP_PATH) . DS);
    define("LIBRARY_PATH", BASE_PATH . "library" . DS);
    define("DATA_PATH", BASE_PATH . "data" . DS);
    /**
     * Read the configuration
     */
    $config = include APP_PATH . "config/config.php";
    /**
     * Read auto-loader
     */
    include APP_PATH . "config/loader.php";
    /**
     * Read services
     * NOTE(review): $di used below is presumably created inside
     * config/services.php -- verify against that file.
     */
    include APP_PATH . "config/services.php";
    $di->set('config', $config);
    /**
     * Handle the request
     */
    $application = new \Phalcon\Mvc\Application($di);
    echo $application->handle()->getContent();
} catch (\Exception $e) {
    echo $e->getMessage();
}
| apache-2.0 |
SingingTree/rustfmt | tests/source/configs-closure_block_indent_threshold-2.rs | 255 | // rustfmt-closure_block_indent_threshold: 2
// Closure block indent threshold
fn main() {
lorem_ipsum(|| {
println!("lorem");
println!("ipsum");
println!("dolor");
println!("sit");
println!("amet");
});
}
| apache-2.0 |
wolfboys/opencron-dev | jobx-server/src/main/java/com/jobxhub/server/dto/Agent.java | 6487 | /**
* Copyright (c) 2015 The JobX Project
* <p>
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.jobxhub.server.dto;
import com.google.common.base.Function;
import com.jobxhub.server.domain.AgentBean;
import com.jobxhub.server.domain.UserBean;
import org.springframework.beans.BeanUtils;

import java.io.Serializable;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Objects;
/**
 * View/DTO object describing an execution agent, converted from the
 * persistent {@link AgentBean} entity. Identity (equals/hashCode) is defined
 * solely by {@code agentId}.
 */
public class Agent implements Serializable {

    private Long agentId;
    private String machineId;
    // id of the proxy executor used to reach this agent (when proxied)
    private Long proxyId;
    private String host;
    // agent platform: 0 = other, 1 = unix-like kernel, 2 = windows
    private Integer platform;
    private Integer port;
    private String name;
    private String password;
    private Boolean warning;
    private String email;
    private String mobile;
    private Integer status; // 1 = reachable, 0 = unreachable/lost, 2 = wrong password
    private Date notifyTime; // when the failure alert notification was last sent
    private String comment;
    private Date updateTime;
    private List<Group> groups = new ArrayList<Group>(0);
    private Integer taskCount;
    private List<UserBean> users = new ArrayList<UserBean>();
    private Boolean proxy = false; // whether this agent is accessed through a proxy

    /** Adapter converting {@link AgentBean} entities into {@link Agent} DTOs. */
    public static Function<? super AgentBean, ? extends Agent> transfer = new Function<AgentBean, Agent>() {
        @Override
        public Agent apply(AgentBean input) {
            return new Agent(input);
        }
    };

    public Agent() {
    }

    /** Builds a DTO by copying all same-named properties from the entity. */
    public Agent(AgentBean agent) {
        BeanUtils.copyProperties(agent, this);
    }

    public Long getAgentId() {
        return agentId;
    }

    public void setAgentId(Long agentId) {
        this.agentId = agentId;
    }

    public String getMachineId() {
        return machineId;
    }

    public void setMachineId(String machineId) {
        this.machineId = machineId;
    }

    public Long getProxyId() {
        return proxyId;
    }

    public void setProxyId(Long proxyId) {
        this.proxyId = proxyId;
    }

    public String getHost() {
        return host;
    }

    public void setHost(String host) {
        this.host = host;
    }

    public Integer getPlatform() {
        return platform;
    }

    public void setPlatform(Integer platform) {
        this.platform = platform;
    }

    public Integer getPort() {
        return port;
    }

    public void setPort(Integer port) {
        this.port = port;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getPassword() {
        return password;
    }

    public void setPassword(String password) {
        this.password = password;
    }

    public Boolean getWarning() {
        return warning;
    }

    public void setWarning(Boolean warning) {
        this.warning = warning;
    }

    public String getEmail() {
        return email;
    }

    public void setEmail(String email) {
        this.email = email;
    }

    public String getMobile() {
        return mobile;
    }

    public void setMobile(String mobile) {
        this.mobile = mobile;
    }

    public Integer getStatus() {
        return status;
    }

    public void setStatus(Integer status) {
        this.status = status;
    }

    public Date getNotifyTime() {
        return notifyTime;
    }

    public void setNotifyTime(Date notifyTime) {
        this.notifyTime = notifyTime;
    }

    public String getComment() {
        return comment;
    }

    public void setComment(String comment) {
        this.comment = comment;
    }

    public Date getUpdateTime() {
        return updateTime;
    }

    public void setUpdateTime(Date updateTime) {
        this.updateTime = updateTime;
    }

    public List<Group> getGroups() {
        return groups;
    }

    public void setGroups(List<Group> groups) {
        this.groups = groups;
    }

    public Integer getTaskCount() {
        return taskCount;
    }

    public void setTaskCount(Integer taskCount) {
        this.taskCount = taskCount;
    }

    public List<UserBean> getUsers() {
        return users;
    }

    public void setUsers(List<UserBean> users) {
        this.users = users;
    }

    public Boolean getProxy() {
        return proxy;
    }

    public void setProxy(Boolean proxy) {
        this.proxy = proxy;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        Agent agent = (Agent) o;
        // Objects.equals reproduces the previous explicit null-safe comparison.
        return Objects.equals(getAgentId(), agent.getAgentId());
    }

    @Override
    public int hashCode() {
        // Objects.hashCode(null) == 0, matching the previous null check exactly.
        return Objects.hashCode(getAgentId());
    }

    @Override
    public String toString() {
        return "Agent{" +
                "agentId=" + agentId +
                ", machineId='" + machineId + '\'' +
                ", proxyId=" + proxyId +
                ", host='" + host + '\'' +
                ", platform=" + platform +
                ", port=" + port +
                ", name='" + name + '\'' +
                ", password='" + password + '\'' +
                ", warning=" + warning +
                ", email='" + email + '\'' +
                ", mobile='" + mobile + '\'' +
                ", status=" + status +
                ", notifyTime=" + notifyTime +
                ", comment='" + comment + '\'' +
                ", updateTime=" + updateTime +
                ", groups=" + groups +
                ", taskCount=" + taskCount +
                ", users=" + users +
                ", proxy=" + proxy +
                '}';
    }
}
| apache-2.0 |
BanzaiTokyo/akihabara-tokyo | askapp/backends.py | 641 | """Taken from https://github.com/jasonbeverage/django-token"""
from askapp.models import Token
from django.contrib.auth import get_user_model
User = get_user_model()
class TokenBackend(object):
    """Django authentication backend that authenticates by a Token key.

    Taken from https://github.com/jasonbeverage/django-token, with an
    added short-circuit when no token is supplied.
    """

    def authenticate(self, request, token=None):
        """Return the user owning ``token``, or ``None`` if no match.

        Django invokes every configured backend with whatever credential
        kwargs were passed to ``django.contrib.auth.authenticate``; when a
        different backend's credentials are in play this one is called with
        ``token=None``, so bail out early instead of querying the database
        with a NULL key.
        """
        if token is None:
            return None
        try:
            return Token.objects.get(key=token).user
        except Token.DoesNotExist:
            return None

    def get_user(self, user_id):
        """Return the user with primary key ``user_id``, or ``None``."""
        try:
            return User.objects.get(pk=user_id)
        except User.DoesNotExist:
            return None
| apache-2.0 |
ConeyLiu/spark | sql/catalyst/src/test/scala/org/apache/spark/sql/types/DataTypeWriteCompatibilitySuite.scala | 21445 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.types
import scala.collection.mutable
import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.catalyst.analysis
import org.apache.spark.sql.catalyst.expressions.Cast
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.internal.SQLConf.StoreAssignmentPolicy
// Write-compatibility behavior specific to the STRICT store assignment
// policy: only guaranteed-lossless (up) casts are permitted, and NullType
// may not be written to any other type.
class StrictDataTypeWriteCompatibilitySuite extends DataTypeWriteCompatibilityBaseSuite {
  override def storeAssignmentPolicy: SQLConf.StoreAssignmentPolicy.Value =
    StoreAssignmentPolicy.STRICT

  // STRICT accepts a write only when the cast can never lose information.
  override def canCast: (DataType, DataType) => Boolean = Cast.canUpCast

  test("Check struct types: unsafe casts are not allowed") {
    // double -> float fields would narrow, so both struct fields must error.
    assertNumErrors(widerPoint2, point2, "t",
      "Should fail because types require unsafe casts", 2) { errs =>
      assert(errs(0).contains("'t.x'"), "Should include the nested field name context")
      assert(errs(0).contains("Cannot safely cast"))

      assert(errs(1).contains("'t.y'"), "Should include the nested field name context")
      assert(errs(1).contains("Cannot safely cast"))
    }
  }

  test("Check array types: unsafe casts are not allowed") {
    val arrayOfLong = ArrayType(LongType)
    val arrayOfInt = ArrayType(IntegerType)

    assertSingleError(arrayOfLong, arrayOfInt, "arr",
      "Should not allow array of longs to array of ints") { err =>
      assert(err.contains("'arr.element'"),
        "Should identify problem with named array's element type")
      assert(err.contains("Cannot safely cast"))
    }
  }

  test("Check map value types: casting Long to Integer is not allowed") {
    val mapOfLong = MapType(StringType, LongType)
    val mapOfInt = MapType(StringType, IntegerType)

    assertSingleError(mapOfLong, mapOfInt, "m",
      "Should not allow map of longs to map of ints") { err =>
      assert(err.contains("'m.value'"), "Should identify problem with named map's value type")
      assert(err.contains("Cannot safely cast"))
    }
  }

  test("Check map key types: unsafe casts are not allowed") {
    val mapKeyLong = MapType(LongType, StringType)
    val mapKeyInt = MapType(IntegerType, StringType)

    assertSingleError(mapKeyLong, mapKeyInt, "m",
      "Should not allow map of long keys to map of int keys") { err =>
      assert(err.contains("'m.key'"), "Should identify problem with named map's key type")
      assert(err.contains("Cannot safely cast"))
    }
  }

  test("Check NullType is incompatible with all other types") {
    // Under STRICT, writing nulls requires the exact NullType on both sides.
    allNonNullTypes.foreach { t =>
      assertSingleError(NullType, t, "nulls", s"Should not allow writing None to type $t") { err =>
        assert(err.contains(s"incompatible with ${t.catalogString}"))
      }
    }
  }
}
// Write-compatibility behavior specific to the ANSI store assignment policy:
// numeric narrowing is tolerated per the SQL standard, but string -> numeric,
// numeric <-> timestamp and similar conversions are rejected, and NullType is
// writable everywhere.
class ANSIDataTypeWriteCompatibilitySuite extends DataTypeWriteCompatibilityBaseSuite {
  override protected def storeAssignmentPolicy: SQLConf.StoreAssignmentPolicy.Value =
    StoreAssignmentPolicy.ANSI

  // ANSI accepts a write when the cast is allowed by ANSI SQL store
  // assignment rules.
  override def canCast: (DataType, DataType) => Boolean = Cast.canANSIStoreAssign

  test("Check map value types: unsafe casts are not allowed") {
    val mapOfString = MapType(StringType, StringType)
    val mapOfInt = MapType(StringType, IntegerType)

    assertSingleError(mapOfString, mapOfInt, "m",
      "Should not allow map of strings to map of ints") { err =>
      assert(err.contains("'m.value'"), "Should identify problem with named map's value type")
      assert(err.contains("Cannot safely cast"))
    }
  }

  // Same shape as point2 but with string fields, to exercise the
  // string -> float rejection below.
  private val stringPoint2 = StructType(Seq(
    StructField("x", StringType, nullable = false),
    StructField("y", StringType, nullable = false)))

  test("Check struct types: unsafe casts are not allowed") {
    assertNumErrors(stringPoint2, point2, "t",
      "Should fail because types require unsafe casts", 2) { errs =>
      assert(errs(0).contains("'t.x'"), "Should include the nested field name context")
      assert(errs(0).contains("Cannot safely cast"))

      assert(errs(1).contains("'t.y'"), "Should include the nested field name context")
      assert(errs(1).contains("Cannot safely cast"))
    }
  }

  test("Check array types: unsafe casts are not allowed") {
    val arrayOfString = ArrayType(StringType)
    val arrayOfInt = ArrayType(IntegerType)

    assertSingleError(arrayOfString, arrayOfInt, "arr",
      "Should not allow array of strings to array of ints") { err =>
      assert(err.contains("'arr.element'"),
        "Should identify problem with named array's element type")
      assert(err.contains("Cannot safely cast"))
    }
  }

  test("Check map key types: unsafe casts are not allowed") {
    val mapKeyString = MapType(StringType, StringType)
    val mapKeyInt = MapType(IntegerType, StringType)

    assertSingleError(mapKeyString, mapKeyInt, "m",
      "Should not allow map of string keys to map of int keys") { err =>
      assert(err.contains("'m.key'"), "Should identify problem with named map's key type")
      assert(err.contains("Cannot safely cast"))
    }
  }

  test("Conversions between timestamp and long are not allowed") {
    assertSingleError(LongType, TimestampType, "longToTimestamp",
      "Should not allow long to timestamp") { err =>
      assert(err.contains("Cannot safely cast 'longToTimestamp': bigint to timestamp"))
    }

    assertSingleError(TimestampType, LongType, "timestampToLong",
      "Should not allow timestamp to long") { err =>
      assert(err.contains("Cannot safely cast 'timestampToLong': timestamp to bigint"))
    }
  }

  test("Check NullType is compatible with all other types") {
    // Unlike STRICT, ANSI permits writing nulls into any column type.
    allNonNullTypes.foreach { t =>
      assertAllowed(NullType, t, "nulls", s"Should allow writing None to type $t")
    }
  }
}
abstract class DataTypeWriteCompatibilityBaseSuite extends SparkFunSuite {
protected def storeAssignmentPolicy: StoreAssignmentPolicy.Value
protected def canCast: (DataType, DataType) => Boolean
protected val atomicTypes = Seq(BooleanType, ByteType, ShortType, IntegerType, LongType,
FloatType, DoubleType, DateType, TimestampType, StringType, BinaryType)
protected val point2 = StructType(Seq(
StructField("x", FloatType, nullable = false),
StructField("y", FloatType, nullable = false)))
protected val widerPoint2 = StructType(Seq(
StructField("x", DoubleType, nullable = false),
StructField("y", DoubleType, nullable = false)))
protected val point3 = StructType(Seq(
StructField("x", FloatType, nullable = false),
StructField("y", FloatType, nullable = false),
StructField("z", FloatType)))
private val simpleContainerTypes = Seq(
ArrayType(LongType), ArrayType(LongType, containsNull = false), MapType(StringType, DoubleType),
MapType(StringType, DoubleType, valueContainsNull = false), point2, point3)
private val nestedContainerTypes = Seq(ArrayType(point2, containsNull = false),
MapType(StringType, point3, valueContainsNull = false))
protected val allNonNullTypes = Seq(
atomicTypes, simpleContainerTypes, nestedContainerTypes, Seq(CalendarIntervalType)).flatten
test("Check each type with itself") {
allNonNullTypes.foreach { t =>
assertAllowed(t, t, "t", s"Should allow writing type to itself $t")
}
}
test("Check atomic types: write allowed only when casting is safe") {
atomicTypes.foreach { w =>
atomicTypes.foreach { r =>
if (canCast(w, r)) {
assertAllowed(w, r, "t", s"Should allow writing $w to $r because cast is safe")
} else {
assertSingleError(w, r, "t",
s"Should not allow writing $w to $r because cast is not safe") { err =>
assert(err.contains("'t'"), "Should include the field name context")
assert(err.contains("Cannot safely cast"), "Should identify unsafe cast")
assert(err.contains(s"${w.catalogString}"), "Should include write type")
assert(err.contains(s"${r.catalogString}"), "Should include read type")
}
}
}
}
}
test("Check struct types: missing required field") {
val missingRequiredField = StructType(Seq(StructField("x", FloatType, nullable = false)))
assertSingleError(missingRequiredField, point2, "t",
"Should fail because required field 'y' is missing") { err =>
assert(err.contains("'t'"), "Should include the struct name for context")
assert(err.contains("'y'"), "Should include the nested field name")
assert(err.contains("missing field"), "Should call out field missing")
}
}
test("Check struct types: missing starting field, matched by position") {
val missingRequiredField = StructType(Seq(StructField("y", FloatType, nullable = false)))
// should have 2 errors: names x and y don't match, and field y is missing
assertNumErrors(missingRequiredField, point2, "t",
"Should fail because field 'x' is matched to field 'y' and required field 'y' is missing", 2)
{ errs =>
assert(errs(0).contains("'t'"), "Should include the struct name for context")
assert(errs(0).contains("expected 'x', found 'y'"), "Should detect name mismatch")
assert(errs(0).contains("field name does not match"), "Should identify name problem")
assert(errs(1).contains("'t'"), "Should include the struct name for context")
assert(errs(1).contains("'y'"), "Should include the _last_ nested fields of the read schema")
assert(errs(1).contains("missing field"), "Should call out field missing")
}
}
test("Check struct types: missing middle field, matched by position") {
val missingMiddleField = StructType(Seq(
StructField("x", FloatType, nullable = false),
StructField("z", FloatType, nullable = false)))
val expectedStruct = StructType(Seq(
StructField("x", FloatType, nullable = false),
StructField("y", FloatType, nullable = false),
StructField("z", FloatType, nullable = true)))
// types are compatible: (req int, req int) => (req int, req int, opt int)
// but this should still fail because the names do not match.
assertNumErrors(missingMiddleField, expectedStruct, "t",
"Should fail because field 'y' is matched to field 'z'", 2) { errs =>
assert(errs(0).contains("'t'"), "Should include the struct name for context")
assert(errs(0).contains("expected 'y', found 'z'"), "Should detect name mismatch")
assert(errs(0).contains("field name does not match"), "Should identify name problem")
assert(errs(1).contains("'t'"), "Should include the struct name for context")
assert(errs(1).contains("'z'"), "Should include the nested field name")
assert(errs(1).contains("missing field"), "Should call out field missing")
}
}
test("Check struct types: generic colN names are ignored") {
val missingMiddleField = StructType(Seq(
StructField("col1", FloatType, nullable = false),
StructField("col2", FloatType, nullable = false)))
val expectedStruct = StructType(Seq(
StructField("x", FloatType, nullable = false),
StructField("y", FloatType, nullable = false)))
// types are compatible: (req int, req int) => (req int, req int)
// names don't match, but match the naming convention used by Spark to fill in names
assertAllowed(missingMiddleField, expectedStruct, "t",
"Should succeed because column names are ignored")
}
test("Check struct types: required field is optional") {
val requiredFieldIsOptional = StructType(Seq(
StructField("x", FloatType),
StructField("y", FloatType, nullable = false)))
assertSingleError(requiredFieldIsOptional, point2, "t",
"Should fail because required field 'x' is optional") { err =>
assert(err.contains("'t.x'"), "Should include the nested field name context")
assert(err.contains("Cannot write nullable values to non-null field"))
}
}
test("Check struct types: data field would be dropped") {
assertSingleError(point3, point2, "t",
"Should fail because field 'z' would be dropped") { err =>
assert(err.contains("'t'"), "Should include the struct name for context")
assert(err.contains("'z'"), "Should include the extra field name")
assert(err.contains("Cannot write extra fields"))
}
}
test("Check struct types: type promotion is allowed") {
assertAllowed(point2, widerPoint2, "t",
"Should allow widening float fields x and y to double")
}
test("Check struct type: ignore field name mismatch with byPosition mode") {
val nameMismatchFields = StructType(Seq(
StructField("a", FloatType, nullable = false),
StructField("b", FloatType, nullable = false)))
assertAllowed(nameMismatchFields, point2, "t",
"Should allow field name mismatch with byPosition mode", false)
}
ignore("Check struct types: missing optional field is allowed") {
// built-in data sources do not yet support missing fields when optional
assertAllowed(point2, point3, "t",
"Should allow writing point (x,y) to point(x,y,z=null)")
}
test("Check array types: type promotion is allowed") {
val arrayOfLong = ArrayType(LongType)
val arrayOfInt = ArrayType(IntegerType)
assertAllowed(arrayOfInt, arrayOfLong, "arr",
"Should allow array of int written to array of long column")
}
test("Check array types: cannot write optional to required elements") {
val arrayOfRequired = ArrayType(LongType, containsNull = false)
val arrayOfOptional = ArrayType(LongType)
assertSingleError(arrayOfOptional, arrayOfRequired, "arr",
"Should not allow array of optional elements to array of required elements") { err =>
assert(err.contains("'arr'"), "Should include type name context")
assert(err.contains("Cannot write nullable elements to array of non-nulls"))
}
}
test("Check array types: writing required to optional elements is allowed") {
val arrayOfRequired = ArrayType(LongType, containsNull = false)
val arrayOfOptional = ArrayType(LongType)
assertAllowed(arrayOfRequired, arrayOfOptional, "arr",
"Should allow array of required elements to array of optional elements")
}
test("Check map value types: type promotion is allowed") {
val mapOfLong = MapType(StringType, LongType)
val mapOfInt = MapType(StringType, IntegerType)
assertAllowed(mapOfInt, mapOfLong, "m", "Should allow map of int written to map of long column")
}
test("Check map value types: cannot write optional to required values") {
val mapOfRequired = MapType(StringType, LongType, valueContainsNull = false)
val mapOfOptional = MapType(StringType, LongType)
assertSingleError(mapOfOptional, mapOfRequired, "m",
"Should not allow map of optional values to map of required values") { err =>
assert(err.contains("'m'"), "Should include type name context")
assert(err.contains("Cannot write nullable values to map of non-nulls"))
}
}
test("Check map value types: writing required to optional values is allowed") {
val mapOfRequired = MapType(StringType, LongType, valueContainsNull = false)
val mapOfOptional = MapType(StringType, LongType)
assertAllowed(mapOfRequired, mapOfOptional, "m",
"Should allow map of required elements to map of optional elements")
}
test("Check map key types: type promotion is allowed") {
val mapKeyLong = MapType(LongType, StringType)
val mapKeyInt = MapType(IntegerType, StringType)
assertAllowed(mapKeyInt, mapKeyLong, "m",
"Should allow map of int written to map of long column")
}
test("Check types with multiple errors") {
val readType = StructType(Seq(
StructField("a", ArrayType(DoubleType, containsNull = false)),
StructField("arr_of_structs", ArrayType(point2, containsNull = false)),
StructField("bad_nested_type", ArrayType(StringType)),
StructField("m", MapType(LongType, FloatType, valueContainsNull = false)),
StructField("map_of_structs", MapType(StringType, point3, valueContainsNull = false)),
StructField("x", IntegerType, nullable = false),
StructField("missing1", StringType, nullable = false),
StructField("missing2", StringType)
))
val missingMiddleField = StructType(Seq(
StructField("x", FloatType, nullable = false),
StructField("z", FloatType, nullable = false)))
val writeType = StructType(Seq(
StructField("a", ArrayType(StringType)),
StructField("arr_of_structs", ArrayType(point3)),
StructField("bad_nested_type", point3),
StructField("m", MapType(StringType, BooleanType)),
StructField("map_of_structs", MapType(StringType, missingMiddleField)),
StructField("y", StringType)
))
assertNumErrors(writeType, readType, "top", "Should catch 14 errors", 14) { errs =>
assert(errs(0).contains("'top.a.element'"), "Should identify bad type")
assert(errs(0).contains("Cannot safely cast"))
assert(errs(0).contains("string to double"))
assert(errs(1).contains("'top.a'"), "Should identify bad type")
assert(errs(1).contains("Cannot write nullable elements to array of non-nulls"))
assert(errs(2).contains("'top.arr_of_structs.element'"), "Should identify bad type")
assert(errs(2).contains("'z'"), "Should identify bad field")
assert(errs(2).contains("Cannot write extra fields to struct"))
assert(errs(3).contains("'top.arr_of_structs'"), "Should identify bad type")
assert(errs(3).contains("Cannot write nullable elements to array of non-nulls"))
assert(errs(4).contains("'top.bad_nested_type'"), "Should identify bad type")
assert(errs(4).contains("is incompatible with"))
assert(errs(5).contains("'top.m.key'"), "Should identify bad type")
assert(errs(5).contains("Cannot safely cast"))
assert(errs(5).contains("string to bigint"))
assert(errs(6).contains("'top.m.value'"), "Should identify bad type")
assert(errs(6).contains("Cannot safely cast"))
assert(errs(6).contains("boolean to float"))
assert(errs(7).contains("'top.m'"), "Should identify bad type")
assert(errs(7).contains("Cannot write nullable values to map of non-nulls"))
assert(errs(8).contains("'top.map_of_structs.value'"), "Should identify bad type")
assert(errs(8).contains("expected 'y', found 'z'"), "Should detect name mismatch")
assert(errs(8).contains("field name does not match"), "Should identify name problem")
assert(errs(9).contains("'top.map_of_structs.value'"), "Should identify bad type")
assert(errs(9).contains("'z'"), "Should identify missing field")
assert(errs(9).contains("missing fields"), "Should detect missing field")
assert(errs(10).contains("'top.map_of_structs'"), "Should identify bad type")
assert(errs(10).contains("Cannot write nullable values to map of non-nulls"))
assert(errs(11).contains("'top.x'"), "Should identify bad type")
assert(errs(11).contains("Cannot safely cast"))
assert(errs(11).contains("string to int"))
assert(errs(12).contains("'top'"), "Should identify bad type")
assert(errs(12).contains("expected 'x', found 'y'"), "Should detect name mismatch")
assert(errs(12).contains("field name does not match"), "Should identify name problem")
assert(errs(13).contains("'top'"), "Should identify bad type")
assert(errs(13).contains("'missing1'"), "Should identify missing field")
assert(errs(13).contains("missing fields"), "Should detect missing field")
}
}
// Helper functions
// Asserts that writing `writeType` into `readType` is permitted: canWrite
// must return true and must never invoke the error callback (the callback
// fails the test immediately with the offending message).
def assertAllowed(
    writeType: DataType,
    readType: DataType,
    name: String,
    desc: String,
    byName: Boolean = true): Unit = {
  assert(
    DataType.canWrite(writeType, readType, byName, analysis.caseSensitiveResolution, name,
      storeAssignmentPolicy,
      errMsg => fail(s"Should not produce errors but was called with: $errMsg")), desc)
}
// Convenience wrapper over assertNumErrors for the common one-error case:
// asserts the write is rejected with exactly one error message and hands
// that single message to `errFunc` for content checks.
def assertSingleError(
    writeType: DataType,
    readType: DataType,
    name: String,
    desc: String)
    (errFunc: String => Unit): Unit = {
  assertNumErrors(writeType, readType, name, desc, 1) { errs =>
    errFunc(errs.head)
  }
}
// Asserts that canWrite rejects the write (returns false) and reports
// exactly `numErrs` error messages, collecting them via the callback and
// then passing the full sequence to `checkErrors` for detailed assertions.
def assertNumErrors(
    writeType: DataType,
    readType: DataType,
    name: String,
    desc: String,
    numErrs: Int,
    byName: Boolean = true)
    (checkErrors: Seq[String] => Unit): Unit = {
  val errs = new mutable.ArrayBuffer[String]()
  assert(
    DataType.canWrite(writeType, readType, byName, analysis.caseSensitiveResolution, name,
      storeAssignmentPolicy, errMsg => errs += errMsg) === false, desc)
  assert(errs.size === numErrs, s"Should produce $numErrs error messages")
  checkErrors(errs)
}
}
| apache-2.0 |
frett/cas | support/cas-server-support-validation/src/main/java/org/apereo/cas/web/ServiceValidateController.java | 3654 | package org.apereo.cas.web;
import org.apereo.cas.CasProtocolConstants;
import org.apereo.cas.CentralAuthenticationService;
import org.apereo.cas.authentication.AuthenticationSystemSupport;
import org.apereo.cas.authentication.principal.WebApplicationService;
import org.apereo.cas.services.ServicesManager;
import org.apereo.cas.ticket.proxy.ProxyHandler;
import org.apereo.cas.util.CollectionUtils;
import org.apereo.cas.validation.CasProtocolValidationSpecification;
import org.apereo.cas.validation.RequestedContextValidator;
import org.apereo.cas.validation.ServiceTicketValidationAuthorizersExecutionPlan;
import org.apereo.cas.web.support.ArgumentExtractor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.servlet.ModelAndView;
import org.springframework.web.servlet.View;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
/**
 * CAS protocol {@code /serviceValidate} endpoint. Validates a service ticket
 * and renders the configured success/failure (or JSON) view by delegating to
 * {@link AbstractServiceValidateController}. Per the debug note below, this
 * endpoint does not release principal attributes; the v3 endpoint does.
 *
 * @author Misagh Moayyed
 * @since 4.2
 */
@Slf4j
public class ServiceValidateController extends AbstractServiceValidateController {

    /**
     * Wires all collaborators through to the parent controller, wrapping the
     * single validation specification into a set.
     */
    public ServiceValidateController(final CasProtocolValidationSpecification validationSpecification,
                                     final AuthenticationSystemSupport authenticationSystemSupport,
                                     final ServicesManager servicesManager,
                                     final CentralAuthenticationService centralAuthenticationService,
                                     final ProxyHandler proxyHandler,
                                     final ArgumentExtractor argumentExtractor,
                                     final RequestedContextValidator requestedContextValidator,
                                     final View jsonView,
                                     final View successView, final View failureView,
                                     final String authnContextAttribute,
                                     final ServiceTicketValidationAuthorizersExecutionPlan validationAuthorizers,
                                     final boolean renewEnabled) {
        super(CollectionUtils.wrapSet(validationSpecification), validationAuthorizers,
            authenticationSystemSupport, servicesManager, centralAuthenticationService, proxyHandler,
            successView, failureView, argumentExtractor, requestedContextValidator, jsonView, authnContextAttribute, renewEnabled);
    }

    /**
     * Handle model and view.
     *
     * @param request  the request
     * @param response the response
     * @return the model and view
     * @throws Exception the exception
     */
    @GetMapping(path = CasProtocolConstants.ENDPOINT_SERVICE_VALIDATE)
    @Override
    public ModelAndView handleRequestInternal(final HttpServletRequest request, final HttpServletResponse response) throws Exception {
        return super.handleRequestInternal(request, response);
    }

    // Logs a reminder that this legacy endpoint does not release attributes,
    // then continues with the parent's preparation logic.
    @Override
    protected void prepareForTicketValidation(final HttpServletRequest request, final WebApplicationService service, final String serviceTicketId) {
        super.prepareForTicketValidation(request, service, serviceTicketId);
        LOGGER.debug("Preparing to validate ticket [{}] for service [{}] via [{}]. Do note that this validation request "
                + "is not equipped to release principal attributes to applications. To access the authenticated "
                + "principal along with attributes, invoke the [{}] endpoint instead.",
            CasProtocolConstants.ENDPOINT_SERVICE_VALIDATE,
            serviceTicketId, service, CasProtocolConstants.ENDPOINT_SERVICE_VALIDATE_V3);
    }
}
| apache-2.0 |
whackpad/whackpad | etherpad/src/etherpad/changes/changes.js | 31725 | import("execution");
import("exceptionutils");
import("stringutils");
import("sqlbase.sqlobj");
import("varz");
import("jsutils.uniqueNumbers");
import("crypto");
import("email.sendEmail");
import("etherpad.changes.follow.FOLLOW");
import("etherpad.changes.follow.getUserFollowPrefsForPad");
import("etherpad.collab.ace.easysync2.Changeset");
import("etherpad.collab.ace.linestylefilter.linestylefilter");
import("etherpad.collab.ace.domline.domline");
import("etherpad.importexport.table.renderStaticTable");
import("etherpad.globals");
import("etherpad.log");
import("etherpad.pad.model");
import("etherpad.pad.padusers");
import("etherpad.pad.padutils");
import("etherpad.pad.pad_security");
import("etherpad.pro.domains");
import("etherpad.pro.pro_apns");
import("etherpad.pro.pro_utils");
import("etherpad.pro.pro_padmeta");
import("etherpad.pro.pro_accounts");
import("etherpad.statistics.email_tracking");
import("etherpad.utils");
import("etherpad.utils.renderTemplateAsString");
import("etherpad.collab.collab_server");
import("etherpad.helpers");
import("etherpad.debug.dmesg");
import("funhtml");
import("funhtml.*");
// Returns the account ids of users following this pad with the
// "email me on every change" preference.
function _getFollowers(globalPadId) {
  var rows = sqlobj.selectMulti("PAD_FOLLOW", { 'id': globalPadId, 'followPref': FOLLOW.EVERY });
  var userIds = [];
  for (var i = 0; i < rows.length; i++) {
    userIds.push(rows[i].userId);
  }
  return userIds;
}
// Re-keys the historical author data (keyed by raw author num) into a map
// of pro account id -> colorId, for use when rendering author colors.
function _getColorsForEditors(historicalAuthorData) {
  var colorsByAccountId = {};
  for (var authorNum in historicalAuthorData) {
    var accountId = padusers.getAccountIdForProAuthor(authorNum);
    colorsByAccountId[accountId] = historicalAuthorData[authorNum].colorId;
  }
  return colorsByAccountId;
}
// Test-only seam: exposes the private _accountIdsToNotify() so unit tests
// can exercise the notification-audience computation directly.
function accountIdsToNotifyTester (globalPadId, creatorId, guestPolicy) {
  return _accountIdsToNotify(globalPadId, creatorId, guestPolicy);
}
// Computes the set of pro account ids that should receive change email for
// a pad, based on the guest policy, explicit invitations, per-pad follow
// preferences, and the account-level "no follow email" opt-out. Returns a
// de-duplicated array of account ids.
function _accountIdsToNotify(globalPadId, creatorId, guestPolicy) {
  var peopleWithAccess = [];
  if (guestPolicy == "link") {
    peopleWithAccess = pad_security.getInvitedUsers(globalPadId);
  } else {
    // people are only marked as having accessed if the pad is deny or friends
    // when they visit. so it's *extremely* unreliable.
    // still it's unsafe to not check for it or we'll spam a ton of people -
    // especially in the "allow" case of public pads, but also for old school
    // "friends" pads.
    peopleWithAccess = pad_security.getInvitedUsersWhoAccessedPad(globalPadId);
  }
  peopleWithAccess.push(creatorId);
  // filter etherpad admin and/or null creator id
  peopleWithAccess = peopleWithAccess.filter(function(id) { return id; });
  // don't send mail to invitees who don't want mail by default
  var excludeList = {};
  pro_accounts.getAccountsByIds(peopleWithAccess).forEach(function(acct) {
    if (pro_accounts.getAccountDoesNotWantFollowEmail(acct)) {
      excludeList[acct.id] = true;
    }
  });
  peopleWithAccess = peopleWithAccess.filter(function(accountId){ return !excludeList[accountId] });
  // keep only users whose per-pad preference is DEFAULT or EVERY
  var followerPrefs = getUserFollowPrefsForPad(globalPadId, peopleWithAccess);
  var authenticatedFollowers = peopleWithAccess.filter(function(accountId) {
    return (followerPrefs[accountId] == FOLLOW.DEFAULT ||
            followerPrefs[accountId] == FOLLOW.EVERY);
  });
  // add all the followers who have access because the pad isn't invite only
  if (guestPolicy != "deny") {
    authenticatedFollowers = authenticatedFollowers.concat(_getFollowers(globalPadId));
  }
  return uniqueNumbers(authenticatedFollowers);
}
// Emails the rendered change digest (htmlParts) to each recipient in
// accountIds. Skips the admin account (id 0), deleted accounts, and -- unless
// optNotifyEditors is set -- the case where the sole editor is the recipient.
// A per-recipient unsubscribe link and tracking id are added to a private
// copy of the HTML so recipients don't share links.
function _sendEmailsAboutChanges(globalPadId, padTitle, htmlParts, accountIdsOfChangeAuthors, mentionedUserIds, accountIds, revNum, optNotifyEditors) {
  for (var i=0; i<accountIds.length; i++) {
    var accountId = accountIds[i];
    if (accountId == 0) {
      continue; // skip admin
    }
    var acct = pro_accounts.getAccountById(accountId, true/*opt_skipDeleted*/);
    if (!acct) { continue; } // account has been deleted?
    var isOwnChange = (accountIdsOfChangeAuthors.length==1 &&
                       accountIdsOfChangeAuthors[0] == acct.id);
    if (isOwnChange && !optNotifyEditors) {
      continue;
    }
    // copy + generate unsubscribe link
    var htmlPartsForSending = htmlParts.slice(0);
    var localPadId = padutils.globalToLocalId(globalPadId);
    var sig = crypto.signRequest({accountId: pro_accounts.getEncryptedUserId(accountId), globalPadId: globalPadId});
    var unsubURL = utils.absoluteURL("/ep/pad/follow/" + localPadId, {accountId: pro_accounts.getEncryptedUserId(accountId), followPref: FOLLOW.NO_EMAIL, sig: sig}, _subdomainForPadId(globalPadId));
    var unsubText = "<p style=\"color:#888\">To stop receiving email about changes to this pad, <a href=\""+ unsubURL + "\">unsubscribe</a>.</p>";
    // splice the unsubscribe footer in just before the final html part
    htmlPartsForSending.splice(htmlPartsForSending.length-1, 0, unsubText);
    var trackingId = email_tracking.trackEmailSent(acct.email, email_tracking.CHANGES, 1);
    // come up with email subject
    var subj = "hackpad: " + padTitle + " edited";
    _sendEmail(globalPadId, revNum, subj, acct, htmlPartsForSending, trackingId);
  }
}
// Emails the change digest to users who were @-mentioned in the change, and
// sends them an APNs push. Also grants each mentioned user access to the pad
// (with the first change author acting as host) before emailing.
function _sendEmailsAboutMentions(globalPadId, padTitle, htmlParts, changeAuthorIds, mentionedUserIds, revNum) {
  for (var i=0; i<mentionedUserIds.length; i++) {
    if (mentionedUserIds[i] == 0) { continue; } // skip admin
    var acct = pro_accounts.getAccountById(mentionedUserIds[i], true/*opt_skipDeleted*/);
    if (!acct) { continue; } // account has been deleted?
    // todo: could imagine having the host actually be the person who mentioned you
    // as opposed to just a random change author
    pad_security.ensureUserHasAccessToPad(globalPadId, acct, changeAuthorIds[0]/*hostUserId*/);
    var trackingId = email_tracking.trackEmailSent(acct.email, email_tracking.MENTIONS, 1);
    var subject = "hackpad: you were mentioned in " + padTitle;
    _sendEmail(globalPadId, revNum, subject, acct, htmlParts, trackingId);
    var msg = "You were mentioned in " + padTitle;
    pro_apns.sendPushNotificationForPad(globalPadId, msg, mentionedUserIds[i], pro_apns.APNS_HP_T_MENTION);
  }
}
// Emails a merge-request digest to each listed account, skipping accounts
// that have since been deleted. Each recipient gets a private copy of the
// HTML parts plus a fresh tracking id.
function _sendEmailsAboutMerge(globalPadId, subject, htmlParts, accountIds, revNum) {
  accountIds.forEach(function (accountId) {
    var account = pro_accounts.getAccountById(accountId, true/*opt_skipDeleted*/);
    if (!account) {
      return; // account has been deleted?
    }
    var bodyParts = htmlParts.slice(0); // copy
    var trackingId = email_tracking.trackEmailSent(account.email, email_tracking.MERGE, 1);
    _sendEmail(globalPadId, revNum, subject, account, bodyParts, trackingId);
  });
}
// Persists that change notifications have been sent for this pad up to (and
// including) revision endRev; the syndication query keys off this column.
function _setLastSyndicatedRev (globalPadId, endRev) {
  sqlobj.update('PAD_SQLMETA', {id:globalPadId} ,{lastSyndicatedRev: endRev});
}
/*
  Contexts in which we may want to send changes:
  EVERY change syndication context
    figures out all the followers who need an insta-email;
    it also sends mention emails when this happens
  DAILY changes to a pad
    looks back on the last 24 hours of changes and sends
    them to those who are subscribed to the daily digest;
    ideally this should know, on a per-user basis, what
    the last change the user saw was
  DAILY changes to many pads
    probably just return the content and have the caller email
*/
// Sends "every change" notification email (and APNs pushes) for a single pad,
// covering revisions from optStartRev (default: one past the pad's last
// syndicated rev) up to the pad's head rev. Wiki-text and forked pads are
// skipped but still marked up to date. All work that needs the pad lock runs
// inside accessPadGlobal; emails/pushes are sent after the lock is released,
// using state captured in the closure variables below.
function syndicateChangesToPad (row, optStartRev, optUpToDateCallback, optForceSyndicateToAll) {
  var startRev = optStartRev || row.lastSyndicatedRev + 1;
  var endRev = row.headRev;
  var globalPadId = row.id;
  var upToDateCallback = optUpToDateCallback || _setLastSyndicatedRev;
  // filled out with pad lock and used after to send emails
  var segments = [];
  var htmlParts = [];
  var padTitle = "";
  var accountIdsOfChangeAuthors = [];
  var mentionedUserIds = [];
  var accountIds = [];
  var sendEmails = false;
  var accountIdsToPushNotify = [];
  dmesg("Looking from:" + startRev + " to:" + endRev + " in " + globalPadId);
  log.custom("padevents", {type:"syndication", padId: globalPadId});
  model.accessPadGlobal(globalPadId, function(pad) {
    // hack: we don't syndicate wiki pads, so mark ourselves up to date
    if (pad.getIsWikiText()) {
      upToDateCallback(globalPadId, endRev);
      return;
    }
    // TODO: we don't syndicate forked pads, so mark ourselves up to date
    if (pad.getForkedFrom()) {
      upToDateCallback(globalPadId, endRev);
      return;
    }
    // load basic info about the pad
    var creatorId;
    var done = pro_padmeta.accessProPad(globalPadId, function(propad) {
      padTitle = propad.getDisplayTitle();
      creatorId = propad.getCreatorId();
      if (propad.isDeleted()) {
        upToDateCallback(globalPadId, endRev);
        return true;
      }
    });
    if (done) {
      return;
    }
    // figure out all the people we may want to notify
    accountIds = _accountIdsToNotify(globalPadId, creatorId, row.guestPolicy);
    accountIdsToPushNotify = accountIds;
    // a segment is [currentSegmentStartRev, currentSegmentEndRev,
    // currentSegmentAuthors, currentSegmentEndTime]
    segments = pad.getEditSegmentsForRange(startRev, endRev);
    if (!optForceSyndicateToAll) {
      // let very recent edits settle so rapid changes coalesce into one mail
      segments = _filterTooRecentSegments(segments, accountIds.length);
    }
    if (segments.length) {
      // load historical authors
      var historicalAuthorData = collab_server.buildHistoricalAuthorDataMapFromAText(pad, pad.atext());
      var colorIdForAuthor = _getColorsForEditors(historicalAuthorData);
      try {
        htmlParts = _getHTMLForChanges(pad, padTitle, segments, colorIdForAuthor);
      } catch (ex) {
        // log and go on
        ex.message = "Exception thrown syndicating pad:" + globalPadId + " " + String(ex.message);
        log.logException(ex);
      }
      if (htmlParts && htmlParts.length) {
        // there will be -1's here for non-pro users
        var connectedUserInfos = collab_server.getConnectedUsers(pad);
        var connectedUserIds = connectedUserInfos.map(function(userInfo){
          return padusers.getAccountIdForProAuthor(userInfo.userId);
        });
        // we'll send a separate email to mentioned users
        mentionedUserIds = _getMentionedUsersForChanges(pad, segments);
        accountIdsToPushNotify = accountIds.filter(function (accountId) {
          // include only people who haven't been mentioned
          return mentionedUserIds.indexOf(accountId) < 0;
        });
        accountIds = accountIds.filter(function (accountId) {
          // include only people who haven't been mentioned
          // and are not connected
          return mentionedUserIds.indexOf(accountId) < 0 &&
                 connectedUserIds.indexOf(accountId) < 0;
        });
        accountIdsOfChangeAuthors = _getProAuthorIdsForChanges(pad, segments);
        sendEmails = true;
      }
    }
  }, 'r');
  for (var i=0; i<accountIdsToPushNotify.length; i++) {
    // FIXME: Sends two apns edit notifications. One when this first runs but the segments are too new and one when the lastSyndicatedRev is actually updated.
    // Quiet edit notifications for now
    pro_apns.sendPushNotificationForPad(globalPadId, null, accountIdsToPushNotify[i], pro_apns.APNS_HP_T_EDIT);
  }
  if (sendEmails) {
    _sendEmailsAboutChanges(globalPadId, padTitle, htmlParts, accountIdsOfChangeAuthors, mentionedUserIds, accountIds, endRev, optForceSyndicateToAll);
    _sendEmailsAboutMentions(globalPadId, padTitle, htmlParts, accountIdsOfChangeAuthors, mentionedUserIds, endRev);
  }
  if (segments.length) {
    // the final endRev isn't the provided endRev, but the rev of the most recent segment
    // that we chose to syndicate (super recent segments may have gotten filtered)
    dmesg("syndicated to:" + segments[0][1]);
    upToDateCallback(globalPadId, + segments[0][1]);
  }
}
// Builds an absolute URL on the pad's (sub)domain; `url` is an optional
// domain-relative path (treated as "" when omitted).
function _absoluteUrlByPadId(globalPadId, url) {
  var scheme = appjet.config.useHttpsUrls ? "https://" : "http://";
  var path = url ? url : "";
  return scheme + _domainForPadId(globalPadId) + path;
}
// Emails the pad's creator a rendered diff of a proposed merge from a forked
// pad (revisions startRev..endRev), including an "Approve This Change" link.
function sendMergeRequestEmail(pad, forkInfo, startRev, endRev) {
  // load basic info about the pad
  var padTitle;
  var creatorId;
  var globalPadId = padutils.getGlobalPadId(forkInfo.padId);
  //pad.getId();
  pro_padmeta.accessProPad(globalPadId, function(propad) {
    padTitle = propad.getDisplayTitle();
    creatorId = propad.getCreatorId();
  });
  // figure out all the people we may want to notify
  var accountIds = [creatorId];
  // a segment is [currentSegmentStartRev, currentSegmentEndRev,
  // currentSegmentAuthors, currentSegmentEndTime]
  var segments = pad.getEditSegmentsForRange(startRev, endRev);
  if (segments.length) {
    // load historical authors
    var historicalAuthorData = collab_server.buildHistoricalAuthorDataMapFromAText(pad, pad.atext());
    var colorIdForAuthor = _getColorsForEditors(historicalAuthorData);
    var localPadId = padutils.globalToLocalId(pad.getId());
    var approveUrl = _absoluteUrlByPadId(pad.getId(), "/ep/pad/merge?padId="+localPadId);
    // NOTE(review): Math.random() is injected as a bogus attribute inside the
    // anchor tag (e.g. <a 0.123 href=...>), presumably to make each email's
    // HTML unique for email clients -- confirm intent before changing.
    var approveChangesHTML = "<a "+Math.random()+ " href='"+approveUrl+"'>Approve This Change</a><br/><br/>";
    var htmlParts = _getHTMLForChanges(pad, padTitle, segments, colorIdForAuthor, approveChangesHTML);
    if (htmlParts && htmlParts.length) {
      // come up with email subject
      var authors = [];
      for (var i=0; i<segments.length; i++) {
        var authorNums = segments[i][2];
        authors = authors.concat(authorNames(authorNums, colorIdForAuthor, false, _absoluteUrlByPadId(pad.getId())));
      }
      var byLine = authors.join(", ");
      var subject = "hackpad: " + padTitle + " change proposed by " + byLine;
      _sendEmailsAboutMerge(globalPadId, subject, htmlParts, accountIds, endRev);
    }
  }
}
// Scans PAD_SQLMETA for pads with unsyndicated revisions and syndicates each
// one, skipping pads whose domain is not served by this host. Per-pad
// exceptions are logged and do not stop the sweep.
function syndicateChanges() {
  var sql = "select id, lastSyndicatedRev, headRev, guestPolicy from PAD_SQLMETA where lastSyndicatedRev < headRev";
  var rows = sqlobj.executeRaw(sql, []);
  dmesg("Syndicating changes");
  for (var i=0; i<rows.length; i++) {
    var domainId = padutils.getDomainId(rows[i].id);
    if (!domains.domainIsOnThisServer(domainId) || !domains.getDomainRecord(domainId)) {
      continue;
    }
    try {
      syndicateChangesToPad(rows[i]);
    } catch (ex) {
      // log and go on
      ex.message = "Exception thrown syndicating pad:" + rows[i].id + " " + String(ex.message);
      log.logException(ex);
    }
  }
}
// Returns the pad domain's subdomain when the domain has an org name;
// otherwise returns the (falsy) orgName value itself.
function _subdomainForPadId(padId) {
  var record = domains.getDomainRecord(padutils.getDomainId(padId));
  if (!record.orgName) {
    return record.orgName;
  }
  return record['subDomain'];
}
// Full hostname for a pad: "<subdomain>.<canonical>" when the pad's domain
// has a subdomain, else the canonical domain alone.
function _domainForPadId(padId) {
  var canonical = appjet.config['etherpad.canonicalDomain'];
  var sub = _subdomainForPadId(padId);
  return sub ? (sub + "." + canonical) : canonical;
}
// Sentinel GUID embedded in generated email HTML wherever a per-recipient
// tracking id belongs; _sendEmail() substitutes the real id at send time
// using the global regexp below.
var TRACKING_ID_GUID = "290e79ef-7a5a-48d5-aa82-7ef0a8482112";
var TRACKING_ID_GUID_RE = new RegExp(TRACKING_ID_GUID, "g");
// Renders the email HTML for a list of edit segments. Returns an array of
// HTML fragments (wrapped in <html><body>...</body></html>), or null when
// every segment renders to an empty (trivial) diff. When
// opt_approveChangesHTML is given, the output is styled as a merge-request
// email instead of a change-notification email.
function _getHTMLForChanges(pad, padTitle, segments, colorIdForAuthor, opt_approveChangesHTML) {
  var htmlParts = [];
  var localPadId = padutils.globalToLocalId(pad.getId());
  // TRACKING_ID_GUID is replaced with a real per-recipient id in _sendEmail
  var relativePadUrl = "/" + localPadId + '?eid=' + TRACKING_ID_GUID + "#" + encodeURIComponent(padTitle);
  var padUrl = _absoluteUrlByPadId(pad.getId(), relativePadUrl);
  var emailHeader;
  if (opt_approveChangesHTML) {
    htmlParts.push(opt_approveChangesHTML);
    emailHeader = "Change proposed to " + padTitle + " by ";
  } else {
    var configureNotificationsUrl = appjet.config.useHttpsUrls ? "https://" : "http://";
    configureNotificationsUrl += _domainForPadId(pad.getId());
    configureNotificationsUrl += "/ep/pad/follow/" + localPadId + "/";
    var padLinkHtml = "<a style=\"font-weight:bold; font-size: 18px;\" href=\"" + padUrl + "\">" + padTitle + "</a>";
    var padEmailSettingsHtml = "<a href='" + configureNotificationsUrl + "'>email settings</a>";
    emailHeader = padLinkHtml + " (" +padEmailSettingsHtml +") - edited by ";
  }
  var isTrivial = true;
  for (var i=0; i<segments.length; i++) {
    var diffHTML = getDiffHTML(pad, segments[i][0], segments[i][1], segments[i][2], colorIdForAuthor, false, emailHeader);
    if (diffHTML != '') {
      isTrivial = false;
      htmlParts.push(diffHTML);
    }
  }
  if (isTrivial) {
    // no nontrivial changes
    return null;
  }
  htmlParts.unshift("<html><body><style>.longkeep {display:none;}</style>");
  if (opt_approveChangesHTML) {
    htmlParts.push("<p style=\"color:#888\">To reject this change, just ignore this email</p>");
  } else {
    htmlParts.push("<p style=\"color:#888\">Reply to this email directly or edit it live on hackpad: <a href=\"" + padUrl + "\">" + padTitle + "</a></p>");
  }
  htmlParts.push("</body></html>");
  return htmlParts;
}
// A segment is [startRev, endRev, authorIds, endTime]. Returns the pro
// account ids for the segment's authors, skipping empty ids, guests, and
// authors that don't resolve to a positive account number.
function _getProAuthorIdsForChange(pad, segment) {
  var authorIds = segment[2];
  if (!authorIds) {
    return [];
  }
  var proAuthorIds = [];
  authorIds.forEach(function (uid) {
    if (!uid || padusers.isGuest(uid)) {
      return;
    }
    var accountNum = padusers.getAccountIdForProAuthor(uid);
    if (accountNum > 0) {
      proAuthorIds.push(accountNum);
    }
  });
  return proAuthorIds;
}
// Union of pro author ids across all segments, de-duplicated via an object
// keyed by id and converted back to integers.
function _getProAuthorIdsForChanges(pad, segments) {
  var idSet = {};
  segments.forEach(function (segment) {
    _getProAuthorIdsForChange(pad, segment).forEach(function (authorId) {
      idSet[authorId] = 1;
    });
  });
  var uniqueIds = [];
  for (var key in idSet) {
    uniqueIds.push(parseInt(key));
  }
  return uniqueIds;
}
// Collects the encrypted ids of users newly @-mentioned in the given
// segments, then decrypts them to account ids. Ids that fail to decrypt
// (or decrypt to null) are logged and dropped.
function _getMentionedUsersForChanges(pad, segments) {
  var encryptedIds = [];
  segments.forEach(function (segment) {
    var mentions = pad.getUsersNewlyMentionInRevisions(segment[0], segment[1]);
    if (mentions) {
      encryptedIds = encryptedIds.concat(mentions);
    }
  });
  var accountIds = [];
  encryptedIds.forEach(function (id) {
    try {
      var accountId = pro_accounts.getUserIdByEncryptedId(id);
      if (accountId != null) {
        accountIds.push(accountId);
      }
    } catch (ex) {
      ex.message = "Failed to decrypt user id: " + id + " " + String(ex.message);
      log.logException(ex);
    }
  });
  return accountIds;
}
// Drops segments that are too recent to email about yet, so rapid edits
// coalesce into a single notification. The settling delay grows with the
// audience: max(2, 2*(audienceSize-1)) minutes, capped at 60 minutes for
// audiences over 20. Segments appear ordered newest-first (see the use of
// segments[0][1] as "most recent" in syndicateChangesToPad) -- the returned
// slice starts at the first segment old enough to send.
function _filterTooRecentSegments(segments, audienceSize) {
  // calculate the notification delay
  var MINUTES = 1000 * 60;
  var delayMinutes = Math.max(2, 2 * (audienceSize - 1));
  if (audienceSize > 20) {
    delayMinutes = 60;
  }
  dmesg("syndication delay is: " + delayMinutes);
  var now = new Date();
  var segmentsForSyndication = [];
  for (var j=0; j<segments.length; j++) {
    dmesg("looking at segment(" + segments[j][0] + "," + segments[j][1] +")");
    var segmentDate = new Date(segments[j][3]);
    // keep everything from the first sufficiently-old segment onward
    if ((now.getTime() - segmentDate.getTime()) > delayMinutes * MINUTES) {
      dmesg("segment is " + String(now.getTime() - segmentDate.getTime()) +"old");
      segmentsForSyndication = segments.slice(j);
      break;
    }
  }
  return segmentsForSyndication;
}
// Sends one notification email: joins the HTML parts, substitutes the
// per-recipient tracking id for the TRACKING_ID_GUID sentinel, and sets
// In-Reply-To/References headers so mail clients thread all messages about
// the same pad together. Send failures are logged and counted, not thrown.
function _sendEmail(globalPadId, revNum, subj, acct, html, trackingId) {
  var localPadId = padutils.globalToLocalId(globalPadId);
  var fromAddr = pro_utils.getEmailFromAddr();
  var body = html.join("\n");
  // render the email with tracking ids filled out
  body = body.replace(TRACKING_ID_GUID_RE, trackingId || "")
  var inReplyToId = "<" + localPadId + "@" + _domainForPadId(globalPadId) + ">";
  var referencesId = "<" + localPadId + '+' + revNum + "@" + _domainForPadId(globalPadId) + ">";
  // NOTE(review): the Content-Type header says text/plain while sendEmail is
  // invoked with "text/html" below -- confirm which one the mailer honors.
  var headers = { "In-Reply-To": inReplyToId, "References": referencesId,
                  "Content-Transfer-Encoding": "quoted-printable",
                  "Content-Type": "text/plain; charset=\"utf-8\"" };
  try {
    dmesg("SENDING EMAIL TO" + acct.id);
    log.custom("changesemail", {userId: padusers.getUserIdForProUser(acct.id), toEmails: acct.email, padId: globalPadId});
    sendEmail(acct.email, fromAddr, subj, headers, body, "text/html; charset=utf-8");
    varz.incrementMetric("changes-mail-send-succeeded");
  } catch (ex) {
    varz.incrementMetric("changes-mail-send-failed");
    // NOTE(review): logException is given a string here, not the exception --
    // other call sites pass the exception object; verify intended usage.
    log.logException("Failed to send email to: " + acct.email + "(" + ex + ")");
  }
}
// Maps raw author nums (format "<prefix>.<accountId>") to display names.
// When asHTML is true each name is rendered as a profile link, underlined in
// the author's palette color when one is known. Author nums whose numeric
// part doesn't parse cleanly are skipped.
function authorNames(authorNums, colorIdForAuthor, asHTML, relativeUrlPrefix, pad) {
  var authors = [];
  for (var i=0; i<authorNums.length; i++) {
    if (authorNums[i] && authorNums[i] != "") {
      var accountId = parseInt(authorNums[i].split(".")[1]);
      // reject author nums whose id part isn't a plain integer (round-trip check)
      if (String(accountId) != String(authorNums[i]).split(".")[1]) {
        continue;
      }
      var authorName = getAuthorName(authorNums[i], pad);
      var userId = padusers.getAccountIdForProAuthor(authorNums[i]);
      // look up author color
      var colorId = colorIdForAuthor[userId];
      var color = globals.getPalette()[colorId % globals.getPalette().length];
      if (asHTML) {
        if (color) {
          authors.push("<span style='border-bottom: 2px dotted " + color + ";'><a href='"+ relativeUrlPrefix + pro_accounts.getUserLinkById(userId) + "' style='text-decoration: none;'>" + SPAN(authorName) + "</a></span>");
        } else {
          authors.push("<a href='"+ relativeUrlPrefix + pro_accounts.getUserLinkById(userId) + "'>" + SPAN(authorName) + "</a>");
        }
      } else {
        authors.push(authorName);
      }
    }
  }
  return authors;
}
// Renders one segment's diff plus an author-byline header. Returns '' when
// the underlying diff or byline is empty. optIncludeRevertLink adds either a
// revert form (web) or a history link (email); optNotEmail distinguishes the
// two render targets.
function getDiffHTML(pad, revNum, revisionIdTo, authorNums, colorIdForAuthor, includeTimestamps, byLineHeader, includeDeletes, optDiffCs, optNotEmail, optIncludeRevertLink) {
  var diffAndAuthors = getDiffAndAuthorsHTML(pad, revNum, revisionIdTo, authorNums, colorIdForAuthor, includeDeletes, optDiffCs, optNotEmail);
  var authorHTMLParts = [];
  authorHTMLParts.push('<div class="author-diff-header">');
  var byLine = byLineHeader + diffAndAuthors.authorsHTML;
  if (includeTimestamps) {
    var revDate = helpers.prettyDate(pad.getRevisionDate(revNum));
    byLine += (" - " + revDate);
  }
  authorHTMLParts.push(byLine);
  if (optIncludeRevertLink) {
    var localPadId = padutils.globalToLocalId(pad.getId());
    authorHTMLParts.push(SPAN(" - "));
    if (optNotEmail) {
      // web
      authorHTMLParts.push(funhtml.FORM({action: '/ep/pad/'+localPadId+'/revert-to/'+revNum, method: 'POST',
                                         style: 'display: inline'},
                                        helpers.xsrfTokenElement(),
                                        funhtml.INPUT({type: 'submit', name:'submit', value:'Revert this' })));
    } else {
      // no revert links in email
      authorHTMLParts.push(A({ href:'/ep/pad/summary/' + encodeURIComponent(localPadId)}, 'View history'));
    }
  }
  authorHTMLParts.push('</div>\n');
  // both pieces must be non-empty, otherwise the whole segment is dropped
  if (diffAndAuthors.diffHTML && diffAndAuthors.authorsHTML) {
    return authorHTMLParts.join('') + diffAndAuthors.diffHTML;
  } else {
    return '';
  }
}
/*
@returns {diffHTML:.., authorsHTML:}
*/
// Renders the diff between two revisions (or against a changeset) as HTML,
// plus an authors byline. Returns {diffHTML, authorsHTML}; both are '' when
// the range contains no changes. maxLines truncates the rendered diff
// (appending "..." unless optHideElipsis); optNotEmail switches between the
// in-app "stream" rendering and the email rendering.
function getDiffAndAuthorsHTML(pad, revNum, revisionIdTo, authorNums, colorIdForAuthor, includeDeletes, optDiffCs, optNotEmail, maxLines, optHideElipsis) {
  var relativeUrlPrefix = (appjet.config.useHttpsUrls ? "https://" : "http://") + _domainForPadId(pad.getId());
  // authors html
  var authorsHTMLParts = authorNames(authorNums, colorIdForAuthor, true/*asHTML*/, relativeUrlPrefix, pad);
  var authorsHTML = authorsHTMLParts.join(", ");
  // diff html
  var pieces = [];
  var atextAndPool = null;
  if (optDiffCs) {
    atextAndPool = pad.getDiffATextForChangeset(optDiffCs, revNum, includeDeletes);
  } else {
    atextAndPool = pad.getDiffATextForChangeRange(revNum, revisionIdTo, includeDeletes);
  }
  if (atextAndPool == null) {
    // There are no changes in this range. Return the documented object shape
    // (previously returned '', which callers only survived because property
    // access on a string yields undefined).
    return {diffHTML: '', authorsHTML: authorsHTML};
  }
  var atext = atextAndPool[0];
  var apool = atextAndPool[1];
  var textlines = Changeset.splitTextLines(atext.text);
  var alines = Changeset.splitAttributionLines(atext.attribs,
                                               atext.text);
  // Translates linestylefilter class names into inline styles for email:
  // added text gets the author's color as a dotted underline; removed and
  // unchanged text is greyed (removed text also struck through).
  function classToStyle (classes) {
    var classes = classes.split(" ");
    var styles = [];
    if (classes.indexOf("added") > -1) {
      // fa-author-p-1 / author-p-1 -> 1
      // if it's fa-author, don't add color
      if (classes[0].slice(0,2) == "fa") {
        //
      } else {
        var userId = linestylefilter.className2Author(classes[0]);
        if (!userId) {
          // No author for added line
          styles.push("color:#999"); // ignore for now
        } else {
          userId = padusers.getAccountIdForProAuthor(userId);
          // look up author color
          var colorId = colorIdForAuthor[userId];
          var color = globals.getPalette()[colorId % globals.getPalette().length];
          styles.push("border-bottom: 2px dotted " + color);
        }
      }
    } else if (classes.indexOf("removed") >= 0) {
      styles.push("color: #999");
      styles.push("text-decoration:line-through");
    } else {
      styles.push("color: #999");
    }
    //log.info("Classes are " + classes.join(" "));
    //log.info("Styles are " + styles.join(";"));
    return styles.length ? styles.join(";") : "";
  }
  var browser = optNotEmail ? "stream" : "email";
  var atStart = true;
  var i = 0;
  var lastSeenShortName = '';
  var lastSeenCommentShortName = '';
  for(;i<textlines.length && !(i==maxLines);i++) {
    var line = textlines[i];
    var aline = alines[i];
    var emptyLine = (line == '\n');
    var node = null;
    // handle tables in a special way for now
    var wholeLineStyle = "";
    if (line == "*\n") {
      node = renderStaticTable(aline, apool);
      if (node) {
        wholeLineStyle = _wholeLineStyleForNode(node, colorIdForAuthor);
      }
    }
    if (!node) {
      var domInfo = domline.createDomLine(! emptyLine, true, browser, null, relativeUrlPrefix, _convertEmbedToAnchor, null/*optMath*/, true/*for email*/);
      linestylefilter.populateDomLine(line, aline, apool, domInfo, colorIdForAuthor ? classToStyle : null);
      domInfo.prepareForAdd();
      wholeLineStyle = _wholeLineStyleForNode(domInfo.node, colorIdForAuthor);
      // hack: go back in and strip authorship colors that are overrulled by the line author color
      var color = _getLineAuthorColor(domInfo.node, colorIdForAuthor);
      if (color) {
        domInfo.node.innerHTML = domInfo.node.innerHTML.replace(new RegExp(color,"g"), "transparent");
      }
      node = domInfo.node;
    }
    if (browser == "email") {
      wholeLineStyle += "border-left-style: solid; border-left-width: 6px; padding-left: 20px; max-width:640px"
    }
    var isWhitespace = /^(&nbsp;)*$/.test(node.innerHTML);
    if (!isWhitespace || !atStart) {
      atStart = false;
      var style = wholeLineStyle ? " style='"+wholeLineStyle+"' " : "";
      // heuristically strip what we assume is a line-marker
      // todo: actually parse the aline/apool and check
      var lineForHash;
      if (line[0] == "*") {
        lineForHash = line.slice(1);
      } else {
        lineForHash = line;
      }
      var lineHash = (new java.lang.String(stringutils.trim(lineForHash))).hashCode();
      var localPadId = padutils.globalToLocalId(pad.getId());
      var lineUrl;
      if (browser == "email") {
        lineUrl = _absoluteUrlByPadId(pad.getId(), "/"+localPadId +'?src=email-line&eid='+ TRACKING_ID_GUID +"#:" + lineHash);
      } else {
        lineUrl = _absoluteUrlByPadId(pad.getId(), "/"+localPadId);
      }
      // show an author label next to the first line of each run of fully-added
      // (or comment) lines by a new author
      var authorName = _getLineAuthorName(pad, node) || '';
      var shortName = getShortNameFromFullName(authorName);
      var nameToShow = '';
      if (node.className.indexOf("allAdd") > -1) {
        if (shortName && shortName != lastSeenShortName) {
          nameToShow = shortName;
          lastSeenShortName = shortName;
        }
      }
      if (node.className.indexOf('line-list-type-comment') > -1) {
        if (shortName && shortName != lastSeenCommentShortName) {
          nameToShow = shortName;
          lastSeenCommentShortName = shortName;
        }
      }
      pieces.push('<div '+ style + ' class="', node.className, '">');
      var userId = _getLineUserId(pad, node);
      if (userId && browser != 'email' && nameToShow) {
        // was an implicit global before; keep it local to this function
        var accountId = padusers.getAccountIdForProAuthor(userId);
        // NOTE(review): for table lines (renderStaticTable) domInfo here is
        // left over from the previous non-table iteration; using `node`
        // instead may be intended -- confirm before changing.
        var color = _getLineAuthorColor(domInfo.node, colorIdForAuthor);
        var colorStyle = color ? 'color:' + color : '';
        pieces.push('<a style="' + colorStyle + '" href="' + pro_accounts.getUserLinkById(accountId) +
                    '" class="ace-line-author" style="display: none;">' + SPAN(nameToShow) + '</a>');
      }
      if (browser == "email") {
        pieces.push('<a style="border-bottom:none; color: #000001; text-decoration:none;" href="'+lineUrl+'">',
                    node.innerHTML, '</a>');
      } else {
        pieces.push(node.innerHTML);
      }
      pieces.push('</div>\n');
    }
  }
  if (i == maxLines && !optHideElipsis) {
    pieces.push('<div class="ace-line gutter-noauthor">...</div>');
  }
  return {diffHTML: pieces.join(''), authorsHTML:authorsHTML};
}
}
// Finds the first class on the node that decodes to an author id and returns
// that author's palette color, or null when there is no author class or the
// palette lookup yields nothing.
function _getLineAuthorColor(node, colorIdForAuthor) {
  var classNames = node.className.split(" ");
  for (var idx = 0; idx < classNames.length; idx++) {
    // fa-author-p-1 / author-p-1 -> p.1; non-author classes yield nothing
    var authorId = linestylefilter.className2Author(classNames[idx]);
    if (!authorId) {
      continue;
    }
    var accountId = padusers.getAccountIdForProAuthor(authorId);
    var palette = globals.getPalette();
    var color = palette[colorIdForAuthor[accountId] % palette.length];
    return color ? color : null;
  }
  return null;
}
// Returns the author id encoded in the first author class on the node
// (e.g. "author-p-1" -> "p.1"), or null when no class decodes to an author.
function _getLineUserId(pad, node) {
  var remaining = node.className.split(" ");
  while (remaining.length) {
    var authorId = linestylefilter.className2Author(remaining.shift());
    if (authorId) {
      return authorId;
    }
  }
  return null;
}
// Display name of the line's author, or null when the node carries no
// author class.
function _getLineAuthorName(pad, node) {
  var authorId = _getLineUserId(pad, node);
  return authorId ? getAuthorName(authorId, pad) : null;
}
// Resolves an author num to a display name: prefer the pad's stored author
// data, fall back to the global user-id lookup, and return "" when neither
// yields a name.
function getAuthorName(authorNum, pad) {
  var name = null;
  if (pad) {
    var info = pad.getAuthorData(authorNum);
    if (info) {
      name = info.name;
    } else {
      log.warn("Cannot find author data for author " + authorNum + " in pad " + pad.getId());
    }
  }
  if (!name) {
    name = padusers.getNameForUserId(authorNum);
  }
  return name || "";
}
/**
 * Condenses a full name to a short form: a single-word name is returned
 * as-is; otherwise returns "<first word> <initial of last word>".
 * Returns '' for a falsy or all-whitespace input.
 *
 * Fix: the original split on single spaces, so leading, trailing, or
 * doubled spaces produced empty tokens and results like "John undefined".
 * Splitting on whitespace runs and dropping empty tokens avoids that.
 */
function getShortNameFromFullName(fullName) {
  if (!fullName) {
    return '';
  }
  var parts = [];
  var rawParts = fullName.split(/\s+/);
  for (var i = 0; i < rawParts.length; i++) {
    if (rawParts[i]) {
      parts.push(rawParts[i]);
    }
  }
  if (parts.length === 0) {
    return '';
  }
  if (parts.length === 1) {
    return parts[0];
  }
  return parts[0] + ' ' + parts[parts.length - 1][0];
}
/**
 * Computes the border style for a whole line: lines marked as wholly
 * added ("allAdd") get a border in the author's color; all other lines
 * get a white border.
 */
function _wholeLineStyleForNode(node, colorIdForAuthor) {
  var authorColor = _getLineAuthorColor(node, colorIdForAuthor);
  var isAllAdd = node.className.indexOf("allAdd") > -1;
  if (authorColor && isAllAdd) {
    return "border-left-color:" + authorColor + ";";
  }
  return "border-left-color: white;";
}
/**
 * Renders an embed source URL as a plain anchor that links to itself,
 * tagged with the "embed" class.
 */
function _convertEmbedToAnchor(src) {
  var anchorParts = ['<a href="', src, '" class="embed">', src, '</a>'];
  return anchorParts.join('');
}
// Periodic server task: pushes pending pad-change syndication, then
// reschedules itself on the "changes" task thread pool. The finally block
// guarantees rescheduling even when syndicateChanges() throws.
serverhandlers.tasks.changeSyndicationTask = function() {
  try {
    syndicateChanges();
  } finally {
    // Run again in 2 minutes.
    execution.scheduleTask('changes', "changeSyndicationTask", 2*60*1000, []);
  }
}
// Server startup hook: when change syndication is enabled via configuration,
// creates the single-threaded "changes" task pool and schedules the first
// syndication run; otherwise logs that syndication is disabled.
function onStartup() {
  if (appjet.config['etherpad.syndicateChanges'] == "true") {
    execution.initTaskThreadPool("changes", 1);
    // First run in 60 seconds; the task reschedules itself afterwards.
    execution.scheduleTask('changes', "changeSyndicationTask", 60*1000, []);
  } else {
    dmesg("Not syndicating pad changes.");
  }
}
| apache-2.0 |
gchq/Gaffer | store-implementation/federated-store/src/test/java/uk/gov/gchq/gaffer/federatedstore/operation/FederatedOperationTest.java | 2582 | /*
* Copyright 2017-2020 Crown Copyright
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.gchq.gaffer.federatedstore.operation;
import org.apache.commons.lang3.exception.CloneFailedException;
import org.junit.jupiter.api.Test;
import uk.gov.gchq.gaffer.operation.Operation;
import uk.gov.gchq.gaffer.operation.OperationChain;
import uk.gov.gchq.gaffer.operation.impl.DiscardOutput;
import uk.gov.gchq.gaffer.operation.impl.get.GetAdjacentIds;
import uk.gov.gchq.gaffer.operation.impl.get.GetElements;
import java.util.Map;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
/**
 * Unit tests for {@code FederatedOperation.hasFederatedOperations}, checking
 * that an operation chain is correctly scanned for federated operations.
 */
public class FederatedOperationTest {
    /** Minimal stub; only its FederatedOperation marker interface matters here. */
    private static class TestFederatedOperation implements FederatedOperation {
        @Override
        public Operation shallowClone() throws CloneFailedException {
            return null;
        }
        @Override
        public Map<String, String> getOptions() {
            return null;
        }
        @Override
        public void setOptions(final Map<String, String> options) {
        }
    }
    @Test
    public void shouldReturnTrueWhenOpChainHasFederatedOps() {
        // Given - a chain containing one federated operation among plain ones
        final OperationChain<?> opChain = new OperationChain.Builder()
                .first(new GetElements())
                .then(new DiscardOutput())
                .then(new TestFederatedOperation())
                .then(new GetElements())
                .build();
        // When
        final boolean result = FederatedOperation.hasFederatedOperations(opChain);
        // Then
        assertTrue(result);
    }
    @Test
    public void shouldReturnFalseWhenOpChainDoesNotHaveFederatedOps() {
        // Given - a chain containing only non-federated operations
        final OperationChain<?> opChain = new OperationChain.Builder()
                .first(new GetAdjacentIds())
                .then(new GetElements())
                .build();
        // When
        final boolean result = FederatedOperation.hasFederatedOperations(opChain);
        // Then
        assertFalse(result);
    }
}
| apache-2.0 |
amalkasubasinghe/product-apim | modules/integration/tests-integration/tests-backend/src/test/java/org/wso2/am/integration/tests/token/OpenIDTokenAPITestCase.java | 9952 | /*
*Copyright (c) 2005-2010, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
*WSO2 Inc. licenses this file to you under the Apache License,
*Version 2.0 (the "License"); you may not use this file except
*in compliance with the License.
*You may obtain a copy of the License at
*
*http://www.apache.org/licenses/LICENSE-2.0
*
*Unless required by applicable law or agreed to in writing,
*software distributed under the License is distributed on an
*"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
*KIND, either express or implied. See the License for the
*specific language governing permissions and limitations
*under the License.
*/
package org.wso2.am.integration.tests.token;
import org.json.JSONObject;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Factory;
import org.testng.annotations.Test;
import org.wso2.am.integration.test.utils.base.APIMIntegrationBaseTest;
import org.wso2.am.integration.test.utils.base.APIMIntegrationConstants;
import org.wso2.am.integration.test.utils.bean.APILifeCycleState;
import org.wso2.am.integration.test.utils.bean.APILifeCycleStateRequest;
import org.wso2.am.integration.test.utils.bean.APIRequest;
import org.wso2.am.integration.test.utils.bean.APPKeyRequestGenerator;
import org.wso2.am.integration.test.utils.bean.SubscriptionRequest;
import org.wso2.am.integration.test.utils.clients.APIPublisherRestClient;
import org.wso2.am.integration.test.utils.clients.APIStoreRestClient;
import org.wso2.carbon.automation.engine.annotations.ExecutionEnvironment;
import org.wso2.carbon.automation.engine.annotations.SetEnvironment;
import org.wso2.carbon.automation.engine.context.AutomationContext;
import org.wso2.carbon.automation.engine.context.ContextXpathConstants;
import org.wso2.carbon.automation.engine.context.TestUserMode;
import org.wso2.carbon.automation.test.utils.http.client.HttpRequestUtil;
import org.wso2.carbon.integration.common.utils.mgt.ServerConfigurationManager;
import org.wso2.carbon.automation.test.utils.http.client.HttpResponse;
import java.io.File;
import java.net.URL;
import java.util.HashMap;
import java.util.Map;
/**
* This test will cover OpenId based access token generation and validation for users
* Here we will retrieve access tokens with open id scope and use it for user info API
*/
@SetEnvironment(executionEnvironments = {ExecutionEnvironment.ALL})
public class OpenIDTokenAPITestCase extends APIMIntegrationBaseTest {
    // REST clients for the publisher and store web apps.
    private APIPublisherRestClient apiPublisher;
    private APIStoreRestClient apiStore;
    // Used to swap in token-test server configuration (standalone mode only).
    private ServerConfigurationManager serverConfigurationManager;
    private String publisherURLHttp;
    private String storeURLHttp;
    private String executionEnvironment;
    @Factory(dataProvider = "userModeDataProvider")
    public OpenIDTokenAPITestCase(TestUserMode userMode) {
        this.userMode = userMode;
    }
    /**
     * Prepares the test: applies token-test server configuration when running
     * standalone, and creates publisher/store REST clients.
     */
    @BeforeClass(alwaysRun = true)
    public void setEnvironment() throws Exception {
        super.init(userMode);
        executionEnvironment =
                gatewayContextWrk.getConfigurationValue(ContextXpathConstants.EXECUTION_ENVIRONMENT);
        publisherURLHttp = publisherUrls.getWebAppURLHttp();
        storeURLHttp = storeUrls.getWebAppURLHttp();
        // Only standalone deployments allow swapping server config files in place.
        if(this.executionEnvironment.equalsIgnoreCase(ExecutionEnvironment.STANDALONE.name())) {
            serverConfigurationManager = new ServerConfigurationManager(
                    new AutomationContext(APIMIntegrationConstants.AM_PRODUCT_GROUP_NAME,
                            APIMIntegrationConstants.AM_GATEWAY_WRK_INSTANCE, TestUserMode.SUPER_TENANT_ADMIN));
            serverConfigurationManager.applyConfigurationWithoutRestart(
                    new File(getAMResourceLocation() + File.separator + "configFiles/tokenTest/" + "api-manager.xml"));
            // Applying log4j.properties restarts the server.
            serverConfigurationManager.applyConfiguration(
                    new File(getAMResourceLocation() + File.separator + "configFiles/tokenTest/" + "log4j.properties"));
        }
        apiPublisher = new APIPublisherRestClient(publisherURLHttp);
        apiStore = new APIStoreRestClient(storeURLHttp);
    }
    /**
     * End-to-end flow: create and publish an API, subscribe an application,
     * generate sandbox/production keys, obtain a user access token with the
     * "openid" scope, and verify the token works against the userinfo endpoint.
     */
    @Test(groups = {"wso2.am"}, description = "Token API Test sample")
    public void testTokenAPITestCase() throws Exception {
        String APIName = "openIDTokenTestAPI";
        String APIContext = "openIDTokenTestAPI";
        String tags = "youtube, token, media";
        String url = "http://gdata.youtube.com/feeds/api/standardfeeds";
        String description = "This is test API create by API manager integration test";
        String providerName = "admin";
        String APIVersion = "1.0.0";
        apiPublisher.login(publisherContext.getSuperTenant().getContextUser().getUserName(),
                publisherContext.getSuperTenant().getContextUser().getPassword());
        // Create the API and publish it so it can be subscribed to.
        APIRequest apiRequest = new APIRequest(APIName, APIContext, new URL(url));
        apiRequest.setTags(tags);
        apiRequest.setDescription(description);
        apiRequest.setVersion(APIVersion);
        apiRequest.setSandbox(url);
        apiPublisher.addAPI(apiRequest);
        APILifeCycleStateRequest updateRequest =
                new APILifeCycleStateRequest(APIName, providerName, APILifeCycleState.PUBLISHED);
        apiPublisher.changeAPILifeCycleStatus(updateRequest);
        apiStore.login(publisherContext.getSuperTenant().getContextUser().getUserName(),
                publisherContext.getSuperTenant().getContextUser().getPassword());
        // Create application
        apiStore.addApplication("OpenIDTokenTestAPIApplication", "Gold", "", "this-is-test");
        SubscriptionRequest subscriptionRequest =
                new SubscriptionRequest(APIName, publisherContext.getSuperTenant().getContextUser().getUserName());
        subscriptionRequest.setApplicationName("OpenIDTokenTestAPIApplication");
        subscriptionRequest.setTier("Gold");
        apiStore.subscribe(subscriptionRequest);
        //Generate sandbox Token and invoke with that
        APPKeyRequestGenerator generateAppKeyRequestSandBox = new APPKeyRequestGenerator("OpenIDTokenTestAPIApplication");
        generateAppKeyRequestSandBox.setKeyType("SANDBOX");
        String responseStringSandBox = apiStore.generateApplicationKey(generateAppKeyRequestSandBox).getData();
        JSONObject responseSandBOX = new JSONObject(responseStringSandBox);
        String SANDbOXAccessToken = responseSandBOX.getJSONObject("data").getJSONObject("key").get("accessToken").toString();
        Map<String, String> requestHeadersSandBox = new HashMap<String, String>();
        requestHeadersSandBox.put("Authorization", "Bearer " + SANDbOXAccessToken);
        HttpResponse youTubeResponseSandBox = HttpRequestUtil.doGet(gatewayUrlsWrk.getWebAppURLNhttp() +
                "OpenIDTokenTestAPI/1.0.0/most_popular", requestHeadersSandBox);
        //Assert.assertEquals(youTubeResponseSandBox.getResponseCode(), 202, "Response code mismatched");
        //Generate production token and invoke with that
        APPKeyRequestGenerator generateAppKeyRequest = new APPKeyRequestGenerator("OpenIDTokenTestAPIApplication");
        String responseString = apiStore.generateApplicationKey(generateAppKeyRequest).getData();
        JSONObject response = new JSONObject(responseString);
        /*Response would be like -
        {"validityTime":"360000","consumerKey":"Ow2cGYBf3xlAPpG3Q51W_3qnoega",
        "accessToken":"qo3oNebQaF16C6qw1a56aZn2nwEa","enableRegenarate":true,"accessallowdomains":"ALL","
        consumerSecret":"ctHfsc1jFR7ovUgZ0oeHK8i9F9oa"}*/
        String consumerKey = response.getJSONObject("data").getJSONObject("key").getString("consumerKey");
        String consumerSecret = response.getJSONObject("data").getJSONObject("key").getString("consumerSecret");
        //Obtain user access token with the openid scope via the password grant.
        Thread.sleep(2000);
        String requestBody = "grant_type=password&username=admin&password=admin&scope=openid";
        URL tokenEndpointURL = new URL(gatewayUrlsWrk.getWebAppURLNhttps() + "token");
        JSONObject accessTokenGenerationResponse =
                new JSONObject(apiStore.generateUserAccessKey(consumerKey, consumerSecret, requestBody,
                        tokenEndpointURL).getData());
        /*Response would be like -
        {"token_type":"bearer","expires_in":3600,"refresh_token":"736b6b5354e4cf24f217718b2f3f72b",
        "access_token":"e06f12e3d6b1367d8471b093162f6729"}
        */
        String userAccessToken = accessTokenGenerationResponse.getString("access_token");
        String scope = accessTokenGenerationResponse.getString("scope");
        // The granted scope must include openid for the userinfo call to work.
        Assert.assertTrue(scope.contains("openid"), "Response data mismatched, openid scope test failed due to error in response");
        Map<String, String> requestHeaders = new HashMap<String, String>();
        requestHeaders.put("Authorization", "Bearer " + userAccessToken);
        Thread.sleep(2000);
        // Verify the openid token is accepted by the OAuth2 userinfo endpoint.
        HttpResponse youTubeResponse = HttpRequestUtil
                .doGet(gatewayUrlsMgt.getWebAppURLNhttp() + "oauth2/userinfo?schema=openid", requestHeaders);
        Assert.assertEquals(youTubeResponse.getResponseCode(), 200, "Response code mismatched");
    }
    /**
     * Cleans up test artifacts and restores the original server configuration
     * when running standalone.
     */
    @AfterClass(alwaysRun = true)
    public void destroy() throws Exception {
        super.cleanUp();
        if(this.executionEnvironment.equalsIgnoreCase(ExecutionEnvironment.STANDALONE.name())) {
            serverConfigurationManager.restoreToLastConfiguration();
        }
    }
    // Runs the whole class once per tenant mode (super tenant and tenant admin).
    @DataProvider
    public static Object[][] userModeDataProvider() {
        return new Object[][]{
                new Object[]{TestUserMode.SUPER_TENANT_ADMIN},
                new Object[]{TestUserMode.TENANT_ADMIN},
        };
    }
}
| apache-2.0 |
cloudera/hue | apps/filebrowser/src/filebrowser/lib/archives_test.py | 3445 | #!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from . import archives
import unittest
import os
from filebrowser.lib.archives import IllegalPathException
from nose.tools import assert_true, assert_equal
class ArchiveTest(unittest.TestCase):
  """Tests for filebrowser archive extraction (zip and tar.gz factories).

  The original tests repeated the extract-then-assert sequence four times;
  that pattern is factored into ``_assert_extracts`` so each case reads as
  one statement and new fixtures are easy to add.
  """

  def _assert_extracts(self, archive_path, archive_type, inner_path, expected_size):
    """Extract `archive_path` with the `archive_type` factory and check that
    `inner_path` exists inside the extraction directory with `expected_size`
    bytes."""
    directory = archives.archive_factory(archive_path, archive_type).extract()
    assert_true(os.path.exists(directory))
    assert_true(os.path.isdir(directory))
    assert_true(os.path.isfile(directory + inner_path))
    assert_equal(os.path.getsize(directory + inner_path), expected_size)

  def test_zip(self):
    # This archive should only contain 'test.txt'.
    self._assert_extracts(
        os.path.realpath('apps/filebrowser/src/filebrowser/test_data/test.zip'),
        'zip', '/test.txt', 4)
    # This archive nests 'test.txt' under tmp/temp/.
    self._assert_extracts(
        os.path.realpath('apps/filebrowser/src/filebrowser/test_data/test5.zip'),
        'zip', '/tmp/temp/test.txt', 5)

  def test_tgz(self):
    # This archive should only contain 'test.txt'.
    self._assert_extracts(
        os.path.realpath('apps/filebrowser/src/filebrowser/test_data/test.tar.gz'),
        'tgz', '/test.txt', 4)
    # This archive nests 'test.txt' under home/docs/.
    self._assert_extracts(
        os.path.realpath('apps/filebrowser/src/filebrowser/test_data/test2.tar.gz'),
        'tar.gz', '/home/docs/test.txt', 4)
    # Must be rejected: contains the illegal path '../../../Desktop/test.txt'.
    FILE = os.path.realpath('apps/filebrowser/src/filebrowser/test_data/test3.tar.gz')
    factory = archives.archive_factory(FILE, 'tar.gz')
    self.assertRaises(IllegalPathException, factory.extract)
    # Must be rejected: contains an absolute path.
    FILE = os.path.realpath('apps/filebrowser/src/filebrowser/test_data/test4.tar.gz')
    factory = archives.archive_factory(FILE, 'tar.gz')
    self.assertRaises(IllegalPathException, factory.extract)
# Allow running this test module directly as a script.
if __name__ == "__main__":
  unittest.main()
| apache-2.0 |
jandockx/ppwcode-recovered-from-google-code | java/vernacular/persistence/dev/d03/src/main/java/org/ppwcode/vernacular/persistence_III/junit/jpa/AbstractJpaPersistentBeanTest.java | 6774 | /*<license>
Copyright 2004 - $Date$ by PeopleWare n.v..
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
</license>*/
package org.ppwcode.vernacular.persistence_III.junit.jpa;
import static org.junit.Assert.assertNotNull;
import java.io.Serializable;
import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.ppwcode.bean_VI.CompoundPropertyException;
import org.ppwcode.bean_VI.PropertyException;
import org.ppwcode.vernacular.persistence_III.PersistentBean;
import org.ppwcode.vernacular.persistence_III.dao.PagingList;
import org.toryt.annotations_I.Basic;
import org.toryt.annotations_I.Expression;
import org.toryt.annotations_I.Invars;
import org.toryt.annotations_I.MethodContract;
/**
* A simple helper class for Hibernate actions within jUnit tests.
*
* @invar getClassUnderTest() != null;
* @invar PersistentBean.class.isAssignableFrom(getClassUnderTest());
*
* @author David Van Keer
* @author Jan Dockx
* @author Tom Mahieu
* @author Peopleware n.v.
*/
public abstract class AbstractJpaPersistentBeanTest<_Id_ extends Serializable, _PersistentBean_ extends PersistentBean<_Id_>> extends AbstractJpaTest {
  private static final Log LOG = LogFactory.getLog(AbstractJpaPersistentBeanTest.class);
  /**
   * Create a new test for the given class.
   */
  @MethodContract(
    pre  = @Expression("_classUnderTest != null"),
    post = @Expression("classUnderTest == _classUnderTest")
  )
  protected AbstractJpaPersistentBeanTest(final Class<_PersistentBean_> classUnderTest) {
    assert classUnderTest != null;
    $classUnderTest = classUnderTest;
  }
  /*<property name="class under test">*/
  //------------------------------------------------------------------
  /**
   * Returns the class that is tested.
   */
  @Basic(invars = @Expression("classUnderTest != null"))
  public final Class<_PersistentBean_> getClassUnderTest() {
    return $classUnderTest;
  }
  @Invars(@Expression("$classUnderTest != null"))
  private Class<_PersistentBean_> $classUnderTest;
  /*</property>*/
  /**
   * Tests all instances of {@link #getClassUnderTest()} in the underlying
   * storage.
   * The method {@link #validatePersistentBean(PersistentBean)} is used to test
   * the persistent beans.
   * When logging is debug enabled, we only retrieve and test 1 page.
   */
  public void testAlInstances() {
    LOG.debug("Creating a new Entity Manager and starting a new transaction.");
    createEntityManager();
    LOG.info("Creating paging set to retrieve instances of " + getClassUnderTest() + " from database in a new session.");
    PagingList<_Id_, _PersistentBean_>.PagesIterator pages = loadInstancesToTest().listIterator();
    // Only the first page is validated; see the method comment above.
    if (pages.hasNext()) {
      LOG.info("Retrieving instances of page " + pages.nextIndex() + " of "+ getClassUnderTest() + " from database.");
      List<_PersistentBean_> pbs = pages.next();
      LOG.info("Retrieved " + pbs.size() + " PersistentBeans.");
      for (_PersistentBean_ pb : pbs) {
        validatePersistentBean(pb);
      }
    }
    LOG.debug("Closing session");
    discardEntityManager();
  }
  /**
   * Overwrite if you do not wish to test all instances.
   * Session is open.
   */
  protected PagingList<_Id_, _PersistentBean_> loadInstancesToTest() {
    return retrievePages(getClassUnderTest());
  }
//  /**
//   * Retrieves the class contract corresponding to the class that is tested.
//   *
//   * @return (ClassContract)Contracts.typeContractInstance(getClassUnderTest())
//   *             does not throw an exception
//   *           ? result == (ClassContract)Contracts.typeContractInstance(getClassUnderTest())
//   *           : result == null;
//   */
//  protected final ClassContract getClassContract() {
//    ClassContract result = null;
//    try {
//      result = (ClassContract)Contracts.typeContractInstance(getClassUnderTest());
//    }
//    catch (IOException e) {
//      assert false : "IOException should not happen: " + e;
//    }
//    catch (ClassNotFoundException e) {
//      assert false : "ClassNotFoundException should not happen: " + e;
//    }
//    return result;
//  }
  /**
   * Validate the given persistent bean.
   * The following validations are executed:
   * - the given persistent bean should be effective
   * - the invariants are checked
   * - some extra validation, using {@link #extraPersistentBeanValidation(PersistentBean)}
   */
  protected void validatePersistentBean(final _PersistentBean_ pb) {
    if (LOG.isDebugEnabled()) {
      LOG.debug("pb: " + ((pb == null) ? "null" : pb.toString()));
    }
    assertNotNull(pb);
//    validateTypeInvariants(pb);
    boolean civilized = pb.isCivilized();
    /* data in DB must not really be civilized. What we STORE must be,
     * but what we get doesn't have to be (as long as type invariants
     * are ok.
     * But it is something weird: WARN.
     */
    if (LOG.isWarnEnabled() && (!civilized)) {
      CompoundPropertyException cpe = pb.getWildExceptions();
      LOG.warn("Not civilized: " + pb);
      for (PropertyException pe : cpe.getAllElementExceptions()) {
        LOG.warn("  " + pe.getLocalizedMessage());
        LOG.warn("    originType: " + pe.getOriginType());
        LOG.warn("    origin: " + pe.getOrigin());
        LOG.warn("    propertyName: " + pe.getPropertyName());
      }
    }
    extraPersistentBeanValidation(pb);
  }
  /**
   * Some extra validation to be performed on the given persistent bean.
   * Should be overridden by subclasses.
   */
  protected void extraPersistentBeanValidation(final _PersistentBean_ pb) {
    // NOP
  }
//  private void validateTypeInvariants(final Object instance) {
//    assert instance != null;
//    LOG.debug("getClassContract(): " + getClassContract());
//    Set invars = getClassContract().getTypeInvariantConditions();
//    Map context = new HashMap();
//    context.put(Condition.SUBJECT_KEY, instance);
//    Iterator iter = invars.iterator();
//    while (iter.hasNext()) {
//      Condition c = (Condition)iter.next();
//      boolean result = c.validate(context);
//      if (LOG.isErrorEnabled() && (!result)) {
//        LOG.error("type invariant violation: " + c + " for " + instance);
//      }
//      assertTrue(result);
//    }
//  }
}
| apache-2.0 |
plxaye/chromium | src/webkit/blob/local_file_stream_reader_unittest.cc | 8559 | // Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "webkit/blob/local_file_stream_reader.h"
#include <string>
#include "base/file_util.h"
#include "base/files/file_path.h"
#include "base/files/scoped_temp_dir.h"
#include "base/memory/scoped_ptr.h"
#include "base/message_loop.h"
#include "base/platform_file.h"
#include "base/threading/thread.h"
#include "net/base/io_buffer.h"
#include "net/base/net_errors.h"
#include "net/base/test_completion_callback.h"
#include "testing/gtest/include/gtest/gtest.h"
namespace webkit_blob {
namespace {
const char kTestData[] = "0123456789";
const int kTestDataSize = arraysize(kTestData) - 1;
// Drains up to |size| bytes from |reader| into |data|, recording the final
// status in |result|: net::OK on clean EOF/success, or the first read error.
void ReadFromReader(LocalFileStreamReader* reader,
                    std::string* data, size_t size,
                    int* result) {
  ASSERT_TRUE(reader != NULL);
  ASSERT_TRUE(result != NULL);
  *result = net::OK;
  net::TestCompletionCallback callback;
  for (size_t bytes_so_far = 0; bytes_so_far < size; ) {
    scoped_refptr<net::IOBufferWithSize> buffer(
        new net::IOBufferWithSize(size - bytes_so_far));
    int status = reader->Read(buffer, buffer->size(), callback.callback());
    if (status == net::ERR_IO_PENDING)
      status = callback.WaitForResult();
    if (status < 0)
      *result = status;
    if (status <= 0)
      break;
    bytes_so_far += status;
    data->append(buffer->data(), status);
  }
}
// Callback that must never run; registers a test failure if it does.
void NeverCalled(int) { ADD_FAILURE(); }
// No-op task used to flush the file task runner.
void EmptyCallback() {}
// Quits the current message loop; used as a reply callback.
void QuitLoop() {
  MessageLoop::current()->Quit();
}
} // namespace
// Fixture: owns a dedicated file thread and a temp dir containing a test
// file pre-filled with kTestData, whose modification time is recorded for
// the expected-modification-time tests.
class LocalFileStreamReaderTest : public testing::Test {
 public:
  LocalFileStreamReaderTest()
      : message_loop_(MessageLoop::TYPE_IO),
        file_thread_("FileUtilProxyTestFileThread") {}

  virtual void SetUp() OVERRIDE {
    ASSERT_TRUE(file_thread_.Start());
    ASSERT_TRUE(dir_.CreateUniqueTempDir());
    file_util::WriteFile(test_path(), kTestData, kTestDataSize);

    // Remember the file's mtime so tests can pass it as the expected
    // modification time.
    base::PlatformFileInfo info;
    ASSERT_TRUE(file_util::GetFileInfo(test_path(), &info));
    test_file_modification_time_ = info.last_modified;
  }

  virtual void TearDown() OVERRIDE {
    // Give another chance for deleted streams to perform Close.
    MessageLoop::current()->RunUntilIdle();
    file_thread_.Stop();
  }

 protected:
  // Creates a reader over |path| starting at |initial_offset|; a non-null
  // |expected_modification_time| makes reads fail if the file changed.
  LocalFileStreamReader* CreateFileReader(
      const base::FilePath& path,
      int64 initial_offset,
      const base::Time& expected_modification_time) {
    return new LocalFileStreamReader(
        file_task_runner(),
        path,
        initial_offset,
        expected_modification_time);
  }

  // Rewinds the test file's mtime by one second so it no longer matches
  // test_file_modification_time().
  void TouchTestFile() {
    base::Time new_modified_time =
        test_file_modification_time() - base::TimeDelta::FromSeconds(1);
    ASSERT_TRUE(file_util::TouchFile(test_path(),
                                     test_file_modification_time(),
                                     new_modified_time));
  }

  base::MessageLoopProxy* file_task_runner() const {
    return file_thread_.message_loop_proxy().get();
  }

  base::FilePath test_dir() const { return dir_.path(); }
  base::FilePath test_path() const { return dir_.path().AppendASCII("test"); }
  base::Time test_file_modification_time() const {
    return test_file_modification_time_;
  }

  // Blocks until all tasks queued on the file thread have run.
  void EnsureFileTaskFinished() {
    file_task_runner()->PostTaskAndReply(
        FROM_HERE, base::Bind(&EmptyCallback), base::Bind(&QuitLoop));
    MessageLoop::current()->Run();
  }

 private:
  MessageLoop message_loop_;
  base::Thread file_thread_;
  base::ScopedTempDir dir_;
  base::Time test_file_modification_time_;
};
// Reading a path that does not exist must fail with ERR_FILE_NOT_FOUND
// and produce no data.
TEST_F(LocalFileStreamReaderTest, NonExistent) {
  base::FilePath nonexistent_path = test_dir().AppendASCII("nonexistent");
  scoped_ptr<LocalFileStreamReader> reader(
      CreateFileReader(nonexistent_path, 0, base::Time()));
  int result = 0;
  std::string data;
  ReadFromReader(reader.get(), &data, 10, &result);
  ASSERT_EQ(net::ERR_FILE_NOT_FOUND, result);
  ASSERT_EQ(0U, data.size());
}
// Reading an empty file succeeds with no data, and GetLength() reports 0.
TEST_F(LocalFileStreamReaderTest, Empty) {
  base::FilePath empty_path = test_dir().AppendASCII("empty");
  base::PlatformFileError error = base::PLATFORM_FILE_OK;
  base::PlatformFile file = base::CreatePlatformFile(
      empty_path,
      base::PLATFORM_FILE_CREATE | base::PLATFORM_FILE_READ,
      NULL, &error);
  ASSERT_EQ(base::PLATFORM_FILE_OK, error);
  ASSERT_NE(base::kInvalidPlatformFileValue, file);
  base::ClosePlatformFile(file);

  scoped_ptr<LocalFileStreamReader> reader(
      CreateFileReader(empty_path, 0, base::Time()));
  int result = 0;
  std::string data;
  ReadFromReader(reader.get(), &data, 10, &result);
  ASSERT_EQ(net::OK, result);
  ASSERT_EQ(0U, data.size());

  net::TestInt64CompletionCallback callback;
  int64 length_result = reader->GetLength(callback.callback());
  if (length_result == net::ERR_IO_PENDING)
    length_result = callback.WaitForResult();
  // Bug fix: this previously asserted on |result| (the read status, already
  // checked above) instead of |length_result|, so GetLength() on an empty
  // file was never actually verified.
  ASSERT_EQ(0, length_result);
}
// GetLength succeeds when the expected modification time matches the file.
TEST_F(LocalFileStreamReaderTest, GetLengthNormal) {
  scoped_ptr<LocalFileStreamReader> reader(
      CreateFileReader(test_path(), 0, test_file_modification_time()));
  net::TestInt64CompletionCallback callback;
  int64 result = reader->GetLength(callback.callback());
  if (result == net::ERR_IO_PENDING)
    result = callback.WaitForResult();
  ASSERT_EQ(kTestDataSize, result);
}

// GetLength fails with ERR_UPLOAD_FILE_CHANGED when the file's mtime no
// longer matches the expected time, but works with a null expected time.
TEST_F(LocalFileStreamReaderTest, GetLengthAfterModified) {
  // Touch file so that the file's modification time becomes different
  // from what we expect.
  TouchTestFile();

  scoped_ptr<LocalFileStreamReader> reader(
      CreateFileReader(test_path(), 0, test_file_modification_time()));
  net::TestInt64CompletionCallback callback;
  int64 result = reader->GetLength(callback.callback());
  if (result == net::ERR_IO_PENDING)
    result = callback.WaitForResult();
  ASSERT_EQ(net::ERR_UPLOAD_FILE_CHANGED, result);

  // With NULL expected modification time this should work.
  reader.reset(CreateFileReader(test_path(), 0, base::Time()));
  result = reader->GetLength(callback.callback());
  if (result == net::ERR_IO_PENDING)
    result = callback.WaitForResult();
  ASSERT_EQ(kTestDataSize, result);
}

// The initial offset must not affect the length reported by GetLength.
TEST_F(LocalFileStreamReaderTest, GetLengthWithOffset) {
  scoped_ptr<LocalFileStreamReader> reader(
      CreateFileReader(test_path(), 3, base::Time()));
  net::TestInt64CompletionCallback callback;
  int64 result = reader->GetLength(callback.callback());
  if (result == net::ERR_IO_PENDING)
    result = callback.WaitForResult();
  // Initial offset does not affect the result of GetLength.
  ASSERT_EQ(kTestDataSize, result);
}
// Reading the whole file yields exactly kTestData.
TEST_F(LocalFileStreamReaderTest, ReadNormal) {
  scoped_ptr<LocalFileStreamReader> reader(
      CreateFileReader(test_path(), 0, test_file_modification_time()));
  int result = 0;
  std::string data;
  ReadFromReader(reader.get(), &data, kTestDataSize, &result);
  ASSERT_EQ(net::OK, result);
  ASSERT_EQ(kTestData, data);
}

// Reads fail with ERR_UPLOAD_FILE_CHANGED once the file's mtime no longer
// matches the expected modification time; a null expected time still works.
TEST_F(LocalFileStreamReaderTest, ReadAfterModified) {
  // Touch file so that the file's modification time becomes different
  // from what we expect.
  TouchTestFile();

  scoped_ptr<LocalFileStreamReader> reader(
      CreateFileReader(test_path(), 0, test_file_modification_time()));
  int result = 0;
  std::string data;
  ReadFromReader(reader.get(), &data, kTestDataSize, &result);
  ASSERT_EQ(net::ERR_UPLOAD_FILE_CHANGED, result);
  ASSERT_EQ(0U, data.size());

  // With NULL expected modification time this should work.
  data.clear();
  reader.reset(CreateFileReader(test_path(), 0, base::Time()));
  ReadFromReader(reader.get(), &data, kTestDataSize, &result);
  ASSERT_EQ(net::OK, result);
  ASSERT_EQ(kTestData, data);
}

// A non-zero initial offset makes reads start mid-file.
TEST_F(LocalFileStreamReaderTest, ReadWithOffset) {
  scoped_ptr<LocalFileStreamReader> reader(
      CreateFileReader(test_path(), 3, base::Time()));
  int result = 0;
  std::string data;
  ReadFromReader(reader.get(), &data, kTestDataSize, &result);
  ASSERT_EQ(net::OK, result);
  ASSERT_EQ(&kTestData[3], data);
}
// Deleting the reader while a read is still pending must neither crash nor
// invoke the read's completion callback.
TEST_F(LocalFileStreamReaderTest, DeleteWithUnfinishedRead) {
  scoped_ptr<LocalFileStreamReader> reader(
      CreateFileReader(test_path(), 0, base::Time()));
  net::TestCompletionCallback callback;
  scoped_refptr<net::IOBufferWithSize> buf(
      new net::IOBufferWithSize(kTestDataSize));
  int rv = reader->Read(buf, buf->size(), base::Bind(&NeverCalled));
  ASSERT_TRUE(rv == net::ERR_IO_PENDING || rv >= 0);

  // Delete immediately.
  // Should not crash; nor should NeverCalled be callback.
  reader.reset();
  EnsureFileTaskFinished();
}
} // namespace webkit_blob
| apache-2.0 |
skypies/flightdb | fgae/addtrackfragment.go | 4998 | package fgae
import(
"fmt"
"time"
"github.com/skypies/geo/sfo"
fdb "github.com/skypies/flightdb"
"github.com/skypies/flightdb/ref"
)
// {{{ currentAccumulationTrack
// currentAccumulationTrack picks which of the flight's ADSB / MLAT tracks
// new fragments should accumulate into: whichever one exists, or — when
// both exist with points — whichever track ends most recently.
func currentAccumulationTrack(f *fdb.Flight) *fdb.Track {
	hasADSB, hasMLAT := f.HasTrack("ADSB"), f.HasTrack("MLAT")
	switch {
	case !hasADSB && !hasMLAT:
		return nil
	case !hasADSB:
		return f.Tracks["MLAT"]
	case !hasMLAT:
		return f.Tracks["ADSB"]
	}

	mlat, adsb := f.Tracks["MLAT"], f.Tracks["ADSB"]
	if len(*mlat) == 0 {
		return adsb
	}
	if len(*adsb) == 0 {
		return mlat
	}

	// Both tracks have points; prefer the one with the most recent endpoint.
	if (*mlat).End().After((*adsb).End()) {
		return mlat
	}
	return adsb
}
// }}}
// {{{ AddTrackFragment
// AddTrackFragment folds a fragment of received track data (ADSB or MLAT)
// into the most recently stored flight for the fragment's IcaoId, creating a
// brand-new flight when none exists or when the fragment is not a plausible
// continuation of the current track. The flight is then enriched (airframe
// metadata, waypoint matches) and persisted. Timing checkpoints are recorded
// into perf under keys that sort in execution order.
// NOTE(review): the schedules argument is not used in this function body —
// confirm whether it is still needed by callers.
func (db *FlightDB)AddTrackFragment(frag *fdb.TrackFragment, airframes *ref.AirframeCache, schedules *ref.ScheduleCache, perf map[string]time.Time) error {
	perf["01_start"] = time.Now()
	db.Debugf("* adding frag %d\n", len(frag.Track))
	// Find the most recent flight already stored for this transponder id.
	f,err := db.LookupMostRecent(db.NewQuery().ByIcaoId(frag.IcaoId))
	if err != nil { return err }
	perf["02_mostrecent"] = time.Now()
	// Prefix for debug-log lines, identifying this fragment.
	prefix := fmt.Sprintf("[%s/%s]%s %s", frag.IcaoId, frag.Callsign, frag.DataSystem, time.Now())
	// If the fragment is strictly a suffix, this will hold the preceding point
	var prevTP *fdb.Trackpoint
	if f == nil {
		// No flight on record for this IcaoId: start a fresh one.
		f = fdb.NewFlightFromTrackFragment(frag)
		f.DebugLog += "-- AddFrag "+prefix+": new IcaoID\n"
		db.Debugf("* %s brand new IcaoID: %s", prefix, f)
	} else {
		db.Debugf("* %s found %s", prefix, f)
		trackKey := frag.TrackName() // ADSB, or MLAT; this is the track we will accumulate into
		// This is the most recent track we've accumulated into (could be ADSB, or MLAT); nil if none.
		// Note that this doesn't have to be the same as trackKey; we might be adding ADSB, but already
		// have some MLAT for the flight.
		accTrack := currentAccumulationTrack(f)
		if accTrack == nil {
			// Flight exists but has no ADSB/MLAT data yet: install the fragment directly.
			f.DebugLog += "-- AddFrag "+prefix+": first frag on pre-existing flight\n"
			db.Debugf("* %s no pre-existing track; adding right in", prefix)
			f.Tracks[trackKey] = &frag.Track
		} else if plausible,debug := accTrack.PlausibleContribution(&frag.Track); plausible==true {
			perf["03_plausible"] = time.Now()
			f.DebugLog += fmt.Sprintf("-- AddFrag %s: extending (adding %d to %d points)\n",
				prefix, len(frag.Track), len(*accTrack))
			db.Debugf("* %s extending track ... debug:\n%s", prefix, debug)
			// For MLAT data, callsigns can take a while to show up in the stream
			if f.Identity.Callsign == "" && frag.Callsign != "" {
				f.DebugLog += fmt.Sprintf(" - prev callsign was nil; adding it in now\n")
				f.Identity.Callsign = frag.Callsign
			}
			if !f.HasTrack(trackKey) {
				// If the accTrack was a different type (MLAT vs. ADSB), then we'll need to init
				f.Tracks[trackKey] = &fdb.Track{}
			} else {
				// Determine whether this frag is strictly a suffix to existing track data; this is the
				// common case. If so, keep a pointer to the trackpoint that precedes the frag
				n := len(*f.Tracks[trackKey])
				if n>0 && (*f.Tracks[trackKey])[n-1].TimestampUTC.Before(frag.Track[0].TimestampUTC) {
					db.Debugf("** new frag is strictly a suffix; prev = %d", n-1)
					prevTP = &((*f.Tracks[trackKey])[n-1])
				}
			}
			db.Debugf("* %s adding %d points to %d\n", prefix, len(frag.Track), len(*f.Tracks[trackKey]))
			db.Debugf("** pre : %s", f.Tracks[trackKey])
			f.Tracks[trackKey].Merge(&frag.Track)
			db.Debugf("** post: %s", f.Tracks[trackKey])
		} else {
			// Fragment doesn't plausibly belong to the stored flight (e.g. a
			// later flight by the same airframe): start a new flight instead.
			perf["03_notplausible"] = time.Now()
			f = fdb.NewFlightFromTrackFragment(frag)
			f.DebugLog += "-- AddFrag "+prefix+": was not plausible, so new flight\n"
			db.Debugf("* %s not a plausible addition; starting afresh ... debug\n%s", prefix, debug)
			f.DebugLog += debug+"\n"
		}
	}
	// Consult the airframe cache, and perhaps add some metadata, if not already present
	if f.Airframe.Registration == "" {
		if airframes != nil {
			if af := airframes.Get(f.IcaoId); af != nil {
				f.DebugLog += "-- AddFrag "+prefix+": found airframe\n"
				f.OverlayAirframe(*af)
			}
		}
	}
	// There could be a big gap between the previous track and this frag.
	// If that's the case, grab the preceding trackpoint and prefix this frag with it; then
	// the waypoint detection code (which builds lines between points) will look at the gap
	// between the frags, and maybe find extra waypoints.
	if prevTP != nil {
		// shift(x,a) : a = append([]T{x}, a...)
		frag.Track = append([]fdb.Trackpoint{*prevTP}, frag.Track...)
	}
	perf["04_trackbuild"] = time.Now()
	// Incrementally identify waypoints, frag by frag
	for wp,t := range frag.Track.MatchWaypoints(sfo.KFixes) {
		f.DebugLog += "-- AddFrag "+prefix+": found waypoint "+wp+"\n"
		f.SetWaypoint(wp,t)
	}
	perf["05_waypoints"] = time.Now()
	err = db.PersistFlight(f)
	perf["06_persist"] = time.Now()
	return err
}
// }}}
// {{{ -------------------------={ E N D }=----------------------------------
// Local variables:
// folded-file: t
// end:
// }}}
| apache-2.0 |
hemikak/siddhi | modules/siddhi-query-api/src/main/java/org/wso2/siddhi/query/api/execution/query/input/state/CountStateElement.java | 3116 | /*
* Copyright (c) 2005 - 2015, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy
* of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed
* under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package org.wso2.siddhi.query.api.execution.query.input.state;
import org.wso2.siddhi.query.api.expression.constant.TimeConstant;
/**
 * State element that matches an underlying stream state a bounded number of
 * times, between {@code minCount} and {@code maxCount}. Either bound may be
 * left as {@link #ANY} to indicate "unspecified". An optional {@code within}
 * time constant restricts how long the whole match may take.
 */
public class CountStateElement implements StateElement {

    /** Sentinel value meaning "no bound specified". */
    public static final int ANY = -1;

    private StreamStateElement streamStateElement;
    private TimeConstant within;
    private int minCount = ANY;
    private int maxCount = ANY;

    public CountStateElement(StreamStateElement streamStateElement, int minCount, int maxCount, TimeConstant within) {
        this(streamStateElement, minCount, maxCount);
        this.within = within;
    }

    public CountStateElement(StreamStateElement streamStateElement, int minCount, int maxCount) {
        this.streamStateElement = streamStateElement;
        this.minCount = minCount;
        this.maxCount = maxCount;
    }

    /** @return the lower repetition bound, or {@link #ANY} if unspecified */
    public int getMinCount() {
        return minCount;
    }

    /** @return the upper repetition bound, or {@link #ANY} if unspecified */
    public int getMaxCount() {
        return maxCount;
    }

    /** @return the stream state being counted */
    public StreamStateElement getStreamStateElement() {
        return streamStateElement;
    }

    @Override
    public TimeConstant getWithin() {
        return within;
    }

    public void setWithin(TimeConstant within) {
        this.within = within;
    }

    @Override
    public String toString() {
        StringBuilder builder = new StringBuilder("CountStateElement{");
        builder.append("streamStateElement=").append(streamStateElement);
        builder.append(", within=").append(within);
        builder.append(", minCount=").append(minCount);
        builder.append(", maxCount=").append(maxCount);
        builder.append('}');
        return builder.toString();
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (!(o instanceof CountStateElement)) {
            return false;
        }
        CountStateElement other = (CountStateElement) o;
        if (minCount != other.minCount || maxCount != other.maxCount) {
            return false;
        }
        if (streamStateElement == null ? other.streamStateElement != null
                : !streamStateElement.equals(other.streamStateElement)) {
            return false;
        }
        return within == null ? other.within == null : within.equals(other.within);
    }

    @Override
    public int hashCode() {
        // Same accumulation as the classic 31-multiplier recipe, so hash
        // values are unchanged for existing callers.
        int result = (streamStateElement == null) ? 0 : streamStateElement.hashCode();
        result = 31 * result + ((within == null) ? 0 : within.hashCode());
        result = 31 * result + minCount;
        result = 31 * result + maxCount;
        return result;
    }
}
| apache-2.0 |
apache/karaf-cellar | shell/src/main/java/org/apache/karaf/cellar/shell/handler/HandlersSupport.java | 3988 | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.karaf.cellar.shell.handler;
import org.apache.karaf.cellar.core.Node;
import org.apache.karaf.cellar.core.control.ManageHandlersCommand;
import org.apache.karaf.cellar.core.control.ManageHandlersResult;
import org.apache.karaf.cellar.shell.ClusterCommandSupport;
import org.apache.karaf.shell.support.table.ShellTable;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
* Generic cluster event handler shell command support.
*/
/**
 * Generic cluster event handler shell command support.
 */
public abstract class HandlersSupport extends ClusterCommandSupport {

    protected Object doExecute(String handlerName, List<String> nodeIdsOrAliases, Boolean status) throws Exception {

        ManageHandlersCommand command = new ManageHandlersCommand(clusterManager.generateId());
        command.setTimeout(timeout * 1000);

        // Resolve the recipient nodes, warning about ids/aliases that don't exist.
        Set<Node> recipients = new HashSet<Node>();
        if (nodeIdsOrAliases == null || nodeIdsOrAliases.isEmpty()) {
            if (status == null) {
                // status display: target every node in the cluster
                recipients = clusterManager.listNodes();
            } else {
                // status change: target only the local node
                recipients.add(clusterManager.getNode());
            }
        } else {
            for (String idOrAlias : nodeIdsOrAliases) {
                Node node = clusterManager.findNodeByIdOrAlias(idOrAlias);
                if (node != null) {
                    recipients.add(node);
                } else {
                    System.err.println("Cluster node " + idOrAlias + " doesn't exist");
                }
            }
        }

        if (recipients.isEmpty()) {
            return null;
        }

        command.setDestination(recipients);
        command.setHandlerName(handlerName);
        command.setStatus(status);

        Map<Node, ManageHandlersResult> responses = executionContext.execute(command);
        if (responses == null || responses.isEmpty()) {
            System.out.println("No result received within given timeout");
            return null;
        }

        // Render one table row per (node, handler) pair; the local node is
        // flagged with an "x" in the first column.
        ShellTable table = new ShellTable();
        table.column(" ");
        table.column("Node");
        table.column("Status");
        table.column("Event Handler");
        for (Map.Entry<Node, ManageHandlersResult> entry : responses.entrySet()) {
            Node node = entry.getKey();
            String local = node.equals(clusterManager.getNode()) ? "x" : "";
            ManageHandlersResult result = entry.getValue();
            if (result == null || result.getHandlers() == null) {
                continue;
            }
            for (Map.Entry<String, String> handlerEntry : result.getHandlers().entrySet()) {
                String nodeName = node.getAlias();
                if (nodeName == null) {
                    nodeName = node.getId();
                }
                table.addRow().addContent(local, nodeName, handlerEntry.getValue(), handlerEntry.getKey());
            }
        }
        table.print(System.out);
        return null;
    }
}
| apache-2.0 |
Orange-OpenSource/cf-java-client | integration-test/src/test/java/org/cloudfoundry/uaa/ClientsTest.java | 22393 | /*
* Copyright 2013-2017 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.cloudfoundry.uaa;
import io.netty.util.AsciiString;
import org.cloudfoundry.AbstractIntegrationTest;
import org.cloudfoundry.UnknownCloudFoundryException;
import org.cloudfoundry.uaa.clients.BatchChangeSecretRequest;
import org.cloudfoundry.uaa.clients.BatchChangeSecretResponse;
import org.cloudfoundry.uaa.clients.BatchCreateClientsRequest;
import org.cloudfoundry.uaa.clients.BatchCreateClientsResponse;
import org.cloudfoundry.uaa.clients.BatchDeleteClientsRequest;
import org.cloudfoundry.uaa.clients.BatchUpdateClientsRequest;
import org.cloudfoundry.uaa.clients.BatchUpdateClientsResponse;
import org.cloudfoundry.uaa.clients.ChangeSecret;
import org.cloudfoundry.uaa.clients.ChangeSecretRequest;
import org.cloudfoundry.uaa.clients.Client;
import org.cloudfoundry.uaa.clients.CreateClient;
import org.cloudfoundry.uaa.clients.CreateClientAction;
import org.cloudfoundry.uaa.clients.CreateClientRequest;
import org.cloudfoundry.uaa.clients.CreateClientResponse;
import org.cloudfoundry.uaa.clients.DeleteClientAction;
import org.cloudfoundry.uaa.clients.DeleteClientRequest;
import org.cloudfoundry.uaa.clients.GetClientRequest;
import org.cloudfoundry.uaa.clients.GetMetadataRequest;
import org.cloudfoundry.uaa.clients.GetMetadataResponse;
import org.cloudfoundry.uaa.clients.ListClientsRequest;
import org.cloudfoundry.uaa.clients.ListClientsResponse;
import org.cloudfoundry.uaa.clients.ListMetadatasRequest;
import org.cloudfoundry.uaa.clients.ListMetadatasResponse;
import org.cloudfoundry.uaa.clients.MixedActionsRequest;
import org.cloudfoundry.uaa.clients.UpdateClient;
import org.cloudfoundry.uaa.clients.UpdateClientAction;
import org.cloudfoundry.uaa.clients.UpdateClientRequest;
import org.cloudfoundry.uaa.clients.UpdateMetadataRequest;
import org.cloudfoundry.uaa.clients.UpdateMetadataResponse;
import org.cloudfoundry.uaa.clients.UpdateSecretAction;
import org.cloudfoundry.uaa.clients.UpdateSecretClientAction;
import org.cloudfoundry.util.PaginationUtils;
import org.junit.Test;
import org.springframework.beans.factory.annotation.Autowired;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import reactor.ipc.netty.http.client.HttpClientException;
import reactor.test.StepVerifier;
import java.time.Duration;
import java.util.Base64;
import java.util.concurrent.TimeoutException;
import static org.assertj.core.api.Assertions.assertThat;
import static org.cloudfoundry.uaa.tokens.GrantType.CLIENT_CREDENTIALS;
import static org.cloudfoundry.uaa.tokens.GrantType.IMPLICIT;
import static org.cloudfoundry.uaa.tokens.GrantType.PASSWORD;
import static org.cloudfoundry.uaa.tokens.GrantType.REFRESH_TOKEN;
/**
 * Integration tests for the UAA client-administration endpoints
 * ({@code /oauth/clients}): single and batch CRUD, secret changes, mixed
 * batch actions, and client metadata. Clients created per-test get unique
 * ids/secrets from {@code nameFactory} so runs don't collide. Each test
 * drives a reactive pipeline and verifies it with a {@code StepVerifier}
 * under a five-minute budget.
 */
public final class ClientsTest extends AbstractIntegrationTest {

    // Id of the client this suite authenticates as; the metadata tests
    // operate on this pre-existing client rather than creating one.
    @Autowired
    private String clientId;

    @Autowired
    private UaaClient uaaClient;

    @Test
    public void batchChangeSecret() throws TimeoutException, InterruptedException {
        String clientId1 = this.nameFactory.getClientId();
        String clientId2 = this.nameFactory.getClientId();
        String clientSecret = this.nameFactory.getClientSecret();
        String newClientSecret1 = this.nameFactory.getClientSecret();
        String newClientSecret2 = this.nameFactory.getClientSecret();

        requestCreateClient(this.uaaClient, clientId1, clientSecret)
            .then(requestCreateClient(this.uaaClient, clientId2, clientSecret))
            .then(this.uaaClient.clients()
                .batchChangeSecret(BatchChangeSecretRequest.builder()
                    .changeSecret(ChangeSecret.builder()
                        .clientId(clientId1)
                        .oldSecret(clientSecret)
                        .secret(newClientSecret1)
                        .build(),
                        ChangeSecret.builder()
                            .clientId(clientId2)
                            .oldSecret(clientSecret)
                            .secret(newClientSecret2)
                            .build())
                    .build()))
            .flatMapIterable(BatchChangeSecretResponse::getClients)
            .as(StepVerifier::create)
            .expectNextCount(2)
            .expectComplete()
            .verify(Duration.ofMinutes(5));
    }

    @Test
    public void batchCreate() throws TimeoutException, InterruptedException {
        String clientId1 = this.nameFactory.getClientId();
        String clientId2 = this.nameFactory.getClientId();
        String clientSecret = this.nameFactory.getClientSecret();

        // Create two clients in one batch, then assert on the first one only.
        this.uaaClient.clients()
            .batchCreate(BatchCreateClientsRequest.builder()
                .client(CreateClient.builder()
                    .approvalsDeleted(true)
                    .authorizedGrantType(PASSWORD)
                    .clientId(clientId1)
                    .clientSecret(clientSecret)
                    .scope("client.read", "client.write")
                    .tokenSalt("test-token-salt")
                    .build())
                .client(CreateClient.builder()
                    .approvalsDeleted(true)
                    .authorizedGrantType(PASSWORD, REFRESH_TOKEN)
                    .clientId(clientId2)
                    .clientSecret(clientSecret)
                    .scope("client.write")
                    .tokenSalt("filtered-test-token-salt")
                    .build())
                .build())
            .flatMapIterable(BatchCreateClientsResponse::getClients)
            .filter(client -> clientId1.equals(client.getClientId()))
            .as(StepVerifier::create)
            .consumeNextWith(response -> {
                // REFRESH_TOKEN appears alongside PASSWORD in the response
                // even though only PASSWORD was requested for clientId1.
                assertThat(response.getAuthorizedGrantTypes()).containsExactly(PASSWORD, REFRESH_TOKEN);
                assertThat(response.getClientId()).isEqualTo(clientId1);
                assertThat(response.getScopes()).containsExactly("client.read", "client.write");
                assertThat(response.getTokenSalt()).isEqualTo("test-token-salt");
            })
            .expectComplete()
            .verify(Duration.ofMinutes(5));
    }

    @Test
    public void batchDelete() throws TimeoutException, InterruptedException {
        String clientId1 = this.nameFactory.getClientId();
        String clientId2 = this.nameFactory.getClientId();
        String clientSecret = this.nameFactory.getClientSecret();

        // Create two clients, batch-delete them by id, then verify neither
        // appears in a subsequent listing.
        batchCreateClients(this.uaaClient, clientId1, clientId2, clientSecret)
            .flatMapIterable(BatchCreateClientsResponse::getClients)
            .map(Client::getClientId)
            .collectList()
            .then(clientIds -> this.uaaClient.clients()
                .batchDelete(BatchDeleteClientsRequest.builder()
                    .clientIds(clientIds)
                    .build()))
            .flatMap(ignore -> requestListClients(this.uaaClient))
            .filter(client -> clientId1.equals(client.getClientId()) || clientId2.equals(client.getClientId()))
            .as(StepVerifier::create)
            .expectComplete()
            .verify(Duration.ofMinutes(5));
    }

    @Test
    public void batchUpdate() throws TimeoutException, InterruptedException {
        String clientId1 = this.nameFactory.getClientId();
        String clientId2 = this.nameFactory.getClientId();
        String clientSecret = this.nameFactory.getClientSecret();

        requestCreateClient(this.uaaClient, clientId1, clientSecret)
            .then(requestCreateClient(this.uaaClient, clientId2, clientSecret))
            .then(this.uaaClient.clients()
                .batchUpdate(BatchUpdateClientsRequest.builder()
                    .client(UpdateClient.builder()
                        .authorizedGrantType(CLIENT_CREDENTIALS, IMPLICIT)
                        .clientId(clientId1)
                        .name("test-name")
                        .scope("client.read", "client.write")
                        .tokenSalt("test-token-salt")
                        .build(),
                        UpdateClient.builder()
                            .authorizedGrantType(PASSWORD)
                            .clientId(clientId2)
                            .name("filtered-test-name")
                            .scope("client.write")
                            .tokenSalt("filtered-test-token-salt")
                            .build())
                    .build()))
            .flatMapIterable(BatchUpdateClientsResponse::getClients)
            .filter(client -> clientId1.equals(client.getClientId()))
            .as(StepVerifier::create)
            .consumeNextWith(response -> {
                // Grant-type ordering in the response differs from the request.
                assertThat(response.getAuthorizedGrantTypes()).containsExactly(IMPLICIT, CLIENT_CREDENTIALS);
                assertThat(response.getClientId()).isEqualTo(clientId1);
                assertThat(response.getName()).isEqualTo("test-name");
                assertThat(response.getScopes()).containsExactly("client.read", "client.write");
                assertThat(response.getTokenSalt()).isEqualTo("test-token-salt");
            })
            .expectComplete()
            .verify(Duration.ofMinutes(5));
    }

    @Test
    public void changeSecret() throws TimeoutException, InterruptedException {
        String clientId = this.nameFactory.getClientId();
        String newClientSecret = this.nameFactory.getClientSecret();
        String oldClientSecret = this.nameFactory.getClientSecret();

        // Currently asserts the expected server-side rejection: only the
        // client itself may change its secret via this endpoint.
        requestCreateClient(this.uaaClient, clientId, oldClientSecret)
            .then(this.uaaClient.clients()
                .changeSecret(ChangeSecretRequest.builder()
                    .clientId(clientId)
                    .oldSecret(oldClientSecret)
                    .secret(newClientSecret)
                    .build()))
            .as(StepVerifier::create)
            // TODO: Update test based on https://www.pivotaltracker.com/n/projects/997278/stories/130645469 to use the following
            // .expectThat(response -> {
            //     assertEquals("secret updated", response.getMessage());
            //     assertEquals("ok", response.getStatus());
            // }));
            .consumeErrorWith(t -> assertThat(t).isInstanceOf(UaaException.class).hasMessage("invalid_client: Only a client can change client secret"))
            .verify(Duration.ofMinutes(5));
    }

    @Test
    public void create() throws TimeoutException, InterruptedException {
        String clientId = this.nameFactory.getClientId();
        String clientSecret = this.nameFactory.getClientSecret();

        this.uaaClient.clients()
            .create(CreateClientRequest.builder()
                .approvalsDeleted(true)
                .authorizedGrantType(PASSWORD)
                .clientId(clientId)
                .clientSecret(clientSecret)
                .scope("client.read", "client.write")
                .tokenSalt("test-token-salt")
                .build())
            .as(StepVerifier::create)
            .consumeNextWith(response -> {
                assertThat(response.getAuthorizedGrantTypes()).containsExactly(PASSWORD, REFRESH_TOKEN);
                assertThat(response.getClientId()).isEqualTo(clientId);
                assertThat(response.getScopes()).containsExactly("client.read", "client.write");
                assertThat(response.getTokenSalt()).isEqualTo("test-token-salt");
            })
            .expectComplete()
            .verify(Duration.ofMinutes(5));
    }

    @Test
    public void delete() throws TimeoutException, InterruptedException {
        String clientId = this.nameFactory.getClientId();
        String clientSecret = this.nameFactory.getClientSecret();

        // Deleted client must no longer appear in the listing.
        requestCreateClient(this.uaaClient, clientId, clientSecret)
            .then(this.uaaClient.clients()
                .delete(DeleteClientRequest.builder()
                    .clientId(clientId)
                    .build()))
            .flatMap(ignore -> requestListClients(this.uaaClient))
            .filter(client -> clientId.equals(client.getClientId()))
            .as(StepVerifier::create)
            .expectComplete()
            .verify(Duration.ofMinutes(5));
    }

    @Test
    public void get() throws TimeoutException, InterruptedException {
        String clientId = this.nameFactory.getClientId();
        String clientSecret = this.nameFactory.getClientSecret();

        requestCreateClient(this.uaaClient, clientId, clientSecret)
            .then(this.uaaClient.clients()
                .get(GetClientRequest.builder()
                    .clientId(clientId)
                    .build()))
            .as(StepVerifier::create)
            .consumeNextWith(response -> {
                assertThat(response.getAuthorizedGrantTypes()).containsExactly(PASSWORD, REFRESH_TOKEN);
                assertThat(response.getClientId()).isEqualTo(clientId);
            })
            .expectComplete()
            .verify(Duration.ofMinutes(5));
    }

    @Test
    public void getMetadata() throws TimeoutException, InterruptedException {
        requestUpdateMetadata(this.uaaClient, this.clientId, "http://test.get.url")
            .then(this.uaaClient.clients()
                .getMetadata(GetMetadataRequest.builder()
                    .clientId(this.clientId)
                    .build()))
            .as(StepVerifier::create)
            .consumeNextWith(metadata -> {
                assertThat(metadata.getAppLaunchUrl()).isEqualTo("http://test.get.url");
                assertThat(metadata.getClientId()).isEqualTo(this.clientId);
            })
            .expectComplete()
            .verify(Duration.ofMinutes(5));
    }

    @Test
    public void list() throws TimeoutException, InterruptedException {
        String clientId = this.nameFactory.getClientId();
        String clientSecret = this.nameFactory.getClientSecret();

        requestCreateClient(this.uaaClient, clientId, clientSecret)
            .then(this.uaaClient.clients()
                .list(ListClientsRequest.builder()
                    .build()))
            .flatMapIterable(ListClientsResponse::getResources)
            .filter(client -> clientId.equals(client.getClientId()))
            .as(StepVerifier::create)
            .expectNextCount(1)
            .expectComplete()
            .verify(Duration.ofMinutes(5));
    }

    @Test
    public void listMetadatas() throws TimeoutException, InterruptedException {
        requestUpdateMetadata(this.uaaClient, this.clientId, "http://test.list.url")
            .then(this.uaaClient.clients()
                .listMetadatas(ListMetadatasRequest.builder()
                    .build()))
            .flatMapIterable(ListMetadatasResponse::getMetadatas)
            .filter(metadata -> this.clientId.equals(metadata.getClientId()))
            .single()
            .as(StepVerifier::create)
            .consumeNextWith(metadata -> {
                assertThat(metadata.getAppLaunchUrl()).isEqualTo("http://test.list.url");
                assertThat(metadata.getClientId()).isEqualTo(this.clientId);
            })
            .expectComplete()
            .verify(Duration.ofMinutes(5));
    }

    @Test
    public void mixedActions() throws TimeoutException, InterruptedException {
        String clientId1 = this.nameFactory.getClientId();
        String clientId2 = this.nameFactory.getClientId();
        String clientSecret = this.nameFactory.getClientSecret();
        String newClientSecret = this.nameFactory.getClientSecret();

        // One batch containing create/update/secret-change/delete actions;
        // only clientId1 survives, with the name set by the last action.
        this.uaaClient.clients()
            .mixedActions(MixedActionsRequest.builder()
                .action(CreateClientAction.builder()
                    .authorizedGrantType(PASSWORD)
                    .clientId(clientId1)
                    .clientSecret(clientSecret)
                    .name("test-name-old")
                    .build())
                .action(CreateClientAction.builder()
                    .authorizedGrantType(PASSWORD)
                    .clientId(clientId2)
                    .clientSecret(clientSecret)
                    .build())
                .action(UpdateClientAction.builder()
                    .authorizedGrantType(PASSWORD)
                    .clientId(clientId1)
                    .name("test-name-temporary")
                    .build())
                .action(UpdateSecretAction.builder()
                    .clientId(clientId2)
                    .secret(newClientSecret)
                    .build())
                .action(DeleteClientAction.builder()
                    .clientId(clientId2)
                    .build())
                .action(UpdateSecretClientAction.builder()
                    .authorizedGrantType(PASSWORD)
                    .name("test-name-new")
                    .clientId(clientId1)
                    .secret(newClientSecret)
                    .build())
                .build())
            .flatMap(ignore -> requestListClients(this.uaaClient))
            .filter(client -> clientId1.equals(client.getClientId()))
            .as(StepVerifier::create)
            .consumeNextWith(client -> assertThat(client.getName()).isEqualTo("test-name-new"))
            .expectComplete()
            .verify(Duration.ofMinutes(5));
    }

    @Test
    public void update() throws TimeoutException, InterruptedException {
        String clientId = this.nameFactory.getClientId();
        String clientSecret = this.nameFactory.getClientSecret();

        requestCreateClient(this.uaaClient, clientId, clientSecret)
            .then(this.uaaClient.clients()
                .update(UpdateClientRequest.builder()
                    .authorizedGrantType(CLIENT_CREDENTIALS)
                    .clientId(clientId)
                    .name("test-name")
                    .build()))
            .flatMap(ignore -> requestListClients(this.uaaClient))
            .filter(client -> clientId.equals(client.getClientId()))
            .as(StepVerifier::create)
            .consumeNextWith(response -> {
                assertThat(response.getAuthorizedGrantTypes()).containsExactly(CLIENT_CREDENTIALS);
                assertThat(response.getClientId()).isEqualTo(clientId);
                assertThat(response.getName()).isEqualTo("test-name");
            })
            .expectComplete()
            .verify(Duration.ofMinutes(5));
    }

    @Test
    public void updateMetadata() throws TimeoutException, InterruptedException {
        // App icon is submitted as base64-encoded bytes.
        String appIcon = Base64.getEncoder().encodeToString(new AsciiString("test-image").toByteArray());

        this.uaaClient.clients()
            .updateMetadata(UpdateMetadataRequest.builder()
                .appIcon(appIcon)
                .appLaunchUrl("http://test.app.launch.url")
                .clientId(this.clientId)
                .showOnHomePage(true)
                .clientName("test-name")
                .build())
            .then(requestGetMetadata(this.uaaClient, this.clientId))
            .as(StepVerifier::create)
            .consumeNextWith(metadata -> {
                assertThat(metadata.getAppIcon()).isEqualTo(appIcon);
                assertThat(metadata.getAppLaunchUrl()).isEqualTo("http://test.app.launch.url");
                assertThat(metadata.getClientId()).isEqualTo(this.clientId);
                assertThat(metadata.getClientName()).isEqualTo("test-name");
                assertThat(metadata.getShowOnHomePage()).isTrue();
            })
            .expectComplete()
            .verify(Duration.ofMinutes(5));
    }

    // Helper: batch-create two test clients with fixed scopes/salts.
    private static Mono<BatchCreateClientsResponse> batchCreateClients(UaaClient uaaClient, String clientId1, String clientId2, String clientSecret) {
        return uaaClient.clients()
            .batchCreate(BatchCreateClientsRequest.builder()
                .client(CreateClient.builder()
                    .approvalsDeleted(true)
                    .authorizedGrantType(PASSWORD)
                    .clientId(clientId1)
                    .clientSecret(clientSecret)
                    .scope("client.read", "client.write")
                    .tokenSalt("test-token-salt")
                    .build())
                .client(CreateClient.builder()
                    .approvalsDeleted(true)
                    .authorizedGrantType(PASSWORD, REFRESH_TOKEN)
                    .clientId(clientId2)
                    .clientSecret(clientSecret)
                    .scope("client.write")
                    .tokenSalt("alternate-test-token-salt")
                    .build())
                .build());
    }

    // Helper: create a single password-grant client.
    private static Mono<CreateClientResponse> requestCreateClient(UaaClient uaaClient, String clientId, String clientSecret) {
        return uaaClient.clients()
            .create(CreateClientRequest.builder()
                .authorizedGrantType(PASSWORD)
                .clientId(clientId)
                .clientSecret(clientSecret)
                .build());
    }

    // Helper: fetch the metadata record for a client.
    private static Mono<GetMetadataResponse> requestGetMetadata(UaaClient uaaClient, String clientId) {
        return uaaClient.clients()
            .getMetadata(GetMetadataRequest.builder()
                .clientId(clientId)
                .build());
    }

    // Helper: stream all clients across result pages.
    private static Flux<Client> requestListClients(UaaClient uaaClient) {
        return PaginationUtils
            .requestUaaResources(startIndex -> uaaClient.clients()
                .list(ListClientsRequest.builder()
                    .startIndex(startIndex)
                    .build()));
    }

    // Helper: set a client's app-launch-url metadata.
    private static Mono<UpdateMetadataResponse> requestUpdateMetadata(UaaClient uaaClient, String clientId, String appLaunchUrl) {
        return uaaClient.clients()
            .updateMetadata(UpdateMetadataRequest.builder()
                .appLaunchUrl(appLaunchUrl)
                .clientId(clientId)
                .build());
    }
}
| apache-2.0 |
sassoftware/conary | conary_test/cvctest/derivetest.py | 2849 | #
# Copyright (c) SAS Institute Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
from conary_test import recipes
from conary_test import rephelp
from conary import versions
from conary.build import derive
class DeriveTest(rephelp.RepositoryHelper):
    # Exercises derive.derive(): shadowing an existing binary package onto a
    # new label and generating a DerivedPackageRecipe checkout.

    def testDerivePackage(self):
        """Derive 'simple' onto localhost@rpl:branch with extraction and
        verify the generated recipe text and checkout layout."""
        # Seed the repository: two source revisions of 'simple' plus a built
        # binary trove (1-1-1) whose :runtime component ships /foo.
        self.addComponent('simple:source=1-1',
            [('simple.recipe', recipes.simpleRecipe)])
        self.addComponent('simple:runtime=1-1-1', [('/foo', 'contents\n')])
        self.addCollection('simple=1-1-1', [':runtime'])
        self.addComponent('simple:source=1-2',
            [('simple.recipe', recipes.simpleRecipe + '\n')])
        # extract=True unpacks the binary contents into _ROOT_ (working copy)
        # and _OLD_ROOT_ (pristine copy) inside the checkout directory.
        derive.derive(self.openRepository(), self.cfg,
            versions.Label('localhost@rpl:branch'),
            'simple=localhost@rpl:linux',
            checkoutDir = self.workDir + '/foo',
            extract = True)
        recipe = open(self.workDir + '/foo/simple.recipe').read()
        # The generated recipe must match the stock derived-recipe template.
        self.assertEquals(recipe, """
class SimpleRecipe(DerivedPackageRecipe):
name = 'simple'
version = '1'
def setup(r):
'''
In this recipe, you can make modifications to the package.
Examples:
# This appliance has high-memory-use PHP scripts
r.Replace('memory_limit = 8M', 'memory_limit = 32M', '/etc/php.ini')
# This appliance uses PHP as a command interpreter but does
# not include a web server, so remove the file that creates
# a dependency on the web server
r.Remove('/etc/httpd/conf.d/php.conf')
# This appliance requires that a few binaries be replaced
# with binaries built from a custom archive that includes
# a Makefile that honors the DESTDIR variable for its
# install target.
r.addArchive('foo.tar.gz')
r.Make()
r.MakeInstall()
# This appliance requires an extra configuration file
r.Create('/etc/myconfigfile', contents='some data')
'''
""")
        # Both the extracted root and the pristine copy carry the binary file.
        self.verifyFile(self.workDir + '/foo/_ROOT_/foo', 'contents\n')
        self.verifyFile(self.workDir + '/foo/_OLD_ROOT_/foo', 'contents\n')
        self.assertEquals(
            sorted(os.listdir(os.path.join(self.workDir, 'foo'))),
            sorted(['CONARY', '_ROOT_', 'simple.recipe', '_OLD_ROOT_']))
| apache-2.0 |
aws/aws-sdk-go | service/personalizeruntime/service.go | 3647 | // Code generated by private/model/cli/gen-api/main.go. DO NOT EDIT.
package personalizeruntime
import (
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/aws/client"
"github.com/aws/aws-sdk-go/aws/client/metadata"
"github.com/aws/aws-sdk-go/aws/request"
"github.com/aws/aws-sdk-go/aws/signer/v4"
"github.com/aws/aws-sdk-go/private/protocol"
"github.com/aws/aws-sdk-go/private/protocol/restjson"
)
// PersonalizeRuntime provides the API operation methods for making requests to
// Amazon Personalize Runtime. See this package's package overview docs
// for details on the service.
//
// PersonalizeRuntime methods are safe to use concurrently. It is not safe to
// modify mutate any of the struct's properties though.
type PersonalizeRuntime struct {
	*client.Client
}

// Used for custom client initialization logic; nil unless a customization
// sets it (checked in newClient before use).
var initClient func(*client.Client)

// Used for custom request initialization logic; nil unless a customization
// sets it (checked in newRequest before use).
var initRequest func(*request.Request)

// Service information constants
const (
	ServiceName = "Personalize Runtime" // Name of service.
	EndpointsID = "personalize-runtime" // ID to lookup a service endpoint with.
	ServiceID   = "Personalize Runtime" // ServiceID is a unique identifier of a specific service.
)
// New creates a new instance of the PersonalizeRuntime client with a session.
// If additional configuration is needed for the client instance use the optional
// aws.Config parameter to add your extra config.
//
// Example:
//     mySession := session.Must(session.NewSession())
//
//     // Create a PersonalizeRuntime client from just a session.
//     svc := personalizeruntime.New(mySession)
//
//     // Create a PersonalizeRuntime client with additional configuration
//     svc := personalizeruntime.New(mySession, aws.NewConfig().WithRegion("us-west-2"))
func New(p client.ConfigProvider, cfgs ...*aws.Config) *PersonalizeRuntime {
	c := p.ClientConfig(EndpointsID, cfgs...)
	// Requests are signed as service "personalize", which differs from the
	// endpoint id; only apply it when no explicit signing name was supplied.
	if c.SigningNameDerived || len(c.SigningName) == 0 {
		c.SigningName = "personalize"
	}
	return newClient(*c.Config, c.Handlers, c.PartitionID, c.Endpoint, c.SigningRegion, c.SigningName, c.ResolvedRegion)
}
// newClient creates, initializes and returns a new service client instance.
// It wires the client metadata and installs the REST-JSON protocol handlers
// for signing, marshaling, and unmarshaling, then runs any registered custom
// client initialization hook.
func newClient(cfg aws.Config, handlers request.Handlers, partitionID, endpoint, signingRegion, signingName, resolvedRegion string) *PersonalizeRuntime {
	svc := &PersonalizeRuntime{
		Client: client.New(
			cfg,
			metadata.ClientInfo{
				ServiceName:    ServiceName,
				ServiceID:      ServiceID,
				SigningName:    signingName,
				SigningRegion:  signingRegion,
				PartitionID:    partitionID,
				Endpoint:       endpoint,
				APIVersion:     "2018-05-22",
				ResolvedRegion: resolvedRegion,
			},
			handlers,
		),
	}

	// Handlers: SigV4 signing plus REST-JSON request/response processing.
	svc.Handlers.Sign.PushBackNamed(v4.SignRequestHandler)
	svc.Handlers.Build.PushBackNamed(restjson.BuildHandler)
	svc.Handlers.Unmarshal.PushBackNamed(restjson.UnmarshalHandler)
	svc.Handlers.UnmarshalMeta.PushBackNamed(restjson.UnmarshalMetaHandler)
	// Error responses are decoded into the service's typed exceptions.
	svc.Handlers.UnmarshalError.PushBackNamed(
		protocol.NewUnmarshalErrorHandler(restjson.NewUnmarshalTypedError(exceptionFromCode)).NamedHandler(),
	)

	// Run custom client initialization if present
	if initClient != nil {
		initClient(svc.Client)
	}

	return svc
}
// newRequest creates a new request for a PersonalizeRuntime operation and runs any
// custom request initialization.
func (c *PersonalizeRuntime) newRequest(op *request.Operation, params, data interface{}) *request.Request {
	r := c.NewRequest(op, params, data)
	if initRequest != nil {
		// Apply package-level custom request initialization, if configured.
		initRequest(r)
	}
	return r
}
| apache-2.0 |
xuwei-k/nscala-time | src/main/scala/com/github/nscala_time/time/RichLocalDateTimeProperty.scala | 1458 | /**
* Copyright 2009 Jorge Ortiz
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
**/
package com.github.nscala_time.time
import java.util.Locale
import org.joda.time._
import com.github.nscala_time.PimpedType
/**
 * Enriches [[org.joda.time.LocalDateTime.Property]] with concise, copy-based
 * accessors. Every operation delegates to the underlying Joda-Time property
 * and returns a new `LocalDateTime`; the wrapped instance is never mutated.
 */
class RichLocalDateTimeProperty(val underlying: LocalDateTime.Property) extends AnyVal
  with PimpedType[LocalDateTime.Property] {
  /** The `LocalDateTime` this property belongs to. */
  def localDateTime: LocalDateTime = underlying.getLocalDateTime
  // Rounding variants: roundDown/roundUp are deliberate aliases of roundFloor/roundCeiling.
  def roundFloor: LocalDateTime = underlying.roundFloorCopy
  def roundCeiling: LocalDateTime = underlying.roundCeilingCopy
  def roundDown: LocalDateTime = underlying.roundFloorCopy
  def roundUp: LocalDateTime = underlying.roundCeilingCopy
  /** Rounds half-even, matching Joda-Time's `roundHalfEvenCopy`. */
  def round: LocalDateTime = underlying.roundHalfEvenCopy
  /** Sets this field from an integer value, returning a copy. */
  def apply(value: Int): LocalDateTime = underlying.setCopy(value)
  /** Sets this field by parsing `text`, returning a copy. */
  def apply(text: String): LocalDateTime = underlying.setCopy(text)
  /** Sets this field by parsing `text` with the given locale, returning a copy. */
  def apply(text: String, locale: Locale): LocalDateTime = underlying.setCopy(text, locale)
}
| apache-2.0 |
nenko-tabakov/strongbox | strongbox-user-management/src/main/java/org/carlspring/strongbox/users/domain/User.java | 3398 | package org.carlspring.strongbox.users.domain;
import java.util.HashSet;
import java.util.Set;
import org.apache.commons.lang.StringUtils;
import org.carlspring.strongbox.data.domain.GenericEntity;
import com.google.common.base.MoreObjects;
import com.google.common.base.Objects;
/**
 * An application user.
 * <p>
 * NOTE(review): {@link #equals(Object)} and {@link #hashCode()} mirror each
 * other but neither includes {@code securityTokenKey}, so two users differing
 * only in their token key compare equal — confirm this is intended.
 */
public class User
        extends GenericEntity
{

    private String username;

    private String password;

    private boolean enabled;

    // Salt used when hashing the password.
    private String salt;

    // Names of the security roles granted to this user.
    private Set<String> roles;

    // Key used to generate/validate this user's security tokens.
    private String securityTokenKey;

    public User()
    {
        roles = new HashSet<>();
    }

    public User(String id,
                String username,
                String password,
                boolean enabled,
                String salt,
                Set<String> roles)
    {
        this.id = id;
        this.username = username;
        this.password = password;
        this.enabled = enabled;
        this.salt = salt;
        this.roles = roles;
    }

    public String getUsername()
    {
        return username;
    }

    public void setUsername(final String username)
    {
        this.username = username;
    }

    public String getPassword()
    {
        return password;
    }

    public void setPassword(final String password)
    {
        this.password = password;
    }

    public boolean isEnabled()
    {
        return enabled;
    }

    public void setEnabled(final boolean enabled)
    {
        this.enabled = enabled;
    }

    public String getSalt()
    {
        return salt;
    }

    public void setSalt(final String salt)
    {
        this.salt = salt;
    }

    public Set<String> getRoles()
    {
        return roles;
    }

    public void setRoles(final Set<String> roles)
    {
        this.roles = roles;
    }

    public String getSecurityTokenKey()
    {
        return securityTokenKey;
    }

    public void setSecurityTokenKey(String securityTokenKey)
    {
        this.securityTokenKey = securityTokenKey;
    }

    @Override
    public boolean equals(Object o)
    {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        User user = (User) o;
        return enabled == user.enabled &&
               Objects.equal(id, user.id) &&
               Objects.equal(username, user.username) &&
               Objects.equal(password, user.password) &&
               Objects.equal(salt, user.salt) &&
               Objects.equal(roles, user.roles) &&
               Objects.equal(detachAll, user.detachAll);
    }

    @Override
    public int hashCode()
    {
        return Objects.hashCode(id, username, password, enabled, salt, roles, detachAll);
    }

    /**
     * Returns a log-safe representation of this user. Credentials (password,
     * security token key) are masked and never emitted in plaintext.
     */
    @Override
    public String toString()
    {
        return MoreObjects.toStringHelper(this)
                          .add("id", getId())
                          .add("username", getUsername())
                          // Never expose the raw password in log output.
                          .add("password", StringUtils.isEmpty(getPassword()) ? "[EMPTY]" : "[SECRET]")
                          .add("enabled", isEnabled())
                          .add("salt", getSalt())
                          .add("roles", getRoles())
                          .add("securityToken", StringUtils.isEmpty(getSecurityTokenKey()) ? "[EMPTY]" : "[SECRET]")
                          .add("detachAll", detachAll)
                          .add("version", version)
                          .toString();
    }
}
| apache-2.0 |
Lab41/tinkerpop3 | gremlin-algorithm/src/main/java/com/tinkerpop/gremlin/algorithm/generator/CommunityGenerator.java | 9757 | package com.tinkerpop.gremlin.algorithm.generator;
import com.tinkerpop.gremlin.structure.Edge;
import com.tinkerpop.gremlin.structure.Graph;
import com.tinkerpop.gremlin.structure.Vertex;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Random;
import java.util.Set;
import java.util.function.BiConsumer;
import java.util.function.Consumer;
import java.util.function.Supplier;
/**
 * Generates a synthetic network with a community structure, that is, several densely connected
 * sub-networks that are loosely connected with one another.
 * <p>
 * Community sizes and vertex out-degrees are drawn from configurable {@link Distribution}s;
 * the fraction of edges that cross community boundaries is controlled by the
 * cross-community percentage. Results are reproducible given the same seed supplier.
 *
 * @author Matthias Broecheler (me@matthiasb.com)
 * @author Stephen Mallette (http://stephen.genoprime.com)
 */
public class CommunityGenerator extends AbstractGenerator {
    public static final double DEFAULT_CROSS_COMMUNITY_PERCENTAGE = 0.1;
    public static final int DEFAULT_NUMBER_OF_COMMUNITIES = 2;

    // Distribution of community sizes (initialized against the vertex count).
    private final Distribution communitySize;
    // Distribution of per-vertex out-degrees (initialized against the edge count).
    private final Distribution edgeDegree;
    private final double crossCommunityPercentage;
    private final Iterable<Vertex> vertices;
    private final int expectedNumCommunities;
    private final int expectedNumEdges;
    // Seeded from the generator's seed supplier so runs are reproducible.
    private final Random random;

    private CommunityGenerator(final Graph g, final String label, final Optional<Consumer<Edge>> edgeProcessor,
                               final Optional<BiConsumer<Vertex, Map<String, Object>>> vertexProcessor,
                               final Supplier<Long> seedGenerator, final Distribution communitySize,
                               final Distribution edgeDegree, final double crossCommunityPercentage,
                               final Iterable<Vertex> vertices, final int expectedNumCommunities,
                               final int expectedNumEdges) {
        super(g, label, edgeProcessor, vertexProcessor, seedGenerator);
        random = new Random(this.seedSupplier.get());
        this.communitySize = communitySize;
        this.edgeDegree = edgeDegree;
        this.crossCommunityPercentage = crossCommunityPercentage;
        this.vertices = vertices;
        this.expectedNumCommunities = expectedNumCommunities;
        this.expectedNumEdges = expectedNumEdges;
    }

    /**
     * Generates a synthetic network for provided vertices in the given graph such that the provided expected number
     * of communities are generated with the specified expected number of edges.
     *
     * @return The actual number of edges generated. May be different from the expected number.
     */
    @Override
    public int generate() {
        int numVertices = SizableIterable.sizeOf(vertices);
        final Iterator<Vertex> iter = vertices.iterator();
        final ArrayList<ArrayList<Vertex>> communities = new ArrayList<>(expectedNumCommunities);
        final Distribution communityDist = communitySize.initialize(expectedNumCommunities, numVertices);
        final Map<String, Object> context = new HashMap<>();
        // Phase 1: partition the vertices into communities whose sizes are drawn
        // from the community-size distribution.
        while (iter.hasNext()) {
            final int nextSize = communityDist.nextValue(random);
            context.put("communityIndex", communities.size());
            final ArrayList<Vertex> community = new ArrayList<>(nextSize);
            for (int i = 0; i < nextSize && iter.hasNext(); i++) {
                community.add(processVertex(iter.next(), context));
            }
            if (!community.isEmpty()) communities.add(community);
        }

        final double inCommunityPercentage = 1.0 - crossCommunityPercentage;
        final Distribution degreeDist = edgeDegree.initialize(numVertices, expectedNumEdges);
        if (crossCommunityPercentage > 0 && communities.size() < 2)
            throw new IllegalArgumentException("Cannot have cross links with only one community");
        int addedEdges = 0;

        // Phase 2: for every vertex, draw a degree and add that many edges, each
        // either inside the vertex's community or crossing to another community.
        //System.out.println("Generating links on communities: "+communities.size());
        for (ArrayList<Vertex> community : communities) {
            for (Vertex v : community) {
                final int randomDegree = degreeDist.nextValue(random);
                // Cap the degree so enough distinct in-community targets exist.
                final int degree = Math.min(randomDegree, (int) Math.ceil((community.size() - 1) / inCommunityPercentage) - 1);
                final Set<Vertex> inlinks = new HashSet<>();
                final Set<Vertex> outlinks = new HashSet<>();
                for (int i = 0; i < degree; i++) {
                    Vertex selected = null;
                    if (random.nextDouble() < crossCommunityPercentage || (community.size() - 1 <= inlinks.size())) {
                        //Cross community
                        int tries = 0;
                        ArrayList<Vertex> othercomm = null;
                        // this limit on the number of tries prevents infinite loop where the selected vertex to
                        // link to doesn't exist given the nature and structure of the graph.
                        while (null == selected && tries < 100) {
                            // choose another community to connect to and make sure it's not in the current
                            // community of the current vertex
                            while (null == othercomm) {
                                othercomm = communities.get(random.nextInt(communities.size()));
                                if (othercomm.equals(community)) othercomm = null;
                            }
                            selected = othercomm.get(random.nextInt(othercomm.size()));
                            if (outlinks.contains(selected)) selected = null;
                            tries++;
                        }
                        // if tries expires then the value of selected is null in which case it should not be added.
                        if (selected != null) outlinks.add(selected);
                    } else {
                        //In community
                        int tries = 0;
                        while (selected == null && tries < 100) {
                            selected = community.get(random.nextInt(community.size()));
                            if (v.equals(selected) || inlinks.contains(selected)) selected = null;
                            tries++;
                        }
                        if (selected != null) inlinks.add(selected);
                    }
                    // only add an edge if the vertex was actually selected.
                    if (selected != null) {
                        addEdge(v, selected);
                        addedEdges++;
                    }
                }
            }
        }
        return addedEdges;
    }

    /** Entry point for configuring a {@link CommunityGenerator} over graph {@code g}. */
    public static Builder build(final Graph g) {
        return new Builder(g);
    }

    /** Fluent configuration for {@link CommunityGenerator}. */
    public static class Builder extends AbstractGeneratorBuilder<Builder> {
        private final Graph g;
        private Distribution communitySize = null;
        private Distribution edgeDegree = null;
        private double crossCommunityPercentage = DEFAULT_CROSS_COMMUNITY_PERCENTAGE;
        private Iterable<Vertex> vertices;
        private int expectedNumCommunities = DEFAULT_NUMBER_OF_COMMUNITIES;
        private int expectedNumEdges;

        private Builder(final Graph g) {
            super(Builder.class);
            this.g = g;
            // Default to all vertices of the graph and twice as many edges.
            final List<Vertex> allVertices = g.V().toList();
            this.vertices = allVertices;
            this.expectedNumEdges = allVertices.size() * 2;
        }

        public Builder verticesToGenerateEdgesFor(final Iterable<Vertex> vertices) {
            this.vertices = vertices;
            return this;
        }

        public Builder expectedNumCommunities(final int expectedNumCommunities) {
            this.expectedNumCommunities = expectedNumCommunities;
            return this;
        }

        public Builder expectedNumEdges(final int expectedNumEdges) {
            this.expectedNumEdges = expectedNumEdges;
            return this;
        }

        /**
         * Sets the distribution to be used to generate the sizes of communities.
         */
        public Builder communityDistribution(final Distribution community) {
            this.communitySize = community;
            return this;
        }

        /**
         * Sets the distribution to be used to generate the out-degrees of vertices.
         */
        public Builder degreeDistribution(final Distribution degree) {
            this.edgeDegree = degree;
            return this;
        }

        /**
         * Sets the percentage of edges that cross a community, i.e. connect a vertex to a vertex in
         * another community. The lower this value, the higher the modularity of the generated communities.
         *
         * @param percentage Percentage of community crossing edges. Must be in [0,1]
         */
        public Builder crossCommunityPercentage(final double percentage) {
            if (percentage < 0.0 || percentage > 1.0)
                throw new IllegalArgumentException("Percentage must be between 0 and 1");
            this.crossCommunityPercentage = percentage;
            return this;
        }

        /** Builds the generator; both distributions must have been set. */
        public CommunityGenerator create() {
            if (null == communitySize)
                throw new IllegalStateException("Need to initialize community size distribution");
            if (null == edgeDegree) throw new IllegalStateException("Need to initialize degree distribution");
            return new CommunityGenerator(this.g, this.label, this.edgeProcessor, this.vertexProcessor, this.seedSupplier,
                                          this.communitySize, this.edgeDegree, crossCommunityPercentage, vertices,
                                          expectedNumCommunities, expectedNumEdges);
        }
    }
}
| apache-2.0 |
evernym/zeno | plenum/test/msgs.py | 572 | from plenum.common.messages.fields import NonEmptyStringField
from plenum.common.messages.message_base import MessageBase
from plenum.common.messages.node_message_factory import node_message_factory
from plenum.common.util import randomString
def randomMsg():
    """Build a TestMsg whose subject and content carry random suffixes."""
    subject = 'subject ' + randomString()
    content = 'content ' + randomString()
    return TestMsg(subject, content)
class TestMsg(MessageBase):
    """Minimal two-field message used to exercise node messaging in tests."""
    # Wire-level type discriminator used by the message factory.
    typename = "TESTMSG"
    # Both fields are required non-empty strings.
    schema = (
        ("subject", NonEmptyStringField()),
        ("content", NonEmptyStringField()),
    )


# Register TestMsg so the factory can construct TESTMSG payloads by typename.
node_message_factory.set_message_class(TestMsg)
| apache-2.0 |
cmsni/.NET-SDK | Backendless.Test/PersistenceService/Entities/PrimitiveEntities/DateEntity.cs | 864 | using System;
using BackendlessAPI.Test.PersistenceService.Entities.BaseEntities;
namespace BackendlessAPI.Test.PersistenceService.Entities.PrimitiveEntities
{
public class DateEntity: CreatedEntity
{
  // Single DateTime payload used to exercise date persistence round-trips.
  public DateTime DateField { get; set; }

  protected bool Equals( DateEntity other )
  {
    // Equal when the inherited (created) state matches and the dates match.
    return base.Equals( other ) && DateField.Equals( other.DateField );
  }

  public override bool Equals( object obj )
  {
    if( ReferenceEquals( this, obj ) )
      return true;

    if( ReferenceEquals( null, obj ) || obj.GetType() != GetType() )
      return false;

    return Equals( (DateEntity) obj );
  }

  public override int GetHashCode()
  {
    unchecked
    {
      // Combine the base hash with the date's hash; overflow is intentional.
      int hash = base.GetHashCode() * 397;
      return hash ^ DateField.GetHashCode();
    }
  }
}
}
| apache-2.0 |
UIKit0/jsyn | tests/com/jsyn/benchmarks/BenchJSyn.java | 7537 | /*
* Copyright 2013 Phil Burk, Mobileer Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
*
*/
package com.jsyn.benchmarks;
import com.jsyn.JSyn;
import com.jsyn.Synthesizer;
import com.jsyn.unitgen.PassThrough;
import com.jsyn.unitgen.PitchDetector;
import com.jsyn.unitgen.SawtoothOscillator;
import com.jsyn.unitgen.SawtoothOscillatorBL;
import com.jsyn.unitgen.SawtoothOscillatorDPW;
import com.jsyn.unitgen.SineOscillator;
import com.jsyn.unitgen.SquareOscillator;
import com.jsyn.unitgen.SquareOscillatorBL;
import com.jsyn.unitgen.UnitOscillator;
import com.softsynth.math.FourierMath;
/**
 * Micro-benchmarks for JSyn oscillators, the pitch detector and the FFT.
 * <p>
 * Each benchmark renders audio in non-real-time mode and reports wall-clock
 * time as a percentage of the audio time rendered.
 * <p>
 * NOTE(review): the {@code assert} statements below only execute when the JVM
 * is started with {@code -ea}; confirm that is intended for these checks.
 *
 * @author Phil Burk (C) 2013 Mobileer Inc
 */
public class BenchJSyn {
    private Synthesizer synth;
    private long startTime;
    private long endTime;
    // Shared sink that all benchmarked units connect their outputs to.
    private PassThrough pass;

    /** Runs the full benchmark suite several times to expose warm-up effects. */
    public void run() {
        try {
            // Run multiple times to see if HotSpot compiler or cache makes a difference.
            for (int i = 0; i < 4; i++) {
                benchmark();
            }
        } catch (InstantiationException e) {
            e.printStackTrace();
        } catch (IllegalAccessException e) {
            e.printStackTrace();
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
    }

    /** One benchmark pass: several oscillator types plus the pitch detector. */
    private void benchmark() throws InstantiationException, IllegalAccessException,
            InterruptedException {
        double realTime = 10.0;
        int count = 40;
        // benchFFTDouble();
        // benchFFTFloat();
        /*
         * realTime = 20.0; benchmarkOscillator(SawtoothOscillator.class, count, realTime);
         * benchmarkOscillator(SawtoothOscillatorDPW.class, count, realTime);
         * benchmarkOscillator(SawtoothOscillatorBL.class, count, realTime);
         */
        benchmarkOscillator(SquareOscillator.class, count, realTime);
        benchmarkOscillator(SquareOscillatorBL.class, count, realTime);
        benchmarkOscillator(SineOscillator.class, count, realTime);
        benchmarkPitchDetector(count, realTime);
    }

    /** Times repeated double-precision FFTs of a single-bin sine and sanity-checks the spectrum. */
    public void benchFFTDouble() {
        int size = 2048;
        int bin = 5;
        int count = 20000;
        double[] ar = new double[size];
        double[] ai = new double[size];
        double[] magnitudes = new double[size];
        double amplitude = 1.0;
        addSineWave(size, bin, ar, amplitude);
        System.out.println("Bench double FFT");
        startTiming();
        for (int i = 0; i < count; i++) {
            FourierMath.transform(1, size, ar, ai);
        }
        endTiming(FourierMath.class, count, size / (2.0 * 44100));
        FourierMath.calculateMagnitudes(ar, ai, magnitudes);
        // Energy should be concentrated in the target bin only.
        assert (magnitudes[bin - 1] < 0.001);
        assert (magnitudes[bin] > 0.5);
        assert (magnitudes[bin + 1] < 0.001);
    }

    /** Single-precision twin of {@link #benchFFTDouble()}. */
    public void benchFFTFloat() {
        int size = 2048;
        int bin = 5;
        int count = 20000;
        float[] ar = new float[size];
        float[] ai = new float[size];
        float[] magnitudes = new float[size];
        float amplitude = 1.0f;
        addSineWave(size, bin, ar, amplitude);
        System.out.println("Bench float FFT");
        startTiming();
        for (int i = 0; i < count; i++) {
            FourierMath.transform(1, size, ar, ai);
        }
        endTiming(FourierMath.class, count, size / (2.0 * 44100));
        FourierMath.calculateMagnitudes(ar, ai, magnitudes);
        assert (magnitudes[bin - 1] < 0.001);
        assert (magnitudes[bin] > 0.5);
        assert (magnitudes[bin + 1] < 0.001);
    }

    // Accumulate one sine cycle landing exactly on FFT bin `bin` (double buffer).
    private void addSineWave(int size, int bin, double[] ar, double amplitude) {
        double phase = 0.0;
        double phaseIncrement = 2.0 * Math.PI * bin / size;
        for (int i = 0; i < size; i++) {
            ar[i] += Math.sin(phase) * amplitude;
            // System.out.println( i + " = " + ar[i] );
            phase += phaseIncrement;
        }
    }

    // Accumulate one sine cycle landing exactly on FFT bin `bin` (float buffer).
    private void addSineWave(int size, int bin, float[] ar, float amplitude) {
        float phase = 0.0f;
        float phaseIncrement = (float) (2.0 * Math.PI * bin / size);
        for (int i = 0; i < size; i++) {
            ar[i] += (float) Math.sin(phase) * amplitude;
            // System.out.println( i + " = " + ar[i] );
            phase += phaseIncrement;
        }
    }

    private void stopSynth() {
        synth.stop();
    }

    // Create a fresh synthesizer in non-real-time mode with a PassThrough sink.
    private void startSynth() {
        synth = JSyn.createSynthesizer(); // Mac
        // synth = JSyn.createSynthesizer( new JSynAndroidAudioDevice() ); // Android
        synth.setRealTime(false);
        pass = new PassThrough();
        synth.add(pass);
        synth.start();
        pass.start();
    }

    /**
     * Times {@code count} oscillators of the given class rendering
     * {@code realTime} seconds of audio.
     */
    private void benchmarkOscillator(Class<?> clazz, int count, double realTime)
            throws InstantiationException, IllegalAccessException, InterruptedException {
        startSynth();
        for (int i = 0; i < count; i++) {
            UnitOscillator osc = (UnitOscillator) clazz.newInstance();
            osc.output.connect(pass.input);
            synth.add(osc);
        }
        startTiming();
        synth.sleepFor(realTime);
        endTiming(clazz, count, realTime);
        stopSynth();
    }

    /**
     * Times the pitch detector fed by a {@code count}-partial harmonic tone and
     * checks the detected period against the synthesized one.
     */
    private void benchmarkPitchDetector(int count, double realTime) throws InstantiationException,
            IllegalAccessException, InterruptedException {
        startSynth();
        PitchDetector detector = new PitchDetector();
        synth.add(detector);
        double frequency = 198.0;
        double period = synth.getFrameRate() / frequency;
        // simple harmonic synthesis
        for (int i = 0; i < count; i++) {
            SineOscillator osc = new SineOscillator();
            synth.add(osc);
            osc.frequency.set(frequency * (i + 1));
            osc.amplitude.set(0.5 * (1.0 - (i * 0.2)));
            osc.output.connect(detector.input);
        }
        detector.start();
        startTiming();
        synth.sleepFor(realTime);
        endTiming(PitchDetector.class, count, realTime);
        double measuredPeriod = detector.period.getValue();
        double confidence = detector.confidence.getValue();
        System.out.println("period = " + period + ", measured = " + measuredPeriod
                + ", confidence = " + confidence);
        // Only trust the period measurement when the detector is confident.
        if (confidence > 0.1) {
            assert (Math.abs(measuredPeriod - period) < 0.1);
        }
        stopSynth();
    }

    // Report elapsed wall-clock time as a percentage of the audio time rendered.
    private void endTiming(Class<?> clazz, int count, double realTime) {
        endTime = System.nanoTime();
        double elapsedTime = (endTime - startTime) * 1E-9;
        double percent = 100.0 * elapsedTime / (realTime * count);
        System.out.printf("%32s took %5.3f/%d seconds to process %5.4f of audio = %6.3f%c.\n",
                clazz.getSimpleName(), elapsedTime, count, realTime, percent, '%');
    }

    private void startTiming() {
        startTime = System.nanoTime();
    }

    /**
     * @param args
     */
    public static void main(String[] args) {
        new BenchJSyn().run();
    }
}
| apache-2.0 |
gotostack/neutron-lbaas | neutron_lbaas/tests/unit/services/loadbalancer/agent/test_agent_manager.py | 16595 | # Copyright 2013 New Dream Network, LLC (DreamHost)
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import contextlib
import mock
from neutron.plugins.common import constants
from neutron_lbaas.services.loadbalancer.agent import agent_manager as manager
from neutron_lbaas.tests import base
class TestManager(base.BaseTestCase):
def setUp(self):
super(TestManager, self).setUp()
mock_conf = mock.Mock()
mock_conf.device_driver = ['devdriver']
self.mock_importer = mock.patch.object(manager, 'importutils').start()
rpc_mock_cls = mock.patch(
'neutron_lbaas.services.loadbalancer.agent.agent_api.LbaasAgentApi'
).start()
# disable setting up periodic state reporting
mock_conf.AGENT.report_interval = 0
self.mgr = manager.LbaasAgentManager(mock_conf)
self.rpc_mock = rpc_mock_cls.return_value
self.log = mock.patch.object(manager, 'LOG').start()
self.driver_mock = mock.Mock()
self.mgr.device_drivers = {'devdriver': self.driver_mock}
self.mgr.instance_mapping = {'1': 'devdriver', '2': 'devdriver'}
self.mgr.needs_resync = False
def test_initialize_service_hook(self):
with mock.patch.object(self.mgr, 'sync_state') as sync:
self.mgr.initialize_service_hook(mock.Mock())
sync.assert_called_once_with()
def test_periodic_resync_needs_sync(self):
with mock.patch.object(self.mgr, 'sync_state') as sync:
self.mgr.needs_resync = True
self.mgr.periodic_resync(mock.Mock())
sync.assert_called_once_with()
def test_periodic_resync_no_sync(self):
with mock.patch.object(self.mgr, 'sync_state') as sync:
self.mgr.needs_resync = False
self.mgr.periodic_resync(mock.Mock())
self.assertFalse(sync.called)
def test_collect_stats(self):
self.mgr.collect_stats(mock.Mock())
self.rpc_mock.update_pool_stats.assert_has_calls([
mock.call('1', mock.ANY),
mock.call('2', mock.ANY)
], any_order=True)
def test_collect_stats_exception(self):
self.driver_mock.get_stats.side_effect = Exception
self.mgr.collect_stats(mock.Mock())
self.assertFalse(self.rpc_mock.called)
self.assertTrue(self.mgr.needs_resync)
self.assertTrue(self.log.exception.called)
def _sync_state_helper(self, ready, reloaded, destroyed):
with contextlib.nested(
mock.patch.object(self.mgr, '_reload_pool'),
mock.patch.object(self.mgr, '_destroy_pool')
) as (reload, destroy):
self.rpc_mock.get_ready_devices.return_value = ready
self.mgr.sync_state()
self.assertEqual(len(reloaded), len(reload.mock_calls))
self.assertEqual(len(destroyed), len(destroy.mock_calls))
reload.assert_has_calls([mock.call(i) for i in reloaded],
any_order=True)
destroy.assert_has_calls([mock.call(i) for i in destroyed],
any_order=True)
self.assertFalse(self.mgr.needs_resync)
def test_sync_state_all_known(self):
self._sync_state_helper(['1', '2'], ['1', '2'], [])
def test_sync_state_all_unknown(self):
self.mgr.instance_mapping = {}
self._sync_state_helper(['1', '2'], ['1', '2'], [])
def test_sync_state_destroy_all(self):
self._sync_state_helper([], [], ['1', '2'])
def test_sync_state_both(self):
self.mgr.instance_mapping = {'1': 'devdriver'}
self._sync_state_helper(['2'], ['2'], ['1'])
def test_sync_state_exception(self):
self.rpc_mock.get_ready_devices.side_effect = Exception
self.mgr.sync_state()
self.assertTrue(self.log.exception.called)
self.assertTrue(self.mgr.needs_resync)
def test_reload_pool(self):
config = {'driver': 'devdriver'}
self.rpc_mock.get_logical_device.return_value = config
pool_id = 'new_id'
self.assertNotIn(pool_id, self.mgr.instance_mapping)
self.mgr._reload_pool(pool_id)
self.driver_mock.deploy_instance.assert_called_once_with(config)
self.assertIn(pool_id, self.mgr.instance_mapping)
self.rpc_mock.pool_deployed.assert_called_once_with(pool_id)
def test_reload_pool_driver_not_found(self):
config = {'driver': 'unknown_driver'}
self.rpc_mock.get_logical_device.return_value = config
pool_id = 'new_id'
self.assertNotIn(pool_id, self.mgr.instance_mapping)
self.mgr._reload_pool(pool_id)
self.assertTrue(self.log.error.called)
self.assertFalse(self.driver_mock.deploy_instance.called)
self.assertNotIn(pool_id, self.mgr.instance_mapping)
self.assertFalse(self.rpc_mock.pool_deployed.called)
def test_reload_pool_exception_on_driver(self):
config = {'driver': 'devdriver'}
self.rpc_mock.get_logical_device.return_value = config
self.driver_mock.deploy_instance.side_effect = Exception
pool_id = 'new_id'
self.assertNotIn(pool_id, self.mgr.instance_mapping)
self.mgr._reload_pool(pool_id)
self.driver_mock.deploy_instance.assert_called_once_with(config)
self.assertNotIn(pool_id, self.mgr.instance_mapping)
self.assertFalse(self.rpc_mock.pool_deployed.called)
self.assertTrue(self.log.exception.called)
self.assertTrue(self.mgr.needs_resync)
def test_destroy_pool(self):
pool_id = '1'
self.assertIn(pool_id, self.mgr.instance_mapping)
self.mgr._destroy_pool(pool_id)
self.driver_mock.undeploy_instance.assert_called_once_with(
pool_id, delete_namespace=True)
self.assertNotIn(pool_id, self.mgr.instance_mapping)
self.rpc_mock.pool_destroyed.assert_called_once_with(pool_id)
self.assertFalse(self.mgr.needs_resync)
def test_destroy_pool_exception_on_driver(self):
pool_id = '1'
self.assertIn(pool_id, self.mgr.instance_mapping)
self.driver_mock.undeploy_instance.side_effect = Exception
self.mgr._destroy_pool(pool_id)
self.driver_mock.undeploy_instance.assert_called_once_with(
pool_id, delete_namespace=True)
self.assertIn(pool_id, self.mgr.instance_mapping)
self.assertFalse(self.rpc_mock.pool_destroyed.called)
self.assertTrue(self.log.exception.called)
self.assertTrue(self.mgr.needs_resync)
def test_get_driver_unknown_device(self):
self.assertRaises(manager.DeviceNotFoundOnAgent,
self.mgr._get_driver, 'unknown')
def test_remove_orphans(self):
self.mgr.remove_orphans()
orphans = {'1': "Fake", '2': "Fake"}
self.driver_mock.remove_orphans.assert_called_once_with(orphans.keys())
def test_create_vip(self):
vip = {'id': 'id1', 'pool_id': '1'}
self.mgr.create_vip(mock.Mock(), vip)
self.driver_mock.create_vip.assert_called_once_with(vip)
self.rpc_mock.update_status.assert_called_once_with('vip', vip['id'],
constants.ACTIVE)
def test_create_vip_failed(self):
vip = {'id': 'id1', 'pool_id': '1'}
self.driver_mock.create_vip.side_effect = Exception
self.mgr.create_vip(mock.Mock(), vip)
self.driver_mock.create_vip.assert_called_once_with(vip)
self.rpc_mock.update_status.assert_called_once_with('vip', vip['id'],
constants.ERROR)
def test_update_vip(self):
old_vip = {'id': 'id1'}
vip = {'id': 'id1', 'pool_id': '1'}
self.mgr.update_vip(mock.Mock(), old_vip, vip)
self.driver_mock.update_vip.assert_called_once_with(old_vip, vip)
self.rpc_mock.update_status.assert_called_once_with('vip', vip['id'],
constants.ACTIVE)
def test_update_vip_failed(self):
old_vip = {'id': 'id1'}
vip = {'id': 'id1', 'pool_id': '1'}
self.driver_mock.update_vip.side_effect = Exception
self.mgr.update_vip(mock.Mock(), old_vip, vip)
self.driver_mock.update_vip.assert_called_once_with(old_vip, vip)
self.rpc_mock.update_status.assert_called_once_with('vip', vip['id'],
constants.ERROR)
def test_delete_vip(self):
vip = {'id': 'id1', 'pool_id': '1'}
self.mgr.delete_vip(mock.Mock(), vip)
self.driver_mock.delete_vip.assert_called_once_with(vip)
def test_create_pool(self):
pool = {'id': 'id1'}
self.assertNotIn(pool['id'], self.mgr.instance_mapping)
self.mgr.create_pool(mock.Mock(), pool, 'devdriver')
self.driver_mock.create_pool.assert_called_once_with(pool)
self.rpc_mock.update_status.assert_called_once_with('pool', pool['id'],
constants.ACTIVE)
self.assertIn(pool['id'], self.mgr.instance_mapping)
def test_create_pool_failed(self):
pool = {'id': 'id1'}
self.assertNotIn(pool['id'], self.mgr.instance_mapping)
self.driver_mock.create_pool.side_effect = Exception
self.mgr.create_pool(mock.Mock(), pool, 'devdriver')
self.driver_mock.create_pool.assert_called_once_with(pool)
self.rpc_mock.update_status.assert_called_once_with('pool', pool['id'],
constants.ERROR)
self.assertNotIn(pool['id'], self.mgr.instance_mapping)
def test_update_pool(self):
old_pool = {'id': '1'}
pool = {'id': '1'}
self.mgr.update_pool(mock.Mock(), old_pool, pool)
self.driver_mock.update_pool.assert_called_once_with(old_pool, pool)
self.rpc_mock.update_status.assert_called_once_with('pool', pool['id'],
constants.ACTIVE)
def test_update_pool_failed(self):
old_pool = {'id': '1'}
pool = {'id': '1'}
self.driver_mock.update_pool.side_effect = Exception
self.mgr.update_pool(mock.Mock(), old_pool, pool)
self.driver_mock.update_pool.assert_called_once_with(old_pool, pool)
self.rpc_mock.update_status.assert_called_once_with('pool', pool['id'],
constants.ERROR)
def test_delete_pool(self):
pool = {'id': '1'}
self.assertIn(pool['id'], self.mgr.instance_mapping)
self.mgr.delete_pool(mock.Mock(), pool)
self.driver_mock.delete_pool.assert_called_once_with(pool)
self.assertNotIn(pool['id'], self.mgr.instance_mapping)
def test_create_member(self):
member = {'id': 'id1', 'pool_id': '1'}
self.mgr.create_member(mock.Mock(), member)
self.driver_mock.create_member.assert_called_once_with(member)
self.rpc_mock.update_status.assert_called_once_with('member',
member['id'],
constants.ACTIVE)
def test_create_member_failed(self):
member = {'id': 'id1', 'pool_id': '1'}
self.driver_mock.create_member.side_effect = Exception
self.mgr.create_member(mock.Mock(), member)
self.driver_mock.create_member.assert_called_once_with(member)
self.rpc_mock.update_status.assert_called_once_with('member',
member['id'],
constants.ERROR)
def test_update_member(self):
old_member = {'id': 'id1'}
member = {'id': 'id1', 'pool_id': '1'}
self.mgr.update_member(mock.Mock(), old_member, member)
self.driver_mock.update_member.assert_called_once_with(old_member,
member)
self.rpc_mock.update_status.assert_called_once_with('member',
member['id'],
constants.ACTIVE)
def test_update_member_failed(self):
old_member = {'id': 'id1'}
member = {'id': 'id1', 'pool_id': '1'}
self.driver_mock.update_member.side_effect = Exception
self.mgr.update_member(mock.Mock(), old_member, member)
self.driver_mock.update_member.assert_called_once_with(old_member,
member)
self.rpc_mock.update_status.assert_called_once_with('member',
member['id'],
constants.ERROR)
def test_delete_member(self):
member = {'id': 'id1', 'pool_id': '1'}
self.mgr.delete_member(mock.Mock(), member)
self.driver_mock.delete_member.assert_called_once_with(member)
def test_create_monitor(self):
monitor = {'id': 'id1'}
assoc_id = {'monitor_id': monitor['id'], 'pool_id': '1'}
self.mgr.create_pool_health_monitor(mock.Mock(), monitor, '1')
self.driver_mock.create_pool_health_monitor.assert_called_once_with(
monitor, '1')
self.rpc_mock.update_status.assert_called_once_with('health_monitor',
assoc_id,
constants.ACTIVE)
def test_create_monitor_failed(self):
monitor = {'id': 'id1'}
assoc_id = {'monitor_id': monitor['id'], 'pool_id': '1'}
self.driver_mock.create_pool_health_monitor.side_effect = Exception
self.mgr.create_pool_health_monitor(mock.Mock(), monitor, '1')
self.driver_mock.create_pool_health_monitor.assert_called_once_with(
monitor, '1')
self.rpc_mock.update_status.assert_called_once_with('health_monitor',
assoc_id,
constants.ERROR)
def test_update_monitor(self):
monitor = {'id': 'id1'}
assoc_id = {'monitor_id': monitor['id'], 'pool_id': '1'}
self.mgr.update_pool_health_monitor(mock.Mock(), monitor, monitor, '1')
self.driver_mock.update_pool_health_monitor.assert_called_once_with(
monitor, monitor, '1')
self.rpc_mock.update_status.assert_called_once_with('health_monitor',
assoc_id,
constants.ACTIVE)
def test_update_monitor_failed(self):
    """A driver failure during monitor update reports ERROR status."""
    hm = {'id': 'id1'}
    expected_assoc = {'monitor_id': hm['id'], 'pool_id': '1'}
    self.driver_mock.update_pool_health_monitor.side_effect = Exception
    self.mgr.update_pool_health_monitor(mock.Mock(), hm, hm, '1')
    self.driver_mock.update_pool_health_monitor.assert_called_once_with(
        hm, hm, '1')
    self.rpc_mock.update_status.assert_called_once_with(
        'health_monitor', expected_assoc, constants.ERROR)
def test_delete_monitor(self):
    """Monitor deletion is forwarded verbatim to the device driver."""
    hm = dict(id='id1')
    self.mgr.delete_pool_health_monitor(mock.Mock(), hm, '1')
    self.driver_mock.delete_pool_health_monitor.assert_called_once_with(
        hm, '1')
def test_agent_disabled(self):
    """Administratively disabling the agent undeploys every instance."""
    payload = {'admin_state_up': False}
    self.mgr.agent_updated(mock.Mock(), payload)
    expected_calls = [mock.call('1', delete_namespace=True),
                      mock.call('2', delete_namespace=True)]
    self.driver_mock.undeploy_instance.assert_has_calls(
        expected_calls, any_order=True)
| apache-2.0 |
prabushi/devstudio-tooling-esb | plugins/org.wso2.developerstudio.eclipse.gmf.esb/src/org/wso2/developerstudio/eclipse/gmf/esb/PropertyGroupMediator.java | 4546 | /*
* Copyright (c) 2019, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.developerstudio.eclipse.gmf.esb;
import org.eclipse.emf.common.util.EList;
/**
* <!-- begin-user-doc -->
* A representation of the model object '<em><b>Property Group Mediator</b></em>'.
* <!-- end-user-doc -->
*
* <p>
* The following features are supported:
* </p>
* <ul>
* <li>{@link org.wso2.developerstudio.eclipse.gmf.esb.PropertyGroupMediator#getInputConnector <em>Input Connector</em>}</li>
* <li>{@link org.wso2.developerstudio.eclipse.gmf.esb.PropertyGroupMediator#getOutputConnector <em>Output Connector</em>}</li>
* <li>{@link org.wso2.developerstudio.eclipse.gmf.esb.PropertyGroupMediator#getProperties <em>Properties</em>}</li>
* </ul>
*
* @see org.wso2.developerstudio.eclipse.gmf.esb.EsbPackage#getPropertyGroupMediator()
* @model
* @generated
*/
public interface PropertyGroupMediator extends Mediator {
	/**
	 * Returns the value of the '<em><b>Input Connector</b></em>' containment reference.
	 * <!-- begin-user-doc -->
	 * <p>
	 * The input connector is the single entry point through which messages
	 * flow into this property group on the diagram canvas.
	 * </p>
	 * <!-- end-user-doc -->
	 * @return the value of the '<em>Input Connector</em>' containment reference.
	 * @see #setInputConnector(PropertyGroupMediatorInputConnector)
	 * @see org.wso2.developerstudio.eclipse.gmf.esb.EsbPackage#getPropertyGroupMediator_InputConnector()
	 * @model containment="true"
	 * @generated
	 */
	PropertyGroupMediatorInputConnector getInputConnector();

	/**
	 * Sets the value of the '{@link org.wso2.developerstudio.eclipse.gmf.esb.PropertyGroupMediator#getInputConnector <em>Input Connector</em>}' containment reference.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @param value the new value of the '<em>Input Connector</em>' containment reference.
	 * @see #getInputConnector()
	 * @generated
	 */
	void setInputConnector(PropertyGroupMediatorInputConnector value);

	/**
	 * Returns the value of the '<em><b>Output Connector</b></em>' containment reference.
	 * <!-- begin-user-doc -->
	 * <p>
	 * The output connector is the single exit point through which messages
	 * leave this property group on the diagram canvas.
	 * </p>
	 * <!-- end-user-doc -->
	 * @return the value of the '<em>Output Connector</em>' containment reference.
	 * @see #setOutputConnector(PropertyGroupMediatorOutputConnector)
	 * @see org.wso2.developerstudio.eclipse.gmf.esb.EsbPackage#getPropertyGroupMediator_OutputConnector()
	 * @model containment="true"
	 * @generated
	 */
	PropertyGroupMediatorOutputConnector getOutputConnector();

	/**
	 * Sets the value of the '{@link org.wso2.developerstudio.eclipse.gmf.esb.PropertyGroupMediator#getOutputConnector <em>Output Connector</em>}' containment reference.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @param value the new value of the '<em>Output Connector</em>' containment reference.
	 * @see #getOutputConnector()
	 * @generated
	 */
	void setOutputConnector(PropertyGroupMediatorOutputConnector value);

	/**
	 * Returns the value of the '<em><b>Properties</b></em>' containment reference list.
	 * The list contents are of type {@link org.wso2.developerstudio.eclipse.gmf.esb.PropertyMediator}.
	 * <!-- begin-user-doc -->
	 * <p>
	 * The ordered list of property mediators grouped inside this mediator.
	 * </p>
	 * <!-- end-user-doc -->
	 * @return the value of the '<em>Properties</em>' containment reference list.
	 * @see org.wso2.developerstudio.eclipse.gmf.esb.EsbPackage#getPropertyGroupMediator_Properties()
	 * @model containment="true"
	 * @generated
	 */
	EList<PropertyMediator> getProperties();

} // PropertyGroupMediator
| apache-2.0 |
Frameworkium/frameworkium | src/test/java/theinternet/pages/FileUploadSuccessPage.java | 621 | package theinternet.pages;
import com.frameworkium.core.ui.annotations.Visible;
import com.frameworkium.core.ui.pages.BasePage;
import io.qameta.allure.Step;
import org.openqa.selenium.WebElement;
import org.openqa.selenium.support.FindBy;
import ru.yandex.qatools.htmlelements.annotations.Name;
public class FileUploadSuccessPage extends BasePage<FileUploadSuccessPage> {
@Visible
@Name("Uploaded Files")
@FindBy(css = "div#uploaded-files")
private WebElement uploadedFiles;
@Step("Get uploaded files list")
public String getUploadedFiles() {
return uploadedFiles.getText();
}
}
| apache-2.0 |
ramonsmits/docs.particular.net | Snippets/Core/Core_8/Pipeline/SkipSerializationForInts.cs | 1111 | namespace Core8.Pipeline
{
using System;
using System.Threading.Tasks;
using NServiceBus;
using NServiceBus.Pipeline;
#region SkipSerialization
// Outgoing-pipeline behavior: when the logical message is an int, copy its
// value into the "MyCustomHeader" header and skip body serialization entirely.
class SkipSerializationForInts :
    Behavior<IOutgoingLogicalMessageContext>
{
    public override Task Invoke(IOutgoingLogicalMessageContext context, Func<Task> next)
    {
        var outgoingLogicalMessage = context.Message;
        if (outgoingLogicalMessage.MessageType == typeof(int))
        {
            // Carry the value in a header instead of the serialized body.
            var headers = context.Headers;
            headers["MyCustomHeader"] = outgoingLogicalMessage.Instance.ToString();
            context.SkipSerialization();
        }
        return next();
    }

    // Registers the behavior as a named step in the outgoing pipeline.
    public class Registration :
        RegisterStep
    {
        public Registration()
            : base(
                stepId: "SkipSerializationForInts",
                behavior: typeof(SkipSerializationForInts),
                description: "Skips serialization for integers")
        {
        }
    }
}
#endregion
} | apache-2.0 |
0xD34D/connectbot | src/sk/vx/connectbot/util/Colors.java | 4273 | /*
* ConnectBot: simple, powerful, open-source SSH client for Android
* Copyright 2007 Kenny Root, Jeffrey Sharkey
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package sk.vx.connectbot.util;
/**
* @author Kenny Root
*
*/
/**
 * Default terminal color palette, as ARGB ints. The first 16 entries are the
 * standard + bright ANSI colors; the remaining 240 entries follow the
 * xterm-256 layout: a 6x6x6 RGB cube (indices 16-231) and a 24-step grayscale
 * ramp (indices 232-255).
 */
public class Colors {
	public final static Integer[] defaults = new Integer[] {
		// 16 basic ANSI colors (normal + bright variants).
		0xff000000, // black
		0xffcc0000, // red
		0xff00cc00, // green
		0xffcccc00, // brown
		0xff0000cc, // blue
		0xffcc00cc, // purple
		0xff00cccc, // cyan
		0xffcccccc, // light grey
		0xff444444, // dark grey
		0xffff4444, // light red
		0xff44ff44, // light green
		0xffffff44, // yellow
		0xff4444ff, // light blue
		0xffff44ff, // light purple
		0xff44ffff, // light cyan
		0xffffffff, // white
		// 6x6x6 color cube, then the grayscale ramp (xterm-256 convention).
		0xff000000, 0xff00005f, 0xff000087, 0xff0000af, 0xff0000d7,
		0xff0000ff, 0xff005f00, 0xff005f5f, 0xff005f87, 0xff005faf,
		0xff005fd7, 0xff005fff, 0xff008700, 0xff00875f, 0xff008787,
		0xff0087af, 0xff0087d7, 0xff0087ff, 0xff00af00, 0xff00af5f,
		0xff00af87, 0xff00afaf, 0xff00afd7, 0xff00afff, 0xff00d700,
		0xff00d75f, 0xff00d787, 0xff00d7af, 0xff00d7d7, 0xff00d7ff,
		0xff00ff00, 0xff00ff5f, 0xff00ff87, 0xff00ffaf, 0xff00ffd7,
		0xff00ffff, 0xff5f0000, 0xff5f005f, 0xff5f0087, 0xff5f00af,
		0xff5f00d7, 0xff5f00ff, 0xff5f5f00, 0xff5f5f5f, 0xff5f5f87,
		0xff5f5faf, 0xff5f5fd7, 0xff5f5fff, 0xff5f8700, 0xff5f875f,
		0xff5f8787, 0xff5f87af, 0xff5f87d7, 0xff5f87ff, 0xff5faf00,
		0xff5faf5f, 0xff5faf87, 0xff5fafaf, 0xff5fafd7, 0xff5fafff,
		0xff5fd700, 0xff5fd75f, 0xff5fd787, 0xff5fd7af, 0xff5fd7d7,
		0xff5fd7ff, 0xff5fff00, 0xff5fff5f, 0xff5fff87, 0xff5fffaf,
		0xff5fffd7, 0xff5fffff, 0xff870000, 0xff87005f, 0xff870087,
		0xff8700af, 0xff8700d7, 0xff8700ff, 0xff875f00, 0xff875f5f,
		0xff875f87, 0xff875faf, 0xff875fd7, 0xff875fff, 0xff878700,
		0xff87875f, 0xff878787, 0xff8787af, 0xff8787d7, 0xff8787ff,
		0xff87af00, 0xff87af5f, 0xff87af87, 0xff87afaf, 0xff87afd7,
		0xff87afff, 0xff87d700, 0xff87d75f, 0xff87d787, 0xff87d7af,
		0xff87d7d7, 0xff87d7ff, 0xff87ff00, 0xff87ff5f, 0xff87ff87,
		0xff87ffaf, 0xff87ffd7, 0xff87ffff, 0xffaf0000, 0xffaf005f,
		0xffaf0087, 0xffaf00af, 0xffaf00d7, 0xffaf00ff, 0xffaf5f00,
		0xffaf5f5f, 0xffaf5f87, 0xffaf5faf, 0xffaf5fd7, 0xffaf5fff,
		0xffaf8700, 0xffaf875f, 0xffaf8787, 0xffaf87af, 0xffaf87d7,
		0xffaf87ff, 0xffafaf00, 0xffafaf5f, 0xffafaf87, 0xffafafaf,
		0xffafafd7, 0xffafafff, 0xffafd700, 0xffafd75f, 0xffafd787,
		0xffafd7af, 0xffafd7d7, 0xffafd7ff, 0xffafff00, 0xffafff5f,
		0xffafff87, 0xffafffaf, 0xffafffd7, 0xffafffff, 0xffd70000,
		0xffd7005f, 0xffd70087, 0xffd700af, 0xffd700d7, 0xffd700ff,
		0xffd75f00, 0xffd75f5f, 0xffd75f87, 0xffd75faf, 0xffd75fd7,
		0xffd75fff, 0xffd78700, 0xffd7875f, 0xffd78787, 0xffd787af,
		0xffd787d7, 0xffd787ff, 0xffd7af00, 0xffd7af5f, 0xffd7af87,
		0xffd7afaf, 0xffd7afd7, 0xffd7afff, 0xffd7d700, 0xffd7d75f,
		0xffd7d787, 0xffd7d7af, 0xffd7d7d7, 0xffd7d7ff, 0xffd7ff00,
		0xffd7ff5f, 0xffd7ff87, 0xffd7ffaf, 0xffd7ffd7, 0xffd7ffff,
		0xffff0000, 0xffff005f, 0xffff0087, 0xffff00af, 0xffff00d7,
		0xffff00ff, 0xffff5f00, 0xffff5f5f, 0xffff5f87, 0xffff5faf,
		0xffff5fd7, 0xffff5fff, 0xffff8700, 0xffff875f, 0xffff8787,
		0xffff87af, 0xffff87d7, 0xffff87ff, 0xffffaf00, 0xffffaf5f,
		0xffffaf87, 0xffffafaf, 0xffffafd7, 0xffffafff, 0xffffd700,
		0xffffd75f, 0xffffd787, 0xffffd7af, 0xffffd7d7, 0xffffd7ff,
		0xffffff00, 0xffffff5f, 0xffffff87, 0xffffffaf, 0xffffffd7,
		0xffffffff, 0xff080808, 0xff121212, 0xff1c1c1c, 0xff262626,
		0xff303030, 0xff3a3a3a, 0xff444444, 0xff4e4e4e, 0xff585858,
		0xff626262, 0xff6c6c6c, 0xff767676, 0xff808080, 0xff8a8a8a,
		0xff949494, 0xff9e9e9e, 0xffa8a8a8, 0xffb2b2b2, 0xffbcbcbc,
		0xffc6c6c6, 0xffd0d0d0, 0xffdadada, 0xffe4e4e4, 0xffeeeeee,
	};
}
| apache-2.0 |
xzturn/tensorflow | tensorflow/core/grappler/optimizers/common_subgraph_elimination.cc | 9990 | /* Copyright 2020 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#include "tensorflow/core/grappler/optimizers/common_subgraph_elimination.h"
#include <set>
#include <string>
#include <unordered_set>
#include <utility>
#include <vector>
#include "absl/container/flat_hash_map.h"
#include "absl/container/flat_hash_set.h"
#include "tensorflow/core/framework/attr_value_util.h"
#include "tensorflow/core/framework/graph.pb.h"
#include "tensorflow/core/framework/node_def.pb.h"
#include "tensorflow/core/graph/tensor_id.h"
#include "tensorflow/core/grappler/graph_topology_view.h"
#include "tensorflow/core/grappler/grappler_item.h"
#include "tensorflow/core/grappler/op_types.h"
#include "tensorflow/core/grappler/optimizers/graph_optimizer.h"
#include "tensorflow/core/grappler/utils.h"
#include "tensorflow/core/grappler/utils/canonicalizer.h"
#include "tensorflow/core/grappler/utils/topological_sort.h"
#include "tensorflow/core/grappler/utils/traversal.h"
#include "tensorflow/core/lib/gtl/flatset.h"
#include "tensorflow/core/platform/errors.h"
#include "tensorflow/core/platform/hash.h"
#include "tensorflow/core/platform/status.h"
#include "tensorflow/core/platform/strcat.h"
#include "tensorflow/core/platform/stringpiece.h"
#include "tensorflow/core/platform/types.h"
namespace tensorflow {
namespace grappler {
class Cluster;
} // namespace grappler
} // namespace tensorflow
using tensorflow::strings::StrCat;
namespace tensorflow {
namespace grappler {
class UniqueNodes {
public:
NodeDef* FindOrAddRepresentative(NodeDef* node) {
uint64 sig = ComputeSignature(*node);
std::vector<NodeDef*>& candidates = rep_[sig];
for (auto& candidate : candidates) {
if ((candidate == node) || SameNode(*candidate, *node)) {
return candidate;
}
}
candidates.push_back(node);
return node;
}
void RemoveRepresentative(NodeDef* node) {
auto it = memoized_signatures_.find(node);
if (it == memoized_signatures_.end()) return;
std::vector<NodeDef*>& candidates = rep_[it->second];
for (int i = 0; i < candidates.size(); ++i) {
if (candidates[i] == node) {
std::swap(candidates[i], candidates[candidates.size() - 1]);
candidates.resize(candidates.size() - 1);
break;
}
}
memoized_signatures_.erase(node);
}
private:
uint64 ComputeSignature(const NodeDef& node);
bool SameNode(const NodeDef& node1, const NodeDef& node2) const;
absl::flat_hash_map<uint64, std::vector<NodeDef*>> rep_;
absl::flat_hash_map<const NodeDef*, uint64> memoized_signatures_;
};
// Computes (and memoizes) a hash over the node's op, device, inputs, and
// attributes. Equal signatures make two nodes *candidates* for dedup;
// SameNode() performs the authoritative comparison.
uint64 UniqueNodes::ComputeSignature(const NodeDef& node) {
  auto it = memoized_signatures_.find(&node);
  if (it != memoized_signatures_.end()) return it->second;

  uint64 h = Hash64(node.op());
  h = Hash64Combine(Hash64(node.device()), h);

  // Inputs and attributes are folded in with an *unordered* combine so the
  // signature does not depend on their serialization order.
  for (const auto& input : node.input()) {
    const TensorId input_tensor = ParseTensorName(input);
    uint64 input_hash = Hash64Combine(
        Hash64(input_tensor.node().data(), input_tensor.node().size()),
        std::hash<int>()(input_tensor.index()));
    h = Hash64CombineUnordered(input_hash, h);
  }
  for (const auto& attr : node.attr()) {
    uint64 attr_hash =
        Hash64Combine(Hash64(attr.first), FastAttrValueHash(attr.second));
    h = Hash64CombineUnordered(attr_hash, h);
  }
  memoized_signatures_.emplace(&node, h);
  return h;
}
// PRECONDITION:
// Node input orders are assumed to be canonicalized, i.e. control inputs for
// all nodes as well as regular inputs for commutative nodes must be sorted,
// so inputs can be compared positionally.
bool UniqueNodes::SameNode(const NodeDef& node1, const NodeDef& node2) const {
  // Cheap scalar checks first: op, device, and collection sizes.
  if (node1.op() != node2.op() || node1.device() != node2.device() ||
      node1.input_size() != node2.input_size() ||
      node1.attr_size() != node2.attr_size()) {
    return false;
  }

  // Positional comparison of inputs (sizes already known to be equal).
  for (int idx = 0; idx < node1.input_size(); ++idx) {
    if (node1.input(idx) != node2.input(idx)) return false;
  }

  // Attribute maps have the same size, so one-way containment suffices.
  for (const auto& attr1 : node1.attr()) {
    auto it = node2.attr().find(attr1.first);
    if (it == node2.attr().end()) return false;
    if (!FastAreAttrValuesEqual(attr1.second, it->second)) return false;
  }
  return true;
}
// Decides whether `node` may be merged with an identical duplicate.
bool CommonSubgraphElimination::CanDedup(const NodeDef& node) const {
  // Nodes the caller asked to preserve, frame boundary nodes (Enter/Exit),
  // and nodes placed on devices matching "SPU" are never deduplicated.
  if (nodes_to_preserve_.find(node.name()) != nodes_to_preserve_.end() ||
      IsEnter(node) || IsExit(node) ||
      node.device().find("SPU") != string::npos) {
    return false;
  }
  // Assert and Print are mistakenly labeled as stateful; they are safe to
  // dedup. Everything else must be genuinely free of side effects.
  return IsAssert(node) || IsPrint(node) || IsFreeOfSideEffect(node);
}
// Merges structurally identical nodes: for each group of equivalent nodes a
// single representative is kept, all fanouts are rewired to it, and the
// duplicates are erased from the graph.
Status CommonSubgraphElimination::DedupComputations(GraphDef* optimized_graph) {
  CanonicalizeGraph(optimized_graph);

  GraphTopologyView graph_view;
  if (!graph_view.InitializeFromGraph(*optimized_graph).ok()) {
    LOG(WARNING) << "Failed to initialize GraphTopologyView.";
    return Status::OK();
  }

  // If either node or rep feeds an inplace op, deduping them may cause data
  // races. For example: If we dedup nodes initializing two independent
  // inplace accumulations, they will write to the same buffer, clobbering
  // each other's results.
  absl::flat_hash_set<const NodeDef*> feeds_inplace_op;
  for (int i = 0; i < optimized_graph->node_size(); ++i) {
    const NodeDef& root = optimized_graph->node(i);
    if (feeds_inplace_op.find(&root) != feeds_inplace_op.end()) continue;
    if (ModifiesInputsInPlace(root)) {
      const auto is_continue_traversal = [&](const NodeDef* node) -> bool {
        return node->op() == root.op() || !NeverForwardsInputs(*node);
      };

      DfsTraversal(graph_view, {&root}, TraversalDirection::kFollowInputs,
                   DfsPredicates::Advance(is_continue_traversal),
                   DfsCallbacks::PreOrder([&](const NodeDef* node) {
                     feeds_inplace_op.insert(node);
                   }));
    }
  }

  std::vector<bool> can_dedup(optimized_graph->node_size());
  for (int i = 0; i < optimized_graph->node_size(); ++i) {
    const NodeDef& node = optimized_graph->node(i);
    can_dedup[i] = (feeds_inplace_op.find(&node) == feeds_inplace_op.end()) &&
                   CanDedup(node);
  }

  bool stop = true;
  std::set<int> duplicates;
  UniqueNodes nodes;
  NodeMap node_map(optimized_graph);
  // Iterate until a fixed point: merging one pair may expose new duplicates.
  do {
    stop = true;
    for (int i = 0; i < optimized_graph->node_size(); ++i) {
      if (!can_dedup[i] || duplicates.find(i) != duplicates.end()) {
        continue;
      }
      NodeDef* node = optimized_graph->mutable_node(i);
      NodeDef* rep = nodes.FindOrAddRepresentative(node);
      if (rep == node) {
        continue;
      }
      // Rewire every fanout of `node` to consume `rep` instead.
      const std::set<NodeDef*>& tmp = node_map.GetOutputs(node->name());
      std::vector<NodeDef*> fanouts(tmp.begin(), tmp.end());
      for (NodeDef* fanout : fanouts) {
        // Update consumers of node. NOTE: the inner index is `j` so it does
        // not shadow the outer node index `i` used by duplicates.insert(i).
        bool updated_fanout = false;
        for (int j = 0; j < fanout->input_size(); ++j) {
          string* fanout_input = fanout->mutable_input(j);

          const int position =
              NodePositionIfSameNode(*fanout_input, node->name());
          // Skip inputs that do not reference `node` (position < -1).
          if (position < -1) {
            continue;
          }
          if (!updated_fanout) {
            // The signature of the fanout node will change. Remove it from
            // nodes.
            nodes.RemoveRepresentative(fanout);
          }
          updated_fanout = true;
          // Rewrite the input in-place, preserving output index / control
          // edge notation.
          if (position > 0) {
            *fanout_input = StrCat(rep->name(), ":", position);
          } else if (position == 0) {
            *fanout_input = rep->name();
          } else {
            *fanout_input = StrCat("^", rep->name());
          }
        }
        if (updated_fanout) {
          node_map.UpdateInput(fanout->name(), node->name(), rep->name());
          CanonicalizeNode(fanout);
        }
      }
      duplicates.insert(i);
      stop = false;
    }
  } while (!stop);

  // Delete duplicates
  if (fetch_nodes_known_ && !duplicates.empty()) {
    EraseNodesFromGraph(duplicates, optimized_graph);
  }
  return Status::OK();
}
// Entry point of the optimizer: copies the item's graph, topologically sorts
// it, and runs duplicate-subgraph elimination on the copy.
Status CommonSubgraphElimination::Optimize(Cluster* /*cluster*/,
                                           const GrapplerItem& item,
                                           GraphDef* optimized_graph) {
  // Set up helper data structures.
  nodes_to_preserve_ = item.NodesToPreserve();
  fetch_nodes_known_ = !item.fetch.empty();
  *optimized_graph = item.graph;

  // Perform topological sort on the graph in order to help DedupComputations
  // optimize larger subgraphs starting from the roots with more inputs.
  TF_RETURN_IF_ERROR(TopologicalSort(optimized_graph));
  GRAPPLER_RETURN_IF_DEADLINE_EXCEEDED();

  return DedupComputations(optimized_graph);
}
void CommonSubgraphElimination::Feedback(Cluster* /*cluster*/,
                                         const GrapplerItem& /*item*/,
                                         const GraphDef& /*optimized_graph*/,
                                         double /*result*/) {
  // Nothing to do for CommonSubgraphElimination: it does not learn from
  // feedback about previous optimization results.
}
} // namespace grappler
} // namespace tensorflow
| apache-2.0 |
philoserf/knife-proxmox | lib/chef/knife/proxmox_template_list.rb | 1153 | require 'chef/knife/proxmox_base'
class Chef
  class Knife
    # Knife plugin command that lists the templates/images available in the
    # configured Proxmox node's "local" storage (ISO images are excluded).
    class ProxmoxTemplateList < Knife
      include Knife::ProxmoxBase

      banner "knife proxmox template list (options)"

      # Queries the Proxmox REST API and prints one row per non-ISO storage
      # entry: sequential id, format, volume id, and size in megabytes.
      def run
        # Needed to initialize @connection and @auth_params
        connection
        # Flat array rendered 4 columns across; first four cells are headers.
        template_list = [
          ui.color('Id' , :bold),
          ui.color('Type', :bold),
          ui.color('Name', :bold),
          ui.color('Size', :bold)
        ]
        @connection["nodes/#{Chef::Config[:knife][:pve_node_name]}/storage/local/content"].get @auth_params do |response, request, result, &block|
          template_index = 0
          JSON.parse(response.body)['data'].each { |entry|
            # ISO images are listed by a separate command; skip them here.
            if entry['content'] != 'iso' then
              template_list << template_index.to_s
              template_list << entry['format']
              template_list << entry['volid']
              # Size is reported in bytes; convert to whole megabytes.
              template_list << (entry['size'].to_i/1048576).to_s + " MB"
              template_index+=1
            end
          }
        end
        puts ui.list(template_list, :uneven_columns_across, 4)
      end
    end
  end
end
| apache-2.0 |
39mi/jtd | src/protocpl/CommandSunTime.java | 347 | package protocpl;
import mina.CommandBase;
import mina.ICmdParser;
import mina.CmdFactoryBase.MONITOR_CMD_TYPE;
public class CommandSunTime extends CommandBase {
public CommandSunTime(ICmdParser parser, byte[] data) {
super(parser, data);
// TODO Auto-generated constructor stub
m_eCmdType = MONITOR_CMD_TYPE.MONITOR_CMD_SUN_TIME;
}
}
| apache-2.0 |
consulo/consulo-spring | webflow/src/com/intellij/spring/webflow/model/converters/WebflowScopeReference.java | 1560 | package com.intellij.spring.webflow.model.converters;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiReferenceBase;
import com.intellij.psi.impl.FakePsiElement;
import com.intellij.util.xml.GenericDomValue;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.Function;
import com.intellij.openapi.util.TextRange;
import com.intellij.openapi.module.Module;
import com.intellij.spring.webflow.el.WebflowScopeProvider;
import com.intellij.spring.webflow.el.WebflowScopeProviderManager;
import java.util.List;
/**
* User: Sergey.Vasiliev
*/
/**
 * PSI reference for a Webflow scope name (e.g. "flowScope") inside an EL
 * expression. Completion variants are the scope names offered by the
 * {@link WebflowScopeProvider}s available for the underlying DOM value.
 *
 * User: Sergey.Vasiliev
 */
public class WebflowScopeReference extends PsiReferenceBase<PsiElement> {
  private final PsiElement myElement;
  private final GenericDomValue myDomValue;

  public WebflowScopeReference(final PsiElement element, final TextRange range, final GenericDomValue domValue) {
    // "true" marks this as a soft reference: it never highlights as an error.
    super(element, range, true);
    myElement = element;
    myDomValue = domValue;
  }

  // Always resolves to a fresh fake element parented to the host element, so
  // the reference is considered valid regardless of the actual scope name.
  public PsiElement resolve() {
    return new FakePsiElement() {
      public PsiElement getParent() {
        return myElement;
      }
    };
  }

  // Completion variants: the names of all scopes applicable to the DOM value.
  public Object[] getVariants() {
    final Module module = myDomValue.getModule();
    assert module != null;
    final List<WebflowScopeProvider> list =
        WebflowScopeProviderManager.getService(module).getAvailableProviders(myDomValue);

    return ContainerUtil.map2Array(list, new Function<WebflowScopeProvider, Object>() {
      public Object fun(final WebflowScopeProvider provider) {
        return provider.getScope().getName();
      }
    });
  }
}
| apache-2.0 |
gotroy/elasticsearch | src/main/java/org/elasticsearch/action/percolate/PercolateShardResponse.java | 6425 | /*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.percolate;
import com.google.common.collect.ImmutableList;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.action.support.broadcast.BroadcastShardOperationResponse;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.percolator.PercolateContext;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.facet.InternalFacets;
import org.elasticsearch.search.highlight.HighlightField;
import org.elasticsearch.search.query.QuerySearchResult;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
*/
/**
 * Per-shard response of a percolate request: the matched query ids, optional
 * scores and highlight snippets, plus optional facet/aggregation results.
 *
 * Serialization fix: {@code requestedSize} and the scores-array length are
 * now written with {@code writeVInt} to mirror the {@code readVInt} calls in
 * {@link #readFrom}. They were previously written with {@code writeVLong},
 * which only worked because the varint encodings coincide for non-negative
 * int values.
 */
public class PercolateShardResponse extends BroadcastShardOperationResponse {

    private static final BytesRef[] EMPTY_MATCHES = new BytesRef[0];
    private static final float[] EMPTY_SCORES = new float[0];
    private static final List<Map<String, HighlightField>> EMPTY_HL = ImmutableList.of();

    private long count;
    private float[] scores;
    private BytesRef[] matches;
    private List<Map<String, HighlightField>> hls;
    private byte percolatorTypeId;
    private int requestedSize;

    private InternalFacets facets;
    private InternalAggregations aggregations;

    PercolateShardResponse() {
        hls = new ArrayList<Map<String, HighlightField>>();
    }

    public PercolateShardResponse(BytesRef[] matches, List<Map<String, HighlightField>> hls, long count, float[] scores, PercolateContext context, String index, int shardId) {
        super(index, shardId);
        this.matches = matches;
        this.hls = hls;
        this.count = count;
        this.scores = scores;
        this.percolatorTypeId = context.percolatorTypeId;
        this.requestedSize = context.size();
        QuerySearchResult result = context.queryResult();
        if (result != null) {
            if (result.facets() != null) {
                this.facets = new InternalFacets(result.facets().facets());
            }
            if (result.aggregations() != null) {
                this.aggregations = (InternalAggregations) result.aggregations();
            }
        }
    }

    public PercolateShardResponse(BytesRef[] matches, long count, float[] scores, PercolateContext context, String index, int shardId) {
        this(matches, EMPTY_HL, count, scores, context, index, shardId);
    }

    public PercolateShardResponse(BytesRef[] matches, List<Map<String, HighlightField>> hls, long count, PercolateContext context, String index, int shardId) {
        this(matches, hls, count, EMPTY_SCORES, context, index, shardId);
    }

    public PercolateShardResponse(long count, PercolateContext context, String index, int shardId) {
        this(EMPTY_MATCHES, EMPTY_HL, count, EMPTY_SCORES, context, index, shardId);
    }

    public PercolateShardResponse(PercolateContext context, String index, int shardId) {
        this(EMPTY_MATCHES, EMPTY_HL, 0, EMPTY_SCORES, context, index, shardId);
    }

    public BytesRef[] matches() {
        return matches;
    }

    public float[] scores() {
        return scores;
    }

    public long count() {
        return count;
    }

    public int requestedSize() {
        return requestedSize;
    }

    public List<Map<String, HighlightField>> hls() {
        return hls;
    }

    public InternalFacets facets() {
        return facets;
    }

    public InternalAggregations aggregations() {
        return aggregations;
    }

    public byte percolatorTypeId() {
        return percolatorTypeId;
    }

    /** A type id of 0x00 means this shard produced no percolation result. */
    public boolean isEmpty() {
        return percolatorTypeId == 0x00;
    }

    @Override
    public void readFrom(StreamInput in) throws IOException {
        super.readFrom(in);
        percolatorTypeId = in.readByte();
        requestedSize = in.readVInt();
        count = in.readVLong();
        matches = new BytesRef[in.readVInt()];
        for (int i = 0; i < matches.length; i++) {
            matches[i] = in.readBytesRef();
        }
        scores = new float[in.readVInt()];
        for (int i = 0; i < scores.length; i++) {
            scores[i] = in.readFloat();
        }
        int size = in.readVInt();
        for (int i = 0; i < size; i++) {
            int mSize = in.readVInt();
            Map<String, HighlightField> fields = new HashMap<String, HighlightField>();
            for (int j = 0; j < mSize; j++) {
                fields.put(in.readString(), HighlightField.readHighlightField(in));
            }
            hls.add(fields);
        }
        facets = InternalFacets.readOptionalFacets(in);
        aggregations = InternalAggregations.readOptionalAggregations(in);
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        super.writeTo(out);
        out.writeByte(percolatorTypeId);
        // Written as vint to match readVInt() in readFrom (was writeVLong).
        out.writeVInt(requestedSize);
        out.writeVLong(count);
        out.writeVInt(matches.length);
        for (BytesRef match : matches) {
            out.writeBytesRef(match);
        }
        // Written as vint to match readVInt() in readFrom (was writeVLong).
        out.writeVInt(scores.length);
        for (float score : scores) {
            out.writeFloat(score);
        }
        out.writeVInt(hls.size());
        for (Map<String, HighlightField> hl : hls) {
            out.writeVInt(hl.size());
            for (Map.Entry<String, HighlightField> entry : hl.entrySet()) {
                out.writeString(entry.getKey());
                entry.getValue().writeTo(out);
            }
        }
        out.writeOptionalStreamable(facets);
        out.writeOptionalStreamable(aggregations);
    }
}
| apache-2.0 |
cloudera/hue | desktop/core/src/desktop/js/ko/components/assist/assistHBaseEntry.js | 2382 | // Licensed to Cloudera, Inc. under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. Cloudera, Inc. licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
import * as ko from 'knockout';
import apiHelper from 'api/apiHelper';
import huePubSub from 'utils/huePubSub';
/**
 * Tree entry in the HBase assist panel. Children are lazily fetched from the
 * HBase API and wrapped in further AssistHBaseEntry instances.
 *
 * Note: the original used the legacy `const self = this` aliasing; every
 * callback here is an arrow function, so `this` is captured lexically and the
 * alias is unnecessary.
 */
class AssistHBaseEntry {
  /**
   * @param {object} options
   * @param {object} options.definition
   * @param {string} options.definition.name
   * @constructor
   */
  constructor(options) {
    this.definition = options.definition;
    this.path = this.definition.name;
    this.entries = ko.observableArray([]);
    this.loaded = false;
    this.loading = ko.observable(false);
    this.hasErrors = ko.observable(false);
    this.hasEntries = ko.pureComputed(() => this.entries().length > 0);
  }

  /**
   * Fetches the children of this entry once; concurrent calls while a fetch
   * is in flight are ignored.
   * @param {Function} [callback] - invoked after success or failure.
   */
  loadEntries(callback) {
    if (this.loading()) {
      return;
    }
    this.loading(true);
    this.hasErrors(false);

    apiHelper.fetchHBase({
      parent: this.definition,
      successCallback: data => {
        this.entries(
          data.data.map(
            obj =>
              new AssistHBaseEntry({
                definition: obj
              })
          )
        );
        this.loaded = true;
        this.loading(false);
        if (callback) {
          callback();
        }
      },
      errorCallback: () => {
        this.hasErrors(true);
        this.loading(false);
        if (callback) {
          callback();
        }
      }
    });
  }

  // open/click/dblClick publish the corresponding assist events; open and
  // click intentionally share the same topic.
  open() {
    huePubSub.publish('assist.clickHBaseItem', this);
  }

  click() {
    huePubSub.publish('assist.clickHBaseItem', this);
  }

  dblClick() {
    huePubSub.publish('assist.dblClickHBaseItem', this);
  }
}

export default AssistHBaseEntry;
| apache-2.0 |
consulo/consulo-spring | webflow/src/com/intellij/spring/webflow/config/model/xml/impl/version2_0/FlowExecutorImpl.java | 475 | package com.intellij.spring.webflow.config.model.xml.impl.version2_0;
import com.intellij.spring.impl.model.DomSpringBeanImpl;
import com.intellij.spring.webflow.config.model.xml.version2_0.FlowExecutor;
import org.jetbrains.annotations.NotNull;
@SuppressWarnings({"AbstractClassNeverImplemented"})
/** DOM model implementation for the {@code <flow-executor>} Spring Webflow element. */
@SuppressWarnings({"AbstractClassNeverImplemented"})
public abstract class FlowExecutorImpl extends DomSpringBeanImpl implements FlowExecutor {
  // The backing Spring bean class is fixed for this element type.
  @NotNull
  public String getClassName() {
    return FLOW_EXECUTOR_CLASS;
  }
}
| apache-2.0 |
bstopp/acs-aem-commons | bundle/src/test/java/com/adobe/acs/commons/mcp/impl/processes/asset/UrlAssetImportTest.java | 9991 | /*
* #%L
* ACS AEM Commons Bundle
* %%
* Copyright (C) 2018 Adobe
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package com.adobe.acs.commons.mcp.impl.processes.asset;
import com.adobe.acs.commons.fam.ActionManager;
import com.adobe.acs.commons.fam.actions.Actions;
import com.adobe.acs.commons.functions.CheckedConsumer;
import com.adobe.acs.commons.data.CompositeVariant;
import com.adobe.acs.commons.data.Spreadsheet;
import com.day.cq.dam.api.Asset;
import com.day.cq.dam.api.AssetManager;
import com.google.common.base.Function;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.annotation.Nullable;
import javax.jcr.RepositoryException;
import org.apache.jackrabbit.JcrConstants;
import org.apache.sling.api.resource.PersistenceException;
import org.apache.sling.api.resource.Resource;
import org.apache.sling.api.resource.ResourceResolver;
import org.apache.sling.api.resource.ValueMap;
import org.apache.sling.commons.mime.MimeTypeService;
import org.apache.sling.testing.mock.sling.ResourceResolverType;
import org.apache.sling.testing.mock.sling.junit.SlingContext;
import org.junit.Before;
import org.junit.Test;
import org.junit.Rule;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.junit.MockitoJUnitRunner;
import static org.junit.Assert.assertEquals;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;
/**
* Provide code coverage for URL Asset Import
*/
/**
 * Unit tests for {@code UrlAssetImport} (MCP process that ingests assets from
 * URLs listed in a spreadsheet). Uses a JCR-mock Sling context plus a mocked
 * {@code AssetManager} whose {@code createAsset} stub materializes a minimal
 * dam:Asset node tree, so no real binaries are stored.
 */
@RunWith(MockitoJUnitRunner.class)
public class UrlAssetImportTest {
    // Header variants used to verify case-insensitive spreadsheet column matching.
    private static List<String> CASE_INSENSITIVE_HEADERS = Arrays.asList("Source", "Rendition", "Target",
            "Original");
    @Rule
    public final SlingContext context = new SlingContext(ResourceResolverType.JCR_MOCK);
    @Mock
    private ActionManager actionManager;
    @Mock
    private AssetManager assetManager;
    // Recreated for every test in setUp().
    private UrlAssetImport importProcess = null;
    /**
     * Wires the mock environment:
     * - adapts ResourceResolver -> mocked AssetManager and Resource -> mocked Asset,
     * - creates /content/dam so folder creation has a parent,
     * - stubs createAsset() to build the dam:Asset/jcr:content/metadata node tree,
     * - makes ActionManager.deferredWithResolver() run its work synchronously
     *   with the test's resolver (no real deferred execution).
     */
    @Before
    public void setUp() throws PersistenceException {
        context.registerAdapter(ResourceResolver.class, AssetManager.class, new Function<ResourceResolver, AssetManager>() {
            @Nullable
            @Override
            public AssetManager apply(@Nullable ResourceResolver input) {
                return assetManager;
            }
        });
        context.registerAdapter(Resource.class, Asset.class, new Function<Resource, Asset>() {
            @Nullable
            @Override
            public Asset apply(@Nullable Resource input) {
                return mock(Asset.class);
            }
        });
        context.create().resource("/content/dam", JcrConstants.JCR_PRIMARYTYPE, "sling:Folder");
        context.resourceResolver().commit();
        // Simulate asset creation by writing the expected node structure into the mock JCR.
        doAnswer(invocation -> {
            String path = (String) invocation.getArguments()[0];
            context.create().resource(path, JcrConstants.JCR_PRIMARYTYPE, "dam:Asset");
            context.create().resource(path + "/jcr:content", JcrConstants.JCR_PRIMARYTYPE, "nt:unstructured");
            context.create().resource(path + "/jcr:content/metadata", JcrConstants.JCR_PRIMARYTYPE, "nt:unstructured");
            return mock(Asset.class);
        }).when(assetManager).createAsset(any(String.class), any(InputStream.class), any(String.class), any(Boolean.class));
        importProcess = new UrlAssetImport(context.getService(MimeTypeService.class), null);
        importProcess.fileData = new Spreadsheet(true, "source", "target", "rendition", "original","dc:title", "dc:attr");
        importProcess.dryRunMode = false;
        // Execute deferred actions immediately so tests stay synchronous.
        doAnswer(invocation -> {
            CheckedConsumer<ResourceResolver> method = (CheckedConsumer<ResourceResolver>) invocation.getArguments()[0];
            method.accept(context.resourceResolver());
            return null;
        }).when(actionManager).deferredWithResolver(any(CheckedConsumer.class));
        Actions.setCurrentActionManager(actionManager);
    }
    /**
     * Appends one data row to the in-memory spreadsheet, mapping the given
     * values positionally onto the current header row (extra values ignored).
     */
    private void addImportRow(String... cols) {
        List<String> header = importProcess.fileData.getHeaderRow();
        Map<String, CompositeVariant> row = new HashMap<>();
        for (int i = 0; i < cols.length && i < header.size(); i++) {
            row.put(header.get(i), new CompositeVariant(cols[i]));
        }
        importProcess.fileData.getDataRowsAsCompositeVariants().add(row);
    }
    // Happy path: one asset plus one rendition row -> 1 imported asset, 1 created folder.
    @Test
    public void testImportFile() throws IOException, RepositoryException {
        importProcess.init();
        URL testImg = getClass().getResource("/img/test.png");
        addImportRow(testImg.toString(), "/content/dam/test");
        addImportRow(testImg.toString(), "/content/dam/test", "rendition", "test.png");
        importProcess.files = importProcess.extractFilesAndFolders(importProcess.fileData.getDataRowsAsCompositeVariants());
        importProcess.createFolders(actionManager);
        importProcess.importAssets(actionManager);
        importProcess.updateMetadata(actionManager);
        importProcess.importRenditions(actionManager);
        assertEquals(1, importProcess.getCount(importProcess.importedAssets));
        assertEquals(1, importProcess.getCount(importProcess.createdFolders));
    }
    // With preserveFolderTitles=true the pre-existing jcr:title must survive folder creation.
    @Test
    public void testFolderTitlePreserve() throws IOException, RepositoryException {
        context.load().json("/com/adobe/acs/commons/mcp/impl/processes/asset-ingestor.json", "/content/dam/testfolder");
        importProcess.init();
        importProcess.preserveFolderTitles = true;
        URL testImg = getClass().getResource("/img/test.png");
        addImportRow(testImg.toString(), "/content/dam/testfolder/test");
        addImportRow(testImg.toString(), "/content/dam/testfolder/test", "rendition", "test.png");
        importProcess.files = importProcess.extractFilesAndFolders(importProcess.fileData.getDataRowsAsCompositeVariants());
        importProcess.createFolders(actionManager);
        assertEquals(1, importProcess.getCount(importProcess.createdFolders));
        context.currentResource("/content/dam/testfolder/jcr:content");
        ValueMap vm = context.currentResource().getValueMap();
        assertEquals("Test Folder", vm.get("jcr:title"));
    }
    // With preserveFolderTitles=false the jcr:title is reset to the node name.
    @Test
    public void testFolderNoTitlePreserve() throws IOException, RepositoryException {
        context.load().json("/com/adobe/acs/commons/mcp/impl/processes/asset-ingestor.json", "/content/dam/testfolder");
        importProcess.init();
        importProcess.preserveFolderTitles = false;
        URL testImg = getClass().getResource("/img/test.png");
        addImportRow(testImg.toString(), "/content/dam/testfolder/test");
        addImportRow(testImg.toString(), "/content/dam/testfolder/test", "rendition", "test.png");
        importProcess.files = importProcess.extractFilesAndFolders(importProcess.fileData.getDataRowsAsCompositeVariants());
        importProcess.createFolders(actionManager);
        assertEquals(1, importProcess.getCount(importProcess.createdFolders));
        context.currentResource("/content/dam/testfolder/jcr:content");
        ValueMap vm = context.currentResource().getValueMap();
        assertEquals("testfolder", vm.get("jcr:title"));
    }
    // An unreachable source URL must not abort the whole run (no assertions: just must not throw).
    @Test
    public void testImportFile404() throws IOException, RepositoryException {
        importProcess.init();
        URL testImg = getClass().getResource("/img/test.png");
        addImportRow(testImg.toString(), "/content/dam/test");
        addImportRow(testImg.toString(), "/content/dam/test", "rendition", "test.png");
        addImportRow(testImg.toString() + "-404", "/content/dam/other", "rendition", "no-original-found");
        importProcess.files = importProcess.extractFilesAndFolders(importProcess.fileData.getDataRowsAsCompositeVariants());
        importProcess.createFolders(actionManager);
        assertEquals(2, importProcess.getCount(importProcess.createdFolders));
        importProcess.importAssets(actionManager);
        importProcess.updateMetadata(actionManager);
        importProcess.importRenditions(actionManager);
    }
    // Mixed-case headers and namespaced/camelCase metadata columns must be written to the asset.
    @Test
    public void testAddedCamelCaseProperties() throws IOException, RepositoryException {
        importProcess.fileData = new Spreadsheet(true, CASE_INSENSITIVE_HEADERS,
                "source", "target", "rendition", "original", "dc:title", "test:camelCase");
        importProcess.init();
        URL testImg = getClass().getResource("/img/test.png");
        final String expectedTitle = "title";
        final String expectedCamelCaseProp = "come test value";
        addImportRow(testImg.toString(), "/content/dam/test", "", "", expectedTitle, expectedCamelCaseProp);
        importProcess.files = importProcess.extractFilesAndFolders(
                importProcess.fileData.getDataRowsAsCompositeVariants());
        importProcess.createFolders(actionManager);
        importProcess.importAssets(actionManager);
        importProcess.updateMetadata(actionManager);
        Resource metadata = context.resourceResolver().getResource("/content/dam/test/test.png/jcr:content/metadata");
        ValueMap valueMap = metadata.getValueMap();
        assertEquals(expectedTitle, valueMap.get("dc:title"));
        assertEquals(expectedCamelCaseProp, valueMap.get("test:camelCase"));
    }
}
| apache-2.0 |
svagionitis/aws-sdk-cpp | aws-cpp-sdk-ec2/source/model/DeleteFlowLogsResponse.cpp | 2200 | /*
* Copyright 2010-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
#include <aws/ec2/model/DeleteFlowLogsResponse.h>
#include <aws/core/utils/xml/XmlSerializer.h>
#include <aws/core/AmazonWebServiceResult.h>
#include <aws/core/utils/StringUtils.h>
#include <aws/core/utils/logging/LogMacros.h>
#include <utility>
using namespace Aws::EC2::Model;
using namespace Aws::Utils::Xml;
using namespace Aws::Utils::Logging;
using namespace Aws::Utils;
using namespace Aws;
// Default-constructs an empty response; members are default-initialized.
DeleteFlowLogsResponse::DeleteFlowLogsResponse()
{
}
// Constructs a response by deserializing the XML payload of a service result
// (delegates to operator=).
DeleteFlowLogsResponse::DeleteFlowLogsResponse(const AmazonWebServiceResult<XmlDocument>& result)
{
  *this = result;
}
// Populates this response from the XML payload of a web-service result.
// The response element is either the document root itself or a direct child
// named "DeleteFlowLogsResponse"; every <unsuccessful>/<item> child is
// collected, and the response metadata is captured for request-id logging.
DeleteFlowLogsResponse& DeleteFlowLogsResponse::operator =(const AmazonWebServiceResult<XmlDocument>& result)
{
  const XmlDocument& payloadDoc = result.GetPayload();
  XmlNode root = payloadDoc.GetRootElement();

  // Locate the actual response element.
  XmlNode responseNode = root;
  if (root.GetName() != "DeleteFlowLogsResponse")
  {
    responseNode = root.FirstChild("DeleteFlowLogsResponse");
  }

  if (!responseNode.IsNull())
  {
    XmlNode unsuccessfulNode = responseNode.FirstChild("unsuccessful");
    if (!unsuccessfulNode.IsNull())
    {
      // Accumulate every <item> sibling under <unsuccessful>.
      for (XmlNode itemNode = unsuccessfulNode.FirstChild("item"); !itemNode.IsNull(); itemNode = itemNode.NextNode("item"))
      {
        m_unsuccessful.push_back(itemNode);
      }
    }
  }

  // Response metadata always hangs off the document root.
  XmlNode metadataNode = root.FirstChild("ResponseMetadata");
  m_responseMetadata = metadataNode;
  AWS_LOGSTREAM_DEBUG("Aws::EC2::Model::DeleteFlowLogsResponse", "x-amzn-request-id: " << m_responseMetadata.GetRequestId() );
  return *this;
}
| apache-2.0 |
KaiserpfalzEDV/kp-office | kp-finance-root/kp-finance-chartofaccounts/src/main/java/de/kaiserpfalzedv/office/finance/chartofaccounts/api/chartofaccounts/ChartOfAccountsAlreadyExistsException.java | 1267 | /*
* Copyright 2017 Kaiserpfalz EDV-Service, Roland T. Lichti
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.kaiserpfalzedv.office.finance.chartofaccounts.api.chartofaccounts;
/**
* @author klenkes
* @version 2015Q1
* @since 04.01.16 04:42
*/
public class ChartOfAccountsAlreadyExistsException extends ChartOfAccountsException {
    private static final long serialVersionUID = -4403712558954899958L;

    /** Name of the chart of accounts that already exists. */
    private final String chartOfAccounts;

    /**
     * @param chartOfAccounts the name of the chart of accounts that already exists
     */
    public ChartOfAccountsAlreadyExistsException(final String chartOfAccounts) {
        // Fixed message: the original said "does not exist", contradicting the
        // meaning of this "already exists" exception (copy-paste defect).
        super("Chart of account '" + chartOfAccounts + "' already exists.");
        this.chartOfAccounts = chartOfAccounts;
    }

    /** @return the name of the conflicting chart of accounts */
    public String getChartOfAccounts() {
        return chartOfAccounts;
    }
}
| apache-2.0 |
allotria/intellij-community | java/java-impl/src/com/intellij/codeInspection/classCanBeRecord/ClassCanBeRecordInspection.java | 4708 | // Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.codeInspection.classCanBeRecord;
import com.intellij.codeInsight.daemon.impl.UnusedSymbolUtil;
import com.intellij.codeInsight.daemon.impl.analysis.HighlightingFeature;
import com.intellij.codeInspection.classCanBeRecord.ConvertToRecordFix.RecordCandidate;
import com.intellij.codeInspection.ui.InspectionOptionsPanel;
import com.intellij.codeInspection.util.InspectionMessage;
import com.intellij.java.JavaBundle;
import com.intellij.psi.PsiClass;
import com.intellij.psi.PsiFile;
import com.intellij.psi.PsiIdentifier;
import com.intellij.util.ui.CheckBox;
import com.intellij.util.ui.JBUI;
import com.siyeh.ig.BaseInspection;
import com.siyeh.ig.BaseInspectionVisitor;
import com.siyeh.ig.InspectionGadgetsFix;
import org.jetbrains.annotations.Nls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
/**
 * Inspection that flags classes convertible to Java records. Only runs when
 * the RECORDS language feature is available in the inspected file. The
 * {@link ConversionStrategy} controls behavior when conversion would weaken
 * the visibility of members.
 */
public class ClassCanBeRecordInspection extends BaseInspection {
  // Public non-final fields: serialized as inspection profile settings.
  @NotNull
  public ConversionStrategy myConversionStrategy = ConversionStrategy.SHOW_AFFECTED_MEMBERS;
  public boolean suggestAccessorsRenaming;
  public ClassCanBeRecordInspection() {
  }
  public ClassCanBeRecordInspection(@NotNull ConversionStrategy conversionStrategy, boolean suggestAccessorsRenaming) {
    myConversionStrategy = conversionStrategy;
    this.suggestAccessorsRenaming = suggestAccessorsRenaming;
  }
  @Override
  public boolean shouldInspect(PsiFile file) {
    // Records require a language level that supports them.
    return HighlightingFeature.RECORDS.isAvailable(file);
  }
  @Override
  protected @NotNull @InspectionMessage String buildErrorString(Object... infos) {
    return JavaBundle.message("class.can.be.record.display.name");
  }
  @Override
  public BaseInspectionVisitor buildVisitor() {
    // DO_NOT_SUGGEST suppresses candidates whose conversion weakens visibility.
    return new ClassCanBeRecordVisitor(myConversionStrategy != ConversionStrategy.DO_NOT_SUGGEST, suggestAccessorsRenaming);
  }
  @Override
  protected boolean buildQuickFixesOnlyForOnTheFlyErrors() {
    return myConversionStrategy == ConversionStrategy.SHOW_AFFECTED_MEMBERS;
  }
  @Override
  protected @Nullable InspectionGadgetsFix buildFix(Object... infos) {
    return new ConvertToRecordFix(myConversionStrategy == ConversionStrategy.SHOW_AFFECTED_MEMBERS, suggestAccessorsRenaming);
  }
  /** Builds the settings UI: accessor-renaming checkbox plus one radio button per strategy. */
  @Override
  public @Nullable JComponent createOptionsPanel() {
    JPanel panel = new InspectionOptionsPanel();
    panel.add(new CheckBox(JavaBundle.message("class.can.be.record.suggest.renaming.accessors"), this,
                           "suggestAccessorsRenaming"));
    panel.add(new JLabel(JavaBundle.message("class.can.be.record.conversion.weakens.member")));
    ButtonGroup butGr = new ButtonGroup();
    for (ConversionStrategy strategy : ConversionStrategy.values()) {
      JRadioButton radioBut = new JRadioButton(strategy.getMessage(), strategy == myConversionStrategy);
      radioBut.addActionListener(e -> myConversionStrategy = strategy);
      radioBut.setBorder(JBUI.Borders.emptyLeft(20));
      butGr.add(radioBut);
      panel.add(radioBut);
    }
    return panel;
  }
  /** Visitor that registers an error on the name identifier of each record candidate. */
  private static class ClassCanBeRecordVisitor extends BaseInspectionVisitor {
    private final boolean myRenameIfWeakenVisibility;
    private final boolean mySuggestAccessorsRenaming;
    private ClassCanBeRecordVisitor(boolean renameIfWeakenVisibility, boolean suggestAccessorsRenaming) {
      myRenameIfWeakenVisibility = renameIfWeakenVisibility;
      mySuggestAccessorsRenaming = suggestAccessorsRenaming;
    }
    @Override
    public void visitClass(PsiClass aClass) {
      super.visitClass(aClass);
      PsiIdentifier classIdentifier = aClass.getNameIdentifier();
      if (classIdentifier == null) return;
      // Null means the class does not qualify as a record candidate.
      RecordCandidate recordCandidate = ConvertToRecordFix.getClassDefinition(aClass, mySuggestAccessorsRenaming);
      if (recordCandidate == null) return;
      // Skip candidates whose conversion would weaken member visibility, unless allowed.
      if (!myRenameIfWeakenVisibility && !ConvertToRecordProcessor.findWeakenVisibilityUsages(recordCandidate).isEmpty()) return;
      // Implicitly-used classes (e.g. via frameworks) are left alone.
      if (UnusedSymbolUtil.isImplicitUsage(aClass.getProject(), aClass)) return;
      registerError(classIdentifier);
    }
  }
  /** User-selectable behavior when conversion would weaken a member's visibility. */
  public enum ConversionStrategy {
    DO_NOT_SUGGEST("class.can.be.record.conversion.strategy.do.not.convert"),
    SHOW_AFFECTED_MEMBERS("class.can.be.record.conversion.strategy.show.members"),
    SILENTLY("class.can.be.record.conversion.strategy.convert.silently");
    @Nls
    private final String messageKey;
    ConversionStrategy(@Nls String messageKey) {
      this.messageKey = messageKey;
    }
    @Nls
    String getMessage() {
      return JavaBundle.message(messageKey);
    }
  }
}
| apache-2.0 |
flowable/flowable-engine | modules/flowable-cmmn-engine/src/test/java/org/flowable/cmmn/test/listener/TestLeaveUserEventListener.java | 1392 | /* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.flowable.cmmn.test.listener;
import org.flowable.cmmn.api.delegate.DelegatePlanItemInstance;
import org.flowable.cmmn.api.runtime.PlanItemDefinitionType;
import org.flowable.cmmn.api.runtime.PlanItemInstanceState;
/**
 * Test lifecycle listener that records state changes of user-event-listener
 * plan items leaving the AVAILABLE state.
 */
public class TestLeaveUserEventListener extends AbstractTestLifecycleListener {

    @Override
    public String getSourceState() {
        // Only transitions out of AVAILABLE are observed.
        return PlanItemInstanceState.AVAILABLE;
    }

    @Override
    public String getTargetState() {
        // null target state — presumably matches any target; see the base class.
        return null;
    }

    @Override
    public void stateChanged(DelegatePlanItemInstance planItemInstance, String oldState, String newState) {
        String definitionType = planItemInstance.getPlanItemDefinitionType();
        if (!PlanItemDefinitionType.USER_EVENT_LISTENER.equals(definitionType)) {
            return;
        }
        events.add(new TestLifeCycleEvent(planItemInstance, oldState, newState));
    }
}
| apache-2.0 |
h2oai/h2o-3 | h2o-core/src/test/java/water/parser/ParseTestEncrypted.java | 6963 | package water.parser;
import org.apache.commons.io.IOUtils;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import water.*;
import water.api.schemas3.ParseSetupV3;
import water.fvec.Frame;
import water.fvec.Vec;
import water.util.FileUtils;
import javax.crypto.Cipher;
import javax.crypto.KeyGenerator;
import javax.crypto.SecretKey;
import java.io.*;
import java.security.KeyStore;
import java.util.Arrays;
import java.util.zip.GZIPOutputStream;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;
import static org.junit.Assert.*;
/**
 * Parameterized end-to-end test of H2O's encrypted-file parsing: builds an
 * AES key, stores it in a JCEKS keystore, AES-encrypts the citibike CSV in
 * three container variants (plain, zip, gzip), then verifies the decrypted
 * parse matches a plaintext parse of the same file.
 */
@RunWith(Parameterized.class)
public class ParseTestEncrypted extends TestUtil {
  @ClassRule
  public static TemporaryFolder tmp = new TemporaryFolder();
  private static String PLAINTEXT_FILE = "smalldata/demos/citibike_20k.csv";
  private static String KEYSTORE_TYPE = "JCEKS"; // Note: need to use JCEKS, default JKS cannot store non-private keys!
  private static String MY_CIPHER_SPEC = "AES/ECB/PKCS5Padding";
  private static char[] MY_PASSWORD = "Password123".toCharArray();
  private static String MY_KEY_ALIAS = "secretKeyAlias";
  // Keystore file written once in setup() and shared by all parameter runs.
  private static File _jks;
  // Name of the encrypted fixture exercised by the current parameter run.
  @Parameterized.Parameter
  public String _encrypted_name;
  @Parameterized.Parameters
  public static Iterable<? extends Object> data() {
    return Arrays.asList("encrypted.csv.aes", "encrypted.zip.aes", "encrypted.gz.aes");
  }
  /** One-time fixture setup: key, keystore, and the three encrypted variants. */
  @BeforeClass
  public static void setup() throws Exception {
    SecretKey secretKey = generateSecretKey();
    // KeyStore
    _jks = writeKeyStore(secretKey);
    // Encrypted CSV
    writeEncrypted(FileUtils.getFile(PLAINTEXT_FILE), tmp.newFile("encrypted.csv.aes"), secretKey);
    // CSV in an Encrypted Zip container
    writeEncryptedZip(FileUtils.getFile(PLAINTEXT_FILE), tmp.newFile("encrypted.zip.aes"), secretKey);
    // CSV in an Encrypted Gzip container
    writeEncryptedGzip(FileUtils.getFile(PLAINTEXT_FILE), tmp.newFile("encrypted.gz.aes"), secretKey);
    TestUtil.stall_till_cloudsize(1);
  }
  /** Generates a fresh 128-bit AES key. */
  private static SecretKey generateSecretKey() throws Exception {
    KeyGenerator keyGen = KeyGenerator.getInstance("AES");
    keyGen.init(128);
    return keyGen.generateKey();
  }
  /** Stores the secret key in a new password-protected JCEKS keystore file. */
  private static File writeKeyStore(SecretKey secretKey) throws Exception {
    KeyStore ks = KeyStore.getInstance(KEYSTORE_TYPE);
    ks.load(null);
    KeyStore.ProtectionParameter protParam = new KeyStore.PasswordProtection(MY_PASSWORD);
    KeyStore.SecretKeyEntry skEntry = new KeyStore.SecretKeyEntry(secretKey);
    ks.setEntry(MY_KEY_ALIAS, skEntry, protParam);
    File jks = tmp.newFile("mykeystore.jks");
    FileOutputStream fos = null;
    try {
      fos = new FileOutputStream(jks);
      ks.store(fos, MY_PASSWORD);
      fos.close();
    } finally {
      IOUtils.closeQuietly(fos);
    }
    return jks;
  }
  /** AES-encrypts {@code source} into {@code target}. */
  private static void writeEncrypted(File source, File target, SecretKey secretKey) throws Exception {
    FileOutputStream fileOut = new FileOutputStream(target);
    try {
      encryptFile(source, fileOut, secretKey);
    } finally {
      IOUtils.closeQuietly(fileOut);
    }
  }
  /** Gzips {@code source} into a temp file, then encrypts the gzip into {@code target}. */
  private static void writeEncryptedGzip(File source, File target, SecretKey secretKey) throws Exception {
    File tmpFile = tmp.newFile();
    try (InputStream sourceIn = new FileInputStream(source);
         FileOutputStream fileOut = new FileOutputStream(tmpFile);
         GZIPOutputStream gzipOutput = new GZIPOutputStream(fileOut)) {
      IOUtils.copyLarge(sourceIn, gzipOutput);
    }
    writeEncrypted(tmpFile, target, secretKey);
  }
  /** Zips {@code source} (single entry) into a temp file, then encrypts the zip into {@code target}. */
  private static void writeEncryptedZip(File source, File target, SecretKey secretKey) throws Exception {
    File tmpFile = tmp.newFile();
    FileOutputStream tmpOut = new FileOutputStream(tmpFile);
    try {
      ZipOutputStream zipOut = new ZipOutputStream(tmpOut);
      ZipEntry ze = new ZipEntry(source.getName());
      zipOut.putNextEntry(ze);
      try (InputStream sourceIn = new FileInputStream(source)) {
        IOUtils.copyLarge(sourceIn, zipOut);
      }
      zipOut.closeEntry();
      zipOut.close();
    } finally {
      IOUtils.closeQuietly(tmpOut);
    }
    writeEncrypted(tmpFile, target, secretKey);
  }
  // Whole-file single-shot encryption (file is read fully into memory; fine for test data).
  private static void encryptFile(File source, OutputStream outputStream, SecretKey secretKey) throws Exception {
    FileInputStream inputStream = new FileInputStream(source);
    try {
      Cipher cipher = Cipher.getInstance(MY_CIPHER_SPEC);
      cipher.init(Cipher.ENCRYPT_MODE, secretKey);
      byte[] inputBytes = new byte[(int) source.length()];
      IOUtils.readFully(inputStream, inputBytes);
      byte[] outputBytes = cipher.doFinal(inputBytes);
      outputStream.write(outputBytes);
      inputStream.close();
    } finally {
      IOUtils.closeQuietly(inputStream);
    }
  }
  /** Parses the current encrypted variant via the DecryptionTool and compares to the plaintext parse. */
  @Test
  public void testParseEncrypted() {
    Scope.enter();
    try {
      // 1. Upload the Keystore file
      Vec jksVec = Scope.track(makeNfsFileVec(_jks.getAbsolutePath()));
      // 2. Set Decryption Tool Parameters
      DecryptionTool.DecryptionSetup ds = new DecryptionTool.DecryptionSetup();
      ds._decrypt_tool_id = Key.make("aes_decrypt_tool");
      ds._keystore_id = jksVec._key;
      ds._key_alias = MY_KEY_ALIAS;
      ds._keystore_type = KEYSTORE_TYPE;
      ds._password = MY_PASSWORD;
      ds._cipher_spec = MY_CIPHER_SPEC;
      // 3. Instantiate & Install the Decryption Tool into DKV
      Keyed<DecryptionTool> dt = Scope.track_generic(DecryptionTool.make(ds));
      // 4. Load encrypted file into a ByteVec
      Vec encVec = Scope.track(makeNfsFileVec(new File(tmp.getRoot(), _encrypted_name).getAbsolutePath()));
      // 5. Create Parse Setup with a given Decryption Tool
      ParseSetup ps = new ParseSetup(new ParseSetupV3()).setDecryptTool(dt._key);
      ParseSetup guessedSetup = ParseSetup.guessSetup(new Key[]{encVec._key}, ps);
      assertEquals("aes_decrypt_tool", guessedSetup._decrypt_tool.toString());
      assertEquals("CSV", guessedSetup._parse_type.name());
      // 6. Parse encrypted dataset
      Key<Frame> fKey = Key.make("decrypted_frame");
      Frame decrypted = Scope.track(ParseDataset.parse(fKey, new Key[]{encVec._key}, false, guessedSetup));
      // 7. Compare with source dataset
      Frame plaintext = Scope.track(parseTestFile(PLAINTEXT_FILE));
      assertArrayEquals(plaintext._names, decrypted._names);
      for (String n : plaintext._names) {
        switch (plaintext.vec(n).get_type_str()) {
          case "String":
            assertStringVecEquals(plaintext.vec(n), decrypted.vec(n));
            break;
          case "Enum":
            assertCatVecEquals(plaintext.vec(n), decrypted.vec(n));
            break;
          default:
            assertVecEquals(plaintext.vec(n), decrypted.vec(n), 0.001);
        }
      }
    } finally {
      Scope.exit();
    }
  }
}
| apache-2.0 |
burris/dwr | protocol/json/main/java/org/directwebremoting/jsonrpc/JsonRpcConstants.java | 1652 | /*
* Copyright 2005 Joe Walker
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.directwebremoting.jsonrpc;
/**
* Various constants from the JSON-RPC spec:
* http://groups.google.com/group/json-rpc/web/json-rpc-1-2-proposal
* @author Joe Walker [joe at getahead dot ltd dot uk]
*/
public interface JsonRpcConstants
{
/**
* Parse error
* Invalid JSON. An error occurred on the server while parsing the JSON text
*/
public static int ERROR_CODE_PARSE = -32700;
/**
* Invalid Request: The received JSON not a valid JSON-RPC Request
*/
public static int ERROR_CODE_INVALID = -32600;
/**
* Method not found.
* The requested remote-procedure does not exist / is not available.
*/
public static int ERROR_CODE_NO_METHOD = -32601;
/**
* Invalid method parameters.
*/
public static int ERROR_CODE_BAD_PARAMS = -32602;
/**
* Internal JSON-RPC error.
*/
public static int ERROR_CODE_INTERNAL = -32603;
/**
* Reserved for implementation-defined server-errors.
*/
public static int ERROR_CODE_SERVER = -32000;
}
| apache-2.0 |
legal90/chef | lib/chef/exceptions.rb | 17513 | #
# Author:: Adam Jacob (<adam@opscode.com>)
# Author:: Seth Falcon (<seth@opscode.com>)
# Author:: Kyle Goodwin (<kgoodwin@primerevenue.com>)
# Copyright:: Copyright 2008-2010 Opscode, Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
class Chef
# == Chef::Exceptions
# Chef's custom exceptions are all contained within the Chef::Exceptions
# namespace.
class Exceptions
# Backcompat with Chef::ShellOut code:
require 'mixlib/shellout/exceptions'
def self.const_missing(const_name)
if const_name == :ShellCommandFailed
Chef::Log.warn("Chef::Exceptions::ShellCommandFailed is deprecated, use Mixlib::ShellOut::ShellCommandFailed")
called_from = caller[0..3].inject("Called from:\n") {|msg, trace_line| msg << " #{trace_line}\n" }
Chef::Log.warn(called_from)
Mixlib::ShellOut::ShellCommandFailed
else
super
end
end
class Application < RuntimeError; end
class Cron < RuntimeError; end
class Env < RuntimeError; end
class Exec < RuntimeError; end
class ErlCall < RuntimeError; end
class FileNotFound < RuntimeError; end
class Package < RuntimeError; end
class Service < RuntimeError; end
class Script < RuntimeError; end
class Route < RuntimeError; end
class SearchIndex < RuntimeError; end
class Override < RuntimeError; end
class UnsupportedAction < RuntimeError; end
class MissingLibrary < RuntimeError; end
class CannotDetermineNodeName < RuntimeError
def initialize
super "Unable to determine node name: configure node_name or configure the system's hostname and fqdn"
end
end
class User < RuntimeError; end
class Group < RuntimeError; end
class Link < RuntimeError; end
class Mount < RuntimeError; end
class PrivateKeyMissing < RuntimeError; end
class CannotWritePrivateKey < RuntimeError; end
class RoleNotFound < RuntimeError; end
class DuplicateRole < RuntimeError; end
class ValidationFailed < ArgumentError; end
class InvalidPrivateKey < ArgumentError; end
class ConfigurationError < ArgumentError; end
class RedirectLimitExceeded < RuntimeError; end
class AmbiguousRunlistSpecification < ArgumentError; end
class CookbookFrozen < ArgumentError; end
class CookbookNotFound < RuntimeError; end
# Cookbook loader used to raise an argument error when cookbook not found.
# for back compat, need to raise an error that inherits from ArgumentError
class CookbookNotFoundInRepo < ArgumentError; end
class RecipeNotFound < ArgumentError; end
class AttributeNotFound < RuntimeError; end
class MissingCookbookDependency < StandardError; end # CHEF-5120
class InvalidCommandOption < RuntimeError; end
class CommandTimeout < RuntimeError; end
class RequestedUIDUnavailable < RuntimeError; end
class InvalidHomeDirectory < ArgumentError; end
class DsclCommandFailed < RuntimeError; end
class PlistUtilCommandFailed < RuntimeError; end
class UserIDNotFound < ArgumentError; end
class GroupIDNotFound < ArgumentError; end
class ConflictingMembersInGroup < ArgumentError; end
class InvalidResourceReference < RuntimeError; end
class ResourceNotFound < RuntimeError; end
class VerificationNotFound < RuntimeError; end
# Can't find a Resource of this type that is valid on this platform.
class NoSuchResourceType < NameError
def initialize(short_name, node)
super "Cannot find a resource for #{short_name} on #{node[:platform]} version #{node[:platform_version]}"
end
end
class InvalidResourceSpecification < ArgumentError; end
class SolrConnectionError < RuntimeError; end
class IllegalChecksumRevert < RuntimeError; end
class CookbookVersionNameMismatch < ArgumentError; end
class MissingParentDirectory < RuntimeError; end
class UnresolvableGitReference < RuntimeError; end
class InvalidRemoteGitReference < RuntimeError; end
class InvalidEnvironmentRunListSpecification < ArgumentError; end
class InvalidDataBagItemID < ArgumentError; end
class InvalidDataBagName < ArgumentError; end
class EnclosingDirectoryDoesNotExist < ArgumentError; end
# Errors originating from calls to the Win32 API
class Win32APIError < RuntimeError; end
# Thrown when Win32 API layer binds to non-existent Win32 function. Occurs
# when older versions of Windows don't support newer Win32 API functions.
class Win32APIFunctionNotImplemented < NotImplementedError; end
# Attempting to run windows code on a not-windows node
class Win32NotWindows < RuntimeError; end
class WindowsNotAdmin < RuntimeError; end
# Attempting to access a 64-bit only resource on a 32-bit Windows system
class Win32ArchitectureIncorrect < RuntimeError; end
class ObsoleteDependencySyntax < ArgumentError; end
class InvalidDataBagPath < ArgumentError; end
class DuplicateDataBagItem < RuntimeError; end
class PowershellCmdletException < RuntimeError; end
class LCMParser < RuntimeError; end
class CannotDetermineHomebrewOwner < Package; end
# Can not create staging file during file deployment
class FileContentStagingError < RuntimeError
def initialize(errors)
super "Staging tempfile can not be created during file deployment.\n Errors: #{errors.join('\n')}!"
end
end
# A different version of a cookbook was added to a
# VersionedRecipeList than the one already there.
class CookbookVersionConflict < ArgumentError ; end
# does not follow X.Y.Z format. ArgumentError?
class InvalidPlatformVersion < ArgumentError; end
class InvalidCookbookVersion < ArgumentError; end
# version constraint should be a string or array, or it doesn't
# match OP VERSION. ArgumentError?
class InvalidVersionConstraint < ArgumentError; end
# Version constraints are not allowed in chef-solo
class IllegalVersionConstraint < NotImplementedError; end
class MetadataNotValid < StandardError; end
# Raised when a cookbook on disk has neither a metadata.rb nor a
# metadata.json file.
class MetadataNotFound < StandardError
  attr_reader :install_path, :cookbook_name

  # install_path  - directory the cookbook was expected to live in
  # cookbook_name - name of the cookbook that is missing its metadata
  def initialize(install_path, cookbook_name)
    @install_path = install_path
    @cookbook_name = cookbook_name
    super("No metadata.rb or metadata.json found for cookbook #{cookbook_name} in #{install_path}")
  end
end
# File operation attempted but no permissions to perform it
class InsufficientPermissions < RuntimeError; end
# Ifconfig failed
class Ifconfig < RuntimeError; end
# Invalid "source" parameter to a remote_file resource
class InvalidRemoteFileURI < ArgumentError; end
# Node::Attribute computes the merged version of attributes
# and makes it read-only. Attempting to modify a read-only
# attribute will cause this error.
class ImmutableAttributeModification < NoMethodError
# Fixed message explaining how to write at an explicit precedence level.
def initialize
super "Node attributes are read-only when you do not specify which precedence level to set. " +
%Q(To set an attribute use code like `node.default["key"] = "value"')
end
end
# Merged node attributes are invalidated when the component
# attributes are updated. Attempting to read from a stale copy
# of merged attributes will trigger this error.
class StaleAttributeRead < StandardError; end
# Registry Helper throws the following errors
class Win32RegArchitectureIncorrect < Win32ArchitectureIncorrect; end
class Win32RegHiveMissing < ArgumentError; end
class Win32RegKeyMissing < RuntimeError; end
class Win32RegValueMissing < RuntimeError; end
class Win32RegDataMissing < RuntimeError; end
class Win32RegValueExists < RuntimeError; end
class Win32RegNoRecursive < ArgumentError; end
class Win32RegTypeDoesNotExist < ArgumentError; end
class Win32RegBadType < ArgumentError; end
class Win32RegBadValueSize < ArgumentError; end
class Win32RegTypesMismatch < ArgumentError; end
# Raised when a configured environment path is invalid.
class InvalidEnvironmentPath < ArgumentError; end
# Raised when a named environment cannot be found.
class EnvironmentNotFound < RuntimeError; end
# File-like resource found a non-file (socket, pipe, directory, etc) at its destination
class FileTypeMismatch < RuntimeError; end
# File (or descendent) resource configured to manage symlink source, but
# the symlink that is there either loops or points to a nonexistent file
class InvalidSymlink < RuntimeError; end
# Raised when a child (forked) chef run fails to converge.
class ChildConvergeError < RuntimeError; end
# Raised when no provider can be found for a resource.
class NoProviderAvailable < RuntimeError; end
# Raised when a deprecated feature is used while
# treat_deprecation_warnings_as_errors is enabled.
class DeprecatedFeatureError < RuntimeError
  # Fixed: the method was misspelled "initalize", so Ruby never called it
  # and the explanatory suffix was never appended to the message.
  def initialize(message)
    super("#{message} (raising error due to treat_deprecation_warnings_as_errors being set)")
  end
end
# Raised when run list expansion references roles that do not exist.
class MissingRole < RuntimeError
  # Sentinel distinguishing "no argument supplied" from an explicit nil
  # or message argument.
  NULL = Object.new

  attr_reader :expansion

  # Accepts either nothing, a plain message String, or the
  # RunList::RunListExpansion whose errors list the missing roles.
  def initialize(message_or_expansion = NULL)
    @expansion = nil
    if message_or_expansion.equal?(NULL)
      super()
    elsif message_or_expansion.is_a?(String)
      super(message_or_expansion)
    elsif message_or_expansion.is_a?(RunList::RunListExpansion)
      @expansion = message_or_expansion
      super("The expanded run list includes nonexistent roles: #{@expansion.errors.join(', ')}")
    end
  end
end
# Collects several exceptions into one. Used when running delayed
# notifications so chef can process every delayed notification even if the
# chef client run or other notifications fail.
class MultipleFailures < StandardError
  def initialize(*args)
    super
    @all_failures = []
  end

  # Human-readable summary listing each recorded failure and where it
  # occurred, one per line.
  def message
    lines = ["Multiple failures occurred:\n"]
    @all_failures.each do |location, error|
      lines << "* #{error.class} occurred in #{location}: #{error.message}\n"
    end
    lines.join
  end

  # Record failure of the main chef run; this exception adopts its backtrace.
  def client_run_failure(exception)
    set_backtrace(exception.backtrace)
    @all_failures << ["chef run", exception]
  end

  # Record failure of a delayed notification.
  def notification_failure(exception)
    @all_failures << ["delayed notification", exception]
  end

  # Raise the collected failure(s), if any were recorded.
  def raise!
    raise for_raise unless empty?
  end

  def empty?
    @all_failures.empty?
  end

  # With exactly one recorded failure, raise the original exception
  # directly; otherwise raise this aggregate.
  def for_raise
    @all_failures.length == 1 ? @all_failures.first.last : self
  end
end
# Namespace for errors raised while solving cookbook version constraints.
# NOTE: the to_json key names below are a wire format consumed by the Chef
# server API; they must not be renamed.
class CookbookVersionSelection
# Compound exception: In run_list expansion and resolution,
# run_list items referred to cookbooks that don't exist and/or
# have no versions available.
class InvalidRunListItems < StandardError
attr_reader :non_existent_cookbooks
attr_reader :cookbooks_with_no_matching_versions
# message - human-readable summary
# non_existent_cookbooks - names that could not be found at all
# cookbooks_with_no_matching_versions - names with no version satisfying constraints
def initialize(message, non_existent_cookbooks, cookbooks_with_no_matching_versions)
super(message)
@non_existent_cookbooks = non_existent_cookbooks
@cookbooks_with_no_matching_versions = cookbooks_with_no_matching_versions
end
# Serializes the failure; note the wire key is "cookbooks_with_no_versions",
# not the attribute name.
def to_json(*a)
result = {
"message" => message,
"non_existent_cookbooks" => non_existent_cookbooks,
"cookbooks_with_no_versions" => cookbooks_with_no_matching_versions
}
Chef::JSONCompat.to_json(result, *a)
end
end
# In run_list expansion and resolution, a constraint was
# unsatisfiable.
#
# This exception may not be the complete error report. If you
# resolve the misconfiguration represented by this exception and
# re-solve, you may get another exception
class UnsatisfiableRunListItem < StandardError
attr_reader :run_list_item
attr_reader :non_existent_cookbooks, :most_constrained_cookbooks
# most_constrained_cookbooks: if I were to remove constraints
# regarding these cookbooks, I would get a solution or move on
# to the next error (deeper in the graph). An item in this list
# may be unsatisfiable, but when resolved may also reveal
# further unsatisfiable constraints; this condition would not be
# reported.
def initialize(message, run_list_item, non_existent_cookbooks, most_constrained_cookbooks)
super(message)
@run_list_item = run_list_item
@non_existent_cookbooks = non_existent_cookbooks
@most_constrained_cookbooks = most_constrained_cookbooks
end
# Serializes the failure for the Chef server API.
def to_json(*a)
result = {
"message" => message,
"unsatisfiable_run_list_item" => run_list_item,
"non_existent_cookbooks" => non_existent_cookbooks,
"most_constrained_cookbooks" => most_constrained_cookbooks
}
Chef::JSONCompat.to_json(result, *a)
end
end
end # CookbookVersionSelection
# Per RFC 2616, a user-agent should not automatically follow a redirect
# for any method other than GET or HEAD unless the user takes a specific
# action; a redirect received in response to such a request therefore
# raises InvalidRedirect.
class InvalidRedirect < StandardError; end

# Raised when the number of bytes in a downloaded body differs from the
# Content-Length header declared in the HTTP response.
class ContentLengthMismatch < RuntimeError
  def initialize(response_length, content_length)
    super("Response body length #{response_length} does not match HTTP Content-Length header #{content_length}.")
  end
end
# Raised when a feature is invoked on a platform that does not support it.
class UnsupportedPlatform < RuntimeError
  def initialize(platform)
    super("This functionality is not supported on platform #{platform}.")
  end
end
# Raised when Chef::Config[:run_lock_timeout] is set and another client run
# fails to release the run lock before that many seconds elapse.
class RunLockTimeout < RuntimeError
  # duration     - seconds waited for the lock
  # blocking_pid - pid of the process currently holding it
  def initialize(duration, blocking_pid)
    super("Unable to acquire lock. Waited #{duration} seconds for #{blocking_pid} to release.")
  end
end
# Raised when the checksum declared on a resource does not match the
# checksum computed from the actual content.
class ChecksumMismatch < RuntimeError
  def initialize(res_cksum, cont_cksum)
    super("Checksum on resource (#{res_cksum}) does not match checksum on content (#{cont_cksum})")
  end
end
# Raised when a configured HTTP proxy URI cannot be parsed.
class BadProxyURI < RuntimeError; end
# Raised by Chef::JSONCompat
class JSON
# Raised when an object cannot be serialized to JSON.
class EncodeError < RuntimeError; end
# Raised when a JSON document cannot be parsed.
class ParseError < RuntimeError; end
end
# Raised when a search query string is malformed.
class InvalidSearchQuery < ArgumentError; end
# Raised by Chef::ProviderResolver when more than one provider class claims
# a resource and a single winner cannot be chosen.
class AmbiguousProviderResolution < RuntimeError
  # resource - the resource being resolved (must respond to #resource_name)
  # classes  - the competing provider classes
  def initialize(resource, classes)
    super("Found more than one provider for #{resource.resource_name} resource: #{classes}")
  end
end
# Raised when two audit control groups are registered under the same name.
class AuditControlGroupDuplicate < RuntimeError
  def initialize(name)
    super("Control group with name '#{name}' has already been defined")
  end
end
# Raised when an audit control group is declared without a name.
class AuditNameMissing < RuntimeError; end
# Raised when an audit control group is declared without a controls block.
class NoAuditsProvided < RuntimeError
  def initialize
    super("You must provide a block with controls")
  end
end
# Raised at the end of the audit phase when one or more controls failed.
class AuditsFailed < RuntimeError
  # num_failed - count of failing controls
  # num_total  - total count of controls that ran
  def initialize(num_failed, num_total)
    super("Audit phase found failures - #{num_failed}/#{num_total} controls failed")
  end
end
# Wraps converge and/or audit failures into a single error so both issues
# appear in one report. nil entries are tolerated and dropped. Call
# #fill_backtrace to populate this error's backtrace from the wrapped ones.
class RunFailedWrappingError < RuntimeError
  attr_reader :wrapped_errors

  def initialize(*errors)
    @wrapped_errors = errors.compact
    super("Found #{@wrapped_errors.size} errors, they are stored in the backtrace")
  end

  # Builds a numbered backtrace: each wrapped error contributes a header
  # line, its own backtrace (when present), and a blank separator line.
  def fill_backtrace
    trace = []
    @wrapped_errors.each.with_index(1) do |err, idx|
      trace << "#{idx}) #{err.class} - #{err.message}"
      trace.concat(err.backtrace) if err.backtrace
      trace << ""
    end
    set_backtrace(trace)
  end
end
# Raised when chef-client is configured such that its PID file and its run
# lock file would be the same path.
class PIDFileLockfileMatch < RuntimeError
def initialize
super "PID file and lockfile are not permitted to match. Specify a different location with --pid or --lockfile"
end
end
# Raised when a DSC resource name matches more than one resource on the
# system, so Chef cannot decide which implementation to use.
class MultipleDscResourcesFound < RuntimeError
  attr_reader :resources_found

  # resources_found - Array of Hashes, one per match; each has a 'Name' key
  # and a 'Module' key ('Module' is nil for binary resources, otherwise a
  # Hash with its own 'Name').
  def initialize(resources_found)
    @resources_found = resources_found
    # map (not each): each returned the original array, so the description
    # strings built in the block were discarded and the message joined the
    # raw hashes instead.
    matches_info = @resources_found.map do |r|
      if r['Module'].nil?
        # No containing module: it is a binary resource. (The branches were
        # previously inverted, dereferencing r['Module']['Name'] on nil.)
        "Resource #{r['Name']} is a binary resource"
      else
        "Resource #{r['Name']} was found in #{r['Module']['Name']}"
      end
    end
    super "Found multiple matching resources. #{matches_info.join("\n")}"
  end
end
end
end
| apache-2.0 |
prabushi/devstudio-tooling-esb | plugins/org.wso2.developerstudio.eclipse.gmf.esb.edit/src-gen/org/wso2/developerstudio/eclipse/gmf/esb/components/SendMediatorInputConnectorPropertiesEditionComponent.java | 6707 | /**
* Generated with Acceleo
*/
package org.wso2.developerstudio.eclipse.gmf.esb.components;
// Start of user code for imports
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.common.util.BasicDiagnostic;
import org.eclipse.emf.common.util.Diagnostic;
import org.eclipse.emf.common.util.WrappedException;
import org.eclipse.emf.ecore.EObject;
import org.eclipse.emf.ecore.EStructuralFeature;
import org.eclipse.emf.ecore.resource.ResourceSet;
import org.eclipse.emf.ecore.util.EcoreUtil;
import org.eclipse.emf.eef.runtime.api.notify.EStructuralFeatureNotificationFilter;
import org.eclipse.emf.eef.runtime.api.notify.IPropertiesEditionEvent;
import org.eclipse.emf.eef.runtime.api.notify.NotificationFilter;
import org.eclipse.emf.eef.runtime.context.PropertiesEditingContext;
import org.eclipse.emf.eef.runtime.impl.components.SinglePartPropertiesEditingComponent;
import org.eclipse.emf.eef.runtime.impl.filters.EObjectFilter;
import org.eclipse.emf.eef.runtime.impl.notify.PropertiesEditionEvent;
import org.eclipse.emf.eef.runtime.impl.utils.EEFConverterUtil;
import org.eclipse.emf.eef.runtime.ui.widgets.referencestable.ReferencesTableSettings;
import org.wso2.developerstudio.eclipse.gmf.esb.EsbLink;
import org.wso2.developerstudio.eclipse.gmf.esb.EsbPackage;
import org.wso2.developerstudio.eclipse.gmf.esb.SendMediatorInputConnector;
import org.wso2.developerstudio.eclipse.gmf.esb.parts.EsbViewsRepository;
import org.wso2.developerstudio.eclipse.gmf.esb.parts.SendMediatorInputConnectorPropertiesEditionPart;
// End of user code
/**
*
*
*/
// Acceleo-generated EEF properties-editing component for the SendMediator
// input connector. It binds the single "Base" part to the semantic model's
// incomingLinks reference. Hand edits are lost on regeneration.
public class SendMediatorInputConnectorPropertiesEditionComponent extends SinglePartPropertiesEditingComponent {
// Identifier of the only editing part managed by this component.
public static String BASE_PART = "Base"; //$NON-NLS-1$
/**
* Settings for incomingLinks ReferencesTable
*/
private ReferencesTableSettings incomingLinksSettings;
/**
* Default constructor
*
*/
public SendMediatorInputConnectorPropertiesEditionComponent(PropertiesEditingContext editingContext, EObject sendMediatorInputConnector, String editing_mode) {
super(editingContext, sendMediatorInputConnector, editing_mode);
parts = new String[] { BASE_PART };
repositoryKey = EsbViewsRepository.class;
partKey = EsbViewsRepository.SendMediatorInputConnector.class;
}
/**
* {@inheritDoc}
*
* @see org.eclipse.emf.eef.runtime.api.component.IPropertiesEditionComponent#initPart(java.lang.Object, int, org.eclipse.emf.ecore.EObject,
* org.eclipse.emf.ecore.resource.ResourceSet)
*
*/
public void initPart(Object key, int kind, EObject elt, ResourceSet allResource) {
// Guard against change notifications firing while values are seeded.
setInitializing(true);
if (editingPart != null && key == partKey) {
editingPart.setContext(elt, allResource);
final SendMediatorInputConnector sendMediatorInputConnector = (SendMediatorInputConnector)elt;
final SendMediatorInputConnectorPropertiesEditionPart basePart = (SendMediatorInputConnectorPropertiesEditionPart)editingPart;
// init values
if (isAccessible(EsbViewsRepository.SendMediatorInputConnector.Properties.incomingLinks)) {
incomingLinksSettings = new ReferencesTableSettings(sendMediatorInputConnector, EsbPackage.eINSTANCE.getInputConnector_IncomingLinks());
basePart.initIncomingLinks(incomingLinksSettings);
}
// init filters
if (isAccessible(EsbViewsRepository.SendMediatorInputConnector.Properties.incomingLinks)) {
// Only EsbLink instances may populate the incomingLinks table.
basePart.addFilterToIncomingLinks(new EObjectFilter(EsbPackage.Literals.ESB_LINK));
// Start of user code for additional businessfilters for incomingLinks
// End of user code
}
// init values for referenced views
// init filters for referenced views
}
setInitializing(false);
}
/**
* {@inheritDoc}
* @see org.eclipse.emf.eef.runtime.impl.components.StandardPropertiesEditionComponent#associatedFeature(java.lang.Object)
*/
public EStructuralFeature associatedFeature(Object editorKey) {
if (editorKey == EsbViewsRepository.SendMediatorInputConnector.Properties.incomingLinks) {
return EsbPackage.eINSTANCE.getInputConnector_IncomingLinks();
}
return super.associatedFeature(editorKey);
}
/**
* {@inheritDoc}
* @see org.eclipse.emf.eef.runtime.impl.components.StandardPropertiesEditionComponent#updateSemanticModel(org.eclipse.emf.eef.runtime.api.notify.IPropertiesEditionEvent)
*
*/
public void updateSemanticModel(final IPropertiesEditionEvent event) {
SendMediatorInputConnector sendMediatorInputConnector = (SendMediatorInputConnector)semanticObject;
if (EsbViewsRepository.SendMediatorInputConnector.Properties.incomingLinks == event.getAffectedEditor()) {
// Translate UI table edits (add/remove/move) into model reference edits.
if (event.getKind() == PropertiesEditionEvent.ADD) {
if (event.getNewValue() instanceof EsbLink) {
incomingLinksSettings.addToReference((EObject) event.getNewValue());
}
} else if (event.getKind() == PropertiesEditionEvent.REMOVE) {
incomingLinksSettings.removeFromReference((EObject) event.getNewValue());
} else if (event.getKind() == PropertiesEditionEvent.MOVE) {
incomingLinksSettings.move(event.getNewIndex(), (EsbLink) event.getNewValue());
}
}
}
/**
* {@inheritDoc}
* @see org.eclipse.emf.eef.runtime.impl.components.StandardPropertiesEditionComponent#updatePart(org.eclipse.emf.common.notify.Notification)
*/
public void updatePart(Notification msg) {
super.updatePart(msg);
if (editingPart.isVisible()) {
SendMediatorInputConnectorPropertiesEditionPart basePart = (SendMediatorInputConnectorPropertiesEditionPart)editingPart;
if (EsbPackage.eINSTANCE.getInputConnector_IncomingLinks().equals(msg.getFeature()) && isAccessible(EsbViewsRepository.SendMediatorInputConnector.Properties.incomingLinks))
basePart.updateIncomingLinks();
}
}
/**
* {@inheritDoc}
*
* @see org.eclipse.emf.eef.runtime.impl.components.StandardPropertiesEditionComponent#getNotificationFilters()
*/
@Override
protected NotificationFilter[] getNotificationFilters() {
NotificationFilter filter = new EStructuralFeatureNotificationFilter(
EsbPackage.eINSTANCE.getInputConnector_IncomingLinks() );
return new NotificationFilter[] {filter,};
}
/**
* {@inheritDoc}
*
* @see org.eclipse.emf.eef.runtime.api.component.IPropertiesEditionComponent#validateValue(org.eclipse.emf.eef.runtime.api.notify.IPropertiesEditionEvent)
*
*/
public Diagnostic validateValue(IPropertiesEditionEvent event) {
Diagnostic ret = Diagnostic.OK_INSTANCE;
if (event.getNewValue() != null) {
// NOTE(review): generated empty try/catch — no per-value validation is
// generated for this part, so the result is always OK unless the
// generator later emits conversion code here.
try {
} catch (IllegalArgumentException iae) {
ret = BasicDiagnostic.toDiagnostic(iae);
} catch (WrappedException we) {
ret = BasicDiagnostic.toDiagnostic(we);
}
}
return ret;
}
}
| apache-2.0 |
ONLYOFFICE/document-server-integration | web/documentserver-example/csharp/DocEditor.aspx.designer.cs | 764 | //------------------------------------------------------------------------------
// <auto-generated>
// This code was generated by a tool.
//
// Changes to this file may cause incorrect behavior and will be lost if
// the code is regenerated.
// </auto-generated>
//------------------------------------------------------------------------------
// Designer-generated partial class for DocEditor.aspx; declares server-side
// control fields. Edits here are overwritten when the designer regenerates.
namespace OnlineEditorsExample {
public partial class DocEditor {
/// <summary>
/// form1 control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.HtmlControls.HtmlForm form1;
}
}
| apache-2.0 |
pwong-mapr/private-hue | apps/hbase/src/hbase/urls.py | 961 | #!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from django.conf.urls.defaults import patterns, url
# URL routes for the HBase app, resolved against view callables in
# `hbase.views` (old-style Django string-prefix `patterns`).
urlpatterns = patterns('hbase.views',
# Landing page: renders the single-page app shell.
url(r'^$', 'app', name='index'),
# Catch-all pass-through: the rest of the path is captured as `url` and
# forwarded to the HBase API by `api_router`.
url(r'api/(?P<url>.+)$', 'api_router')
)
| apache-2.0 |
vadopolski/ignite | modules/core/src/main/java/org/apache/ignite/configuration/SqlConnectorConfiguration.java | 7634 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.configuration;
import org.apache.ignite.internal.util.typedef.internal.S;
import org.jetbrains.annotations.Nullable;
/**
* SQL connector configuration.
*/
/**
 * Configuration of the SQL connector endpoint used by thin SQL clients.
 * All setters return {@code this} to allow fluent chaining.
 */
public class SqlConnectorConfiguration {
    /** Default TCP port. */
    public static final int DFLT_PORT = 10800;

    /** Default number of sequential ports tried when the base port is busy. */
    public static final int DFLT_PORT_RANGE = 100;

    /** Default socket send and receive buffer size ({@code 0} = OS default). */
    public static final int DFLT_SOCK_BUF_SIZE = 0;

    /** Default value for {@code TCP_NODELAY} socket option (value is <tt>true</tt>). */
    public static final boolean DFLT_TCP_NO_DELAY = true;

    /** Default max number of open cursors per connection. */
    public static final int DFLT_MAX_OPEN_CURSORS_PER_CONN = 128;

    /** Default size of thread pool. */
    public static final int DFLT_THREAD_POOL_SIZE = IgniteConfiguration.DFLT_PUBLIC_THREAD_CNT;

    /** Host to bind to. */
    private String host;

    /** TCP port to listen on. */
    private int port = DFLT_PORT;

    /** Number of consecutive ports to try. */
    private int portRange = DFLT_PORT_RANGE;

    /** Socket send buffer size. */
    private int sockSndBufSize = DFLT_SOCK_BUF_SIZE;

    /** Socket receive buffer size. */
    private int sockRcvBufSize = DFLT_SOCK_BUF_SIZE;

    /** {@code TCP_NODELAY} flag. */
    private boolean tcpNoDelay = DFLT_TCP_NO_DELAY;

    /** Maximum number of cursors a single connection may keep open. */
    private int maxOpenCursorsPerConn = DFLT_MAX_OPEN_CURSORS_PER_CONN;

    /** Size of the thread pool servicing SQL requests. */
    private int threadPoolSize = DFLT_THREAD_POOL_SIZE;

    /**
     * Creates a configuration with every property at its default value.
     */
    public SqlConnectorConfiguration() {
        // No-op.
    }

    /**
     * Copy constructor: duplicates all properties of the given configuration.
     *
     * @param cfg Configuration to copy (must not be {@code null}).
     */
    public SqlConnectorConfiguration(SqlConnectorConfiguration cfg) {
        assert cfg != null;

        host = cfg.getHost();
        port = cfg.getPort();
        portRange = cfg.getPortRange();
        sockSndBufSize = cfg.getSocketSendBufferSize();
        sockRcvBufSize = cfg.getSocketReceiveBufferSize();
        tcpNoDelay = cfg.isTcpNoDelay();
        maxOpenCursorsPerConn = cfg.getMaxOpenCursorsPerConnection();
        threadPoolSize = cfg.getThreadPoolSize();
    }

    /**
     * Get host.
     *
     * @return Host, or {@code null} if not set.
     */
    @Nullable public String getHost() {
        return host;
    }

    /**
     * Set host.
     *
     * @param host Host.
     * @return This instance for chaining.
     */
    public SqlConnectorConfiguration setHost(@Nullable String host) {
        this.host = host;

        return this;
    }

    /**
     * Get port.
     *
     * @return Port.
     */
    public int getPort() {
        return port;
    }

    /**
     * Set port.
     *
     * @param port Port.
     * @return This instance for chaining.
     */
    public SqlConnectorConfiguration setPort(int port) {
        this.port = port;

        return this;
    }

    /**
     * Get port range.
     *
     * @return Port range.
     */
    public int getPortRange() {
        return portRange;
    }

    /**
     * Set port range.
     *
     * @param portRange Port range.
     * @return This instance for chaining.
     */
    public SqlConnectorConfiguration setPortRange(int portRange) {
        this.portRange = portRange;

        return this;
    }

    /**
     * Gets socket send buffer size. When set to zero, operation system default will be used.
     * <p>
     * Defaults to {@link #DFLT_SOCK_BUF_SIZE}
     *
     * @return Socket send buffer size in bytes.
     */
    public int getSocketSendBufferSize() {
        return sockSndBufSize;
    }

    /**
     * Sets socket send buffer size. See {@link #getSocketSendBufferSize()} for more information.
     *
     * @param sockSndBufSize Socket send buffer size in bytes.
     * @return This instance for chaining.
     */
    public SqlConnectorConfiguration setSocketSendBufferSize(int sockSndBufSize) {
        this.sockSndBufSize = sockSndBufSize;

        return this;
    }

    /**
     * Gets socket receive buffer size. When set to zero, operation system default will be used.
     * <p>
     * Defaults to {@link #DFLT_SOCK_BUF_SIZE}.
     *
     * @return Socket receive buffer size in bytes.
     */
    public int getSocketReceiveBufferSize() {
        return sockRcvBufSize;
    }

    /**
     * Sets socket receive buffer size. See {@link #getSocketReceiveBufferSize()} for more information.
     *
     * @param sockRcvBufSize Socket receive buffer size in bytes.
     * @return This instance for chaining.
     */
    public SqlConnectorConfiguration setSocketReceiveBufferSize(int sockRcvBufSize) {
        this.sockRcvBufSize = sockRcvBufSize;

        return this;
    }

    /**
     * Get TCP NO_DELAY flag.
     *
     * @return TCP NO_DELAY flag.
     */
    public boolean isTcpNoDelay() {
        return tcpNoDelay;
    }

    /**
     * Set TCP NO_DELAY flag.
     *
     * @param tcpNoDelay TCP NO_DELAY flag.
     * @return This instance for chaining.
     */
    public SqlConnectorConfiguration setTcpNoDelay(boolean tcpNoDelay) {
        this.tcpNoDelay = tcpNoDelay;

        return this;
    }

    /**
     * Gets maximum number of opened cursors per connection.
     * <p>
     * Defaults to {@link #DFLT_MAX_OPEN_CURSORS_PER_CONN}.
     *
     * @return Maximum number of opened cursors.
     */
    public int getMaxOpenCursorsPerConnection() {
        return maxOpenCursorsPerConn;
    }

    /**
     * Sets maximum number of opened cursors per connection.
     *
     * @param maxOpenCursorsPerConn Maximum number of opened cursors.
     * @return This instance for chaining.
     */
    public SqlConnectorConfiguration setMaxOpenCursorsPerConnection(int maxOpenCursorsPerConn) {
        this.maxOpenCursorsPerConn = maxOpenCursorsPerConn;

        return this;
    }

    /**
     * Size of thread pool that is in charge of processing SQL requests.
     * <p>
     * Defaults {@link #DFLT_THREAD_POOL_SIZE}.
     *
     * @return Thread pool that is in charge of processing SQL requests.
     */
    public int getThreadPoolSize() {
        return threadPoolSize;
    }

    /**
     * Sets thread pool that is in charge of processing SQL requests. See {@link #getThreadPoolSize()} for more
     * information.
     *
     * @param threadPoolSize Thread pool that is in charge of processing SQL requests.
     * @return This instance for chaining.
     */
    public SqlConnectorConfiguration setThreadPoolSize(int threadPoolSize) {
        this.threadPoolSize = threadPoolSize;

        return this;
    }

    /** {@inheritDoc} */
    @Override public String toString() {
        return S.toString(SqlConnectorConfiguration.class, this);
    }
}
| apache-2.0 |
medgar/click | examples/src/org/apache/click/examples/page/wizard/Step1.java | 4765 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.click.examples.page.wizard;
import java.util.ArrayList;
import java.util.List;
import org.apache.click.control.Option;
import org.apache.click.control.Select;
import org.apache.click.control.TextField;
import org.apache.click.dataprovider.DataProvider;
import org.apache.click.examples.domain.Address;
import org.apache.click.examples.domain.Client;
import org.apache.click.examples.domain.SystemCode;
import org.apache.click.examples.service.ClientService;
import org.apache.click.extras.control.DateField;
import org.apache.click.extras.control.EmailField;
/**
* The first step in the 3 step process is to capture the Client details.
* <p/>
* Note this Panel has no associated template.
*/
// Wizard step 1: builds the client-details form, restores any Client saved
// in the HTTP session, and on "Next" creates/updates the Client and stores
// it back in the session before advancing.
public class Step1 extends Step {
private static final long serialVersionUID = 1L;
// Variables --------------------------------------------------------------
/** The client domain object created through the wizard. */
private Client client;
/** The client service. */
private ClientService clientService;
// Constructors -----------------------------------------------------------
/**
* Construct Step1 with the specified name, label, description and page.
*
* @param name the step name
* @param label the step label
* @param description the step description
* @param page the wizard page
*/
public Step1(String name, String label, String description, WizardPage page) {
super(name, label, description, page);
Select titleSelect = new Select("title", true);
setupTitleSelect(titleSelect);
getForm().add(titleSelect);
getForm().add(new TextField("firstName", true));
getForm().add(new TextField("lastName", true));
getForm().add(new DateField("dateJoined", true));
getForm().add(new EmailField("email"));
// Re-populate the form when the user navigates back to this step.
// NOTE(review): "WizardUils" looks like a typo for "WizardUtils"; the
// class is declared elsewhere, so it cannot be renamed in this file alone.
client = WizardUils.getClientFromSession();
if (client != null) {
getForm().copyFrom(client);
}
}
// Public methods ---------------------------------------------------------
/**
* The onNext action of Step1 sets the Page to stateful, checks if the form
* is valid, moves to the next step in the process and passes the client to
* the next step.
*
* @return true if page processing should continue or not
*/
@Override
public boolean onNext() {
if (getForm().isValid()) {
// Only create client if no client was loaded from the session in this
// Step's constructor. This allows the user to freely navigate backwards
// and forwards through the wizard without overwriting a previous Client
// instance
if (client == null) {
ClientService service = getClientService();
client = service.createNewClient();
Address address = service.createNewAddress();
client.setAddress(address);
}
getForm().copyTo(client);
WizardUils.saveClientInSession(client);
getWizardPage().next();
}
return true;
}
/**
* Return the client service, lazily instantiated on first use.
*
* @return the client service
*/
public ClientService getClientService() {
if (clientService == null) {
clientService = new ClientService();
}
return clientService;
}
// Private methods --------------------------------------------------------
// Configures the title drop-down: empty default option plus one option per
// title SystemCode, loaded lazily via the DataProvider callback.
private void setupTitleSelect(Select select) {
select.setDefaultOption(Option.EMPTY_OPTION);
select.setDataProvider(new DataProvider() {
public List<Option> getData() {
List<Option> options = new ArrayList<Option>();
List<SystemCode> titles = getClientService().getTitles();
for (SystemCode title : titles) {
options.add(new Option(title.getValue(), title.getLabel()));
}
return options;
}
});
}
}
| apache-2.0 |
estebank/gitiles | gitiles-servlet/src/main/java/com/google/gitiles/LogSoyData.java | 4971 | // Copyright 2012 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gitiles;
import static com.google.common.base.MoreObjects.firstNonNull;
import static com.google.common.base.Preconditions.checkNotNull;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import com.google.gitiles.CommitData.Field;
import com.google.template.soy.tofu.SoyTofu;
import org.eclipse.jgit.lib.Config;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.revwalk.RevCommit;
import java.io.IOException;
import java.io.Writer;
import java.util.Map;
import java.util.Set;
import javax.annotation.Nullable;
import javax.servlet.http.HttpServletRequest;
public class LogSoyData {
private static final ImmutableSet<Field> FIELDS = Sets.immutableEnumSet(Field.ABBREV_SHA,
Field.SHA, Field.URL, Field.SHORT_MESSAGE, Field.MESSAGE, Field.AUTHOR, Field.COMMITTER,
Field.BRANCHES, Field.TAGS);
private static final ImmutableSet<Field> VERBOSE_FIELDS = Field.setOf(FIELDS, Field.DIFF_TREE);
private final HttpServletRequest req;
private final GitilesView view;
private final Set<Field> fields;
private final String pretty;
private final String variant;
public LogSoyData(HttpServletRequest req, GitilesAccess access, String pretty)
throws IOException {
this.req = checkNotNull(req);
this.view = checkNotNull(ViewFilter.getView(req));
this.pretty = checkNotNull(pretty);
Config config = access.getConfig();
fields = config.getBoolean("logFormat", pretty, "verbose", false) ? VERBOSE_FIELDS : FIELDS;
variant = firstNonNull(config.getString("logFormat", pretty, "variant"), pretty);
}
public void renderStreaming(Paginator paginator, @Nullable String revision, Renderer renderer,
Writer out, DateFormatter df) throws IOException {
renderer.newRenderer("gitiles.logEntriesHeader")
.setData(toHeaderSoyData(paginator, revision))
.render(out);
out.flush();
SoyTofu.Renderer entryRenderer = renderer.newRenderer("gitiles.logEntryWrapper");
boolean first = true;
for (RevCommit c : paginator) {
entryRenderer.setData(toEntrySoyData(paginator, c, df, first)).render(out);
out.flush();
first = false;
}
if (first) {
renderer.newRenderer("gitiles.emptyLog").render(out);
}
renderer.newRenderer("gitiles.logEntriesFooter")
.setData(toFooterSoyData(paginator, revision))
.render(out);
}
private Map<String, Object> toHeaderSoyData(Paginator paginator, @Nullable String revision) {
Map<String, Object> data = Maps.newHashMapWithExpectedSize(5);
data.put("logEntryPretty", pretty);
ObjectId prev = paginator.getPreviousStart();
if (prev != null) {
GitilesView.Builder prevView = copyAndCanonicalizeView(revision);
if (!prevView.getRevision().getId().equals(prev)) {
prevView.replaceParam(LogServlet.START_PARAM, prev.name());
}
data.put("previousUrl", prevView.toUrl());
}
return data;
}
  /**
   * Builds the Soy data map for a single log entry.
   *
   * @param paginator supplies the shared RevWalk used to load commit data.
   * @param c the commit to render.
   * @param df formatter for commit dates.
   * @param first whether this is the first entry on the page; combined with the
   *     presence of a previous page to drive "firstWithPrevious" in the template.
   * @throws IOException if loading commit data fails.
   */
  private Map<String, Object> toEntrySoyData(Paginator paginator, RevCommit c, DateFormatter df,
      boolean first) throws IOException {
    Map<String, Object> entry = new CommitSoyData().setRevWalk(paginator.getWalk())
        .toSoyData(req, c, fields, df);
    return ImmutableMap.of(
        "firstWithPrevious", first && paginator.getPreviousStart() != null,
        "variant", variant,
        "entry", entry);
  }
private Map<String, Object> toFooterSoyData(Paginator paginator, @Nullable String revision) {
Map<String, Object> data = Maps.newHashMapWithExpectedSize(1);
ObjectId next = paginator.getNextStart();
if (next != null) {
data.put("nextUrl", copyAndCanonicalizeView(revision)
.replaceParam(LogServlet.START_PARAM, next.name())
.toUrl());
}
return data;
}
  /**
   * Copies the current view into a log-view builder with a concrete revision.
   *
   * <p>If the view already has a revision it is reused; otherwise the given
   * revision name is used. The old revision is carried over when present.
   *
   * @param revision fallback revision name when the view has none.
   */
  private GitilesView.Builder copyAndCanonicalizeView(String revision) {
    // Canonicalize the view by using full SHAs.
    GitilesView.Builder copy = GitilesView.log().copyFrom(view);
    if (view.getRevision() != Revision.NULL) {
      copy.setRevision(view.getRevision());
    } else {
      copy.setRevision(Revision.named(revision));
    }
    if (view.getOldRevision() != Revision.NULL) {
      copy.setOldRevision(view.getOldRevision());
    }
    return copy;
  }
}
| apache-2.0 |
xasx/assertj-core | src/test/java/org/assertj/core/api/fail/Fail_fail_because_exception_was_not_thrown_Test.java | 1692 | /*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*
* Copyright 2012-2019 the original author or authors.
*/
package org.assertj.core.api.fail;
import static org.assertj.core.api.Assertions.assertThatExceptionOfType;
import static org.assertj.core.api.Fail.failBecauseExceptionWasNotThrown;
import org.assertj.core.api.Fail;
import org.junit.jupiter.api.Test;
/**
 * Tests for <code>{@link Fail#shouldHaveThrown(Class)}</code>.
*
* @author Joel Costigliola
*/
public class Fail_fail_because_exception_was_not_thrown_Test {

  @Test
  public void should_include_message_built_with_given_exception_name() {
    // failBecauseExceptionWasNotThrown must itself throw an AssertionError whose
    // message names the exception class that was expected but never thrown.
    assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> failBecauseExceptionWasNotThrown(NullPointerException.class))
                                                   .withMessage("NullPointerException should have been thrown");
  }

  @Test
  public void should_include_message_built_with_given_throwable_name() {
    // Same contract for Error subclasses, not just Exceptions.
    assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> failBecauseExceptionWasNotThrown(OutOfMemoryError.class))
                                                   .withMessage("OutOfMemoryError should have been thrown");
  }
}
| apache-2.0 |
Sonnbc/modelCheckingCassandra | src/java/org/apache/cassandra/cql3/statements/UpdateStatement.java | 17078 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.cql3.statements;
import java.nio.ByteBuffer;
import java.util.*;
import org.apache.cassandra.cql3.*;
import org.apache.cassandra.config.CFMetaData;
import org.apache.cassandra.db.*;
import org.apache.cassandra.db.marshal.*;
import org.apache.cassandra.exceptions.*;
import org.apache.cassandra.utils.ByteBufferUtil;
import org.apache.cassandra.utils.Pair;
import static org.apache.cassandra.cql.QueryProcessor.validateKey;
import static org.apache.cassandra.thrift.ThriftValidation.validateColumnFamily;
/**
* An <code>UPDATE</code> statement parsed from a CQL query statement.
*
*/
public class UpdateStatement extends ModificationStatement
{
    private CFDefinition cfDef;

    // Provided for an UPDATE
    private final List<Pair<ColumnIdentifier, Operation.RawUpdate>> operations;
    private final List<Relation> whereClause;

    // Provided for an INSERT
    private final List<ColumnIdentifier> columnNames;
    private final List<Term.Raw> columnValues;

    // Populated during prepare(): operations on non-PK columns, and the values
    // bound to each PRIMARY KEY column respectively.
    private final List<Operation> processedColumns = new ArrayList<Operation>();
    private final Map<ColumnIdentifier, List<Term>> processedKeys = new HashMap<ColumnIdentifier, List<Term>>();

    // Used for COMPACT STORAGE tables that declare no value column: sets an empty value
    // so the row exists even though only the PK was provided.
    private static final Operation setToEmptyOperation = new Constants.Setter(null, new Constants.Value(ByteBufferUtil.EMPTY_BYTE_BUFFER));

    /**
     * Creates a new UpdateStatement from a column family name, columns map, consistency
     * level, and key term.
     *
     * @param name column family being operated on
     * @param operations a map of column operations to perform
     * @param whereClause the where clause
     * @param attrs additional attributes for statement (CL, timestamp, timeToLive)
     */
    public UpdateStatement(CFName name,
                           List<Pair<ColumnIdentifier, Operation.RawUpdate>> operations,
                           List<Relation> whereClause,
                           Attributes attrs)
    {
        super(name, attrs);
        this.operations = operations;
        this.whereClause = whereClause;
        this.columnNames = null;
        this.columnValues = null;
    }

    /**
     * Creates a new UpdateStatement from a column family name, a consistency level,
     * key, and lists of column names and values. It is intended for use with the
     * alternate update format, <code>INSERT</code>.
     *
     * @param name column family being operated on
     * @param columnNames list of column names
     * @param columnValues list of column values (corresponds to names)
     * @param attrs additional attributes for statement (CL, timestamp, timeToLive)
     */
    public UpdateStatement(CFName name,
                           Attributes attrs,
                           List<ColumnIdentifier> columnNames,
                           List<Term.Raw> columnValues)
    {
        super(name, attrs);
        this.columnNames = columnNames;
        this.columnValues = columnValues;
        this.operations = null;
        this.whereClause = null;
    }

    /** Validates the consistency level for this write, using counter-specific rules for counter tables. */
    protected void validateConsistency(ConsistencyLevel cl) throws InvalidRequestException
    {
        if (type == Type.COUNTER)
            cl.validateCounterForWrite(cfDef.cfm);
        else
            cl.validateForWrite(cfDef.cfm.ksName);
    }

    /** {@inheritDoc} */
    public Collection<IMutation> getMutations(List<ByteBuffer> variables, boolean local, ConsistencyLevel cl, long now)
    throws RequestExecutionException, RequestValidationException
    {
        List<ByteBuffer> keys = buildKeyNames(cfDef, processedKeys, variables);

        ColumnNameBuilder builder = cfDef.getColumnNameBuilder();
        buildColumnNames(cfDef, processedKeys, builder, variables, true);

        // Lists SET operation incurs a read.
        Set<ByteBuffer> toRead = null;
        for (Operation op : processedColumns)
        {
            if (op.requiresRead())
            {
                if (toRead == null)
                    toRead = new TreeSet<ByteBuffer>(UTF8Type.instance);
                toRead.add(op.columnName.key);
            }
        }

        Map<ByteBuffer, ColumnGroupMap> rows = toRead != null ? readRows(keys, builder, toRead, (CompositeType)cfDef.cfm.comparator, local, cl) : null;

        Collection<IMutation> mutations = new LinkedList<IMutation>();
        UpdateParameters params = new UpdateParameters(variables, getTimestamp(now), getTimeToLive(), rows);

        for (ByteBuffer key: keys)
            mutations.add(mutationForKey(cfDef, key, builder, params, cl));

        return mutations;
    }

    // Builds the clustering column prefix from the processed PK values.
    // Returns the first empty component or null if none are
    static CFDefinition.Name buildColumnNames(CFDefinition cfDef, Map<ColumnIdentifier, List<Term>> processed, ColumnNameBuilder builder, List<ByteBuffer> variables, boolean requireAllComponent)
    throws InvalidRequestException
    {
        CFDefinition.Name firstEmpty = null;
        for (CFDefinition.Name name : cfDef.columns.values())
        {
            List<Term> values = processed.get(name.name);
            if (values == null || values.isEmpty())
            {
                firstEmpty = name;
                // For sparse composites, all clustering components must be provided.
                if (requireAllComponent && cfDef.isComposite && !cfDef.isCompact)
                    throw new InvalidRequestException(String.format("Missing mandatory PRIMARY KEY part %s", name));
            }
            else if (firstEmpty != null)
            {
                // A later component is set while an earlier one is missing: invalid prefix.
                throw new InvalidRequestException(String.format("Missing PRIMARY KEY part %s since %s is set", firstEmpty.name, name.name));
            }
            else
            {
                assert values.size() == 1; // We only allow IN for row keys so far
                ByteBuffer val = values.get(0).bindAndGet(variables);
                if (val == null)
                    throw new InvalidRequestException(String.format("Invalid null value for clustering key part %s", name));
                builder.add(val);
            }
        }
        return firstEmpty;
    }

    /**
     * Builds the list of partition keys targeted by this statement, expanding an IN
     * restriction on the last partition key component into multiple keys.
     */
    static List<ByteBuffer> buildKeyNames(CFDefinition cfDef, Map<ColumnIdentifier, List<Term>> processed, List<ByteBuffer> variables)
    throws InvalidRequestException
    {
        ColumnNameBuilder keyBuilder = cfDef.getKeyNameBuilder();
        List<ByteBuffer> keys = new ArrayList<ByteBuffer>();
        for (CFDefinition.Name name : cfDef.keys.values())
        {
            List<Term> values = processed.get(name.name);
            if (values == null)
                throw new InvalidRequestException(String.format("Missing mandatory PRIMARY KEY part %s", name));

            if (keyBuilder.remainingCount() == 1)
            {
                // Last component: an IN list fans out into one key per value.
                for (Term t : values)
                {
                    ByteBuffer val = t.bindAndGet(variables);
                    if (val == null)
                        throw new InvalidRequestException(String.format("Invalid null value for partition key part %s", name));
                    keys.add(keyBuilder.copy().add(val).build());
                }
            }
            else
            {
                if (values.isEmpty() || values.size() > 1)
                    throw new InvalidRequestException("IN is only supported on the last column of the partition key");
                ByteBuffer val = values.get(0).bindAndGet(variables);
                if (val == null)
                    throw new InvalidRequestException(String.format("Invalid null value for partition key part %s", name));
                keyBuilder.add(val);
            }
        }
        return keys;
    }

    /**
     * Compute a row mutation for a single key
     *
     * @return row mutation
     *
     * @throws InvalidRequestException on the wrong request
     */
    private IMutation mutationForKey(CFDefinition cfDef, ByteBuffer key, ColumnNameBuilder builder, UpdateParameters params, ConsistencyLevel cl)
    throws InvalidRequestException
    {
        // Statically imported from cql.QueryProcessor. (A redundant duplicate call to
        // QueryProcessor.validateKey(key) was removed; one validation suffices.)
        validateKey(key);
        RowMutation rm = new RowMutation(cfDef.cfm.ksName, key);
        ColumnFamily cf = rm.addOrGet(cfDef.cfm);

        // Inserting the CQL row marker (see #4361)
        // We always need to insert a marker, because of the following situation:
        //   CREATE TABLE t ( k int PRIMARY KEY, c text );
        //   INSERT INTO t(k, c) VALUES (1, 1)
        //   DELETE c FROM t WHERE k = 1;
        //   SELECT * FROM t;
        // The last query should return one row (but with c == null). Adding
        // the marker with the insert make sure the semantic is correct (while making sure a
        // 'DELETE FROM t WHERE k = 1' does remove the row entirely)
        if (cfDef.isComposite && !cfDef.isCompact)
        {
            ByteBuffer name = builder.copy().add(ByteBufferUtil.EMPTY_BYTE_BUFFER).build();
            cf.addColumn(params.makeColumn(name, ByteBufferUtil.EMPTY_BYTE_BUFFER));
        }

        if (cfDef.isCompact)
        {
            if (builder.componentCount() == 0)
                throw new InvalidRequestException(String.format("Missing PRIMARY KEY part %s", cfDef.columns.values().iterator().next()));

            if (cfDef.value == null)
            {
                // compact + no compact value implies there is no column outside the PK. So no operation could
                // have passed through validation
                assert processedColumns.isEmpty();
                setToEmptyOperation.execute(key, cf, builder.copy(), params);
            }
            else
            {
                // compact means we don't have a row marker, so don't accept to set only the PK. See CASSANDRA-5648.
                if (processedColumns.isEmpty())
                    throw new InvalidRequestException(String.format("Column %s is mandatory for this COMPACT STORAGE table", cfDef.value));

                for (Operation op : processedColumns)
                    op.execute(key, cf, builder.copy(), params);
            }
        }
        else
        {
            for (Operation op : processedColumns)
                op.execute(key, cf, builder.copy(), params);
        }

        return type == Type.COUNTER ? new CounterMutation(rm, cl) : rm;
    }

    /**
     * Validates and prepares this statement against the table's metadata, filling
     * processedKeys/processedColumns and collecting bind-marker specifications.
     */
    public ParsedStatement.Prepared prepare(ColumnSpecification[] boundNames) throws InvalidRequestException
    {
        // Deal here with the keyspace overwrite thingy to avoid mistake
        CFMetaData metadata = validateColumnFamily(keyspace(), columnFamily());
        cfDef = metadata.getCfDef();

        type = metadata.getDefaultValidator().isCommutative() ? Type.COUNTER : Type.LOGGED;

        if (operations == null)
        {
            // Created from an INSERT
            if (type == Type.COUNTER)
                throw new InvalidRequestException("INSERT statement are not allowed on counter tables, use UPDATE instead");
            if (columnNames.size() != columnValues.size())
                throw new InvalidRequestException("Unmatched column names/values");
            if (columnNames.isEmpty())
                throw new InvalidRequestException("No columns provided to INSERT");

            for (int i = 0; i < columnNames.size(); i++)
            {
                CFDefinition.Name name = cfDef.get(columnNames.get(i));
                if (name == null)
                    throw new InvalidRequestException(String.format("Unknown identifier %s", columnNames.get(i)));

                // For UPDATES, the parser validates we don't set the same value twice but we must check it here for INSERT
                for (int j = 0; j < i; j++)
                    if (name.name.equals(columnNames.get(j)))
                        throw new InvalidRequestException(String.format("Multiple definitions found for column %s", name));

                Term.Raw value = columnValues.get(i);

                switch (name.kind)
                {
                    case KEY_ALIAS:
                    case COLUMN_ALIAS:
                        Term t = value.prepare(name);
                        t.collectMarkerSpecification(boundNames);
                        if (processedKeys.put(name.name, Collections.singletonList(t)) != null)
                            throw new InvalidRequestException(String.format("Multiple definitions found for PRIMARY KEY part %s", name));
                        break;
                    case VALUE_ALIAS:
                    case COLUMN_METADATA:
                        Operation operation = new Operation.SetValue(value).prepare(name);
                        operation.collectMarkerSpecification(boundNames);
                        processedColumns.add(operation);
                        break;
                }
            }
        }
        else
        {
            // Created from an UPDATE
            for (Pair<ColumnIdentifier, Operation.RawUpdate> entry : operations)
            {
                CFDefinition.Name name = cfDef.get(entry.left);
                if (name == null)
                    throw new InvalidRequestException(String.format("Unknown identifier %s", entry.left));

                Operation operation = entry.right.prepare(name);
                operation.collectMarkerSpecification(boundNames);

                switch (name.kind)
                {
                    case KEY_ALIAS:
                    case COLUMN_ALIAS:
                        throw new InvalidRequestException(String.format("PRIMARY KEY part %s found in SET part", entry.left));
                    case VALUE_ALIAS:
                    case COLUMN_METADATA:
                        processedColumns.add(operation);
                        break;
                }
            }
            processKeys(cfDef, whereClause, processedKeys, boundNames);
        }

        return new ParsedStatement.Prepared(this, Arrays.<ColumnSpecification>asList(boundNames));
    }

    public ParsedStatement.Prepared prepare() throws InvalidRequestException
    {
        ColumnSpecification[] names = new ColumnSpecification[getBoundsTerms()];
        return prepare(names);
    }

    // Extracts PRIMARY KEY restrictions from the WHERE clause into 'processed'.
    // Reused by DeleteStatement
    static void processKeys(CFDefinition cfDef, List<Relation> keys, Map<ColumnIdentifier, List<Term>> processed, ColumnSpecification[] names) throws InvalidRequestException
    {
        for (Relation rel : keys)
        {
            CFDefinition.Name name = cfDef.get(rel.getEntity());
            if (name == null)
                throw new InvalidRequestException(String.format("Unknown key identifier %s", rel.getEntity()));
            switch (name.kind)
            {
                case KEY_ALIAS:
                case COLUMN_ALIAS:
                    List<Term.Raw> rawValues;
                    if (rel.operator() == Relation.Type.EQ)
                        rawValues = Collections.singletonList(rel.getValue());
                    else if (name.kind == CFDefinition.Name.Kind.KEY_ALIAS && rel.operator() == Relation.Type.IN)
                        rawValues = rel.getInValues();
                    else
                        throw new InvalidRequestException(String.format("Invalid operator %s for PRIMARY KEY part %s", rel.operator(), name));

                    List<Term> values = new ArrayList<Term>(rawValues.size());
                    for (Term.Raw raw : rawValues)
                    {
                        Term t = raw.prepare(name);
                        t.collectMarkerSpecification(names);
                        values.add(t);
                    }
                    if (processed.put(name.name, values) != null)
                        throw new InvalidRequestException(String.format("Multiple definitions found for PRIMARY KEY part %s", name));
                    break;
                case VALUE_ALIAS:
                case COLUMN_METADATA:
                    throw new InvalidRequestException(String.format("Non PRIMARY KEY %s found in where clause", name));
            }
        }
    }

    public String toString()
    {
        return String.format("UpdateStatement(name=%s, keys=%s, columns=%s, timestamp=%s, timeToLive=%s)",
                             cfName,
                             whereClause,
                             operations,
                             isSetTimestamp() ? getTimestamp(-1) : "<now>",
                             getTimeToLive());
    }
}
| apache-2.0 |
omc8db/DigitalVoice | software/pi/RadioHead/RH_CC110.cpp | 16586 | // RH_CC110.cpp
//
// Driver for Texas Instruments CC110L transceiver.
//
// Copyright (C) 2016 Mike McCauley
// $Id: RH_CC110.cpp,v 1.5 2017/01/12 23:58:00 mikem Exp $
#include <RH_CC110.h>
// Interrupt vectors for the 3 Arduino interrupt pins
// Each interrupt can be handled by a different instance of RH_CC110, allowing you to have
// 2 or more LORAs per Arduino
// One slot per supported interrupt; filled in by init().
RH_CC110* RH_CC110::_deviceForInterrupt[RH_CC110_NUM_INTERRUPTS] = {0, 0, 0};
uint8_t RH_CC110::_interruptCount = 0; // Index into _deviceForInterrupt for next device
// We need 2 tables of modem configuration registers, since some values change depending on the Xtal frequency
// These are indexed by the values of ModemConfigChoice
// Canned modem configurations generated with the TI SmartRF Studio v7 version 2.3.0 on boodgie
// based on the sample 'Typical settings'
// Stored in flash (program) memory to save SRAM
// For 26MHz crystals
// Indexed by ModemConfigChoice; the register columns are listed in the header comment.
PROGMEM static const RH_CC110::ModemConfig MODEM_CONFIG_TABLE_26MHZ[] =
{
    // 0B    0C    10    11    12    15    19    1A    1B    1C    1D    21    22    23    24    25    26    2C    2D    2E
    {0x06, 0x00, 0xf5, 0x83, 0x13, 0x15, 0x16, 0x6c, 0x03, 0x40, 0x91, 0x56, 0x10, 0xe9, 0x2a, 0x00, 0x1f, 0x81, 0x35, 0x09}, // GFSK_Rb1_2Fd5_2
    {0x06, 0x00, 0xf6, 0x83, 0x13, 0x15, 0x16, 0x6c, 0x03, 0x40, 0x91, 0x56, 0x10, 0xe9, 0x2a, 0x00, 0x1f, 0x81, 0x35, 0x09}, // GFSK_Rb2_4Fd5_2
    {0x06, 0x00, 0xc7, 0x83, 0x13, 0x40, 0x16, 0x6c, 0x43, 0x40, 0x91, 0x56, 0x10, 0xe9, 0x2a, 0x00, 0x1f, 0x81, 0x35, 0x09}, // GFSK_Rb4_8Fd25_4
    {0x06, 0x00, 0xc8, 0x93, 0x13, 0x34, 0x16, 0x6c, 0x43, 0x40, 0x91, 0x56, 0x10, 0xe9, 0x2a, 0x00, 0x1f, 0x81, 0x35, 0x09}, // GFSK_Rb10Fd19
    {0x06, 0x00, 0xca, 0x83, 0x13, 0x35, 0x16, 0x6c, 0x43, 0x40, 0x91, 0x56, 0x10, 0xe9, 0x2a, 0x00, 0x1f, 0x81, 0x35, 0x09}, // GFSK_Rb38_4Fd20
    {0x08, 0x00, 0x7b, 0x83, 0x13, 0x42, 0x1d, 0x1c, 0xc7, 0x00, 0xb2, 0xb6, 0x10, 0xea, 0x2a, 0x00, 0x1f, 0x81, 0x35, 0x09}, // GFSK_Rb76_8Fd32
    {0x08, 0x00, 0x5b, 0xf8, 0x13, 0x47, 0x1d, 0x1c, 0xc7, 0x00, 0xb2, 0xb6, 0x10, 0xea, 0x2a, 0x00, 0x1f, 0x81, 0x31, 0x09}, // GFSK_Rb100Fd47
    {0x0c, 0x00, 0x2d, 0x3b, 0x13, 0x62, 0x1d, 0x1c, 0xc7, 0x00, 0xb0, 0xb6, 0x10, 0xea, 0x2a, 0x00, 0x1f, 0x88, 0x31, 0x09}, // GFSK_Rb250Fd127
};

// For 27MHz crystals
// Same choices as above but with timing-dependent registers adjusted for a 27MHz crystal.
PROGMEM static const RH_CC110::ModemConfig MODEM_CONFIG_TABLE_27MHZ[] =
{
    // 0B    0C    10    11    12    15    19    1A    1B    1C    1D    21    22    23    24    25    26    2C    2D    2E
    {0x06, 0x00, 0xf5, 0x75, 0x13, 0x14, 0x16, 0x6c, 0x03, 0x40, 0x91, 0x56, 0x10, 0xe9, 0x2a, 0x00, 0x1f, 0x81, 0x35, 0x09}, // GFSK_Rb1_2Fd5_2
    {0x06, 0x00, 0xf6, 0x75, 0x13, 0x14, 0x16, 0x6c, 0x03, 0x40, 0x91, 0x56, 0x10, 0xe9, 0x2a, 0x00, 0x1f, 0x81, 0x35, 0x09}, // GFSK_Rb2_4Fd5_2
    {0x06, 0x00, 0xc7, 0x75, 0x13, 0x37, 0x16, 0x6c, 0x43, 0x40, 0x91, 0x56, 0x10, 0xe9, 0x2a, 0x00, 0x1f, 0x81, 0x35, 0x09}, // GFSK_Rb4_8Fd25_4
    {0x06, 0x00, 0xc8, 0x84, 0x13, 0x33, 0x16, 0x6c, 0x43, 0x40, 0x91, 0x56, 0x10, 0xe9, 0x2a, 0x00, 0x1f, 0x81, 0x35, 0x09}, // GFSK_Rb10Fd19
    {0x06, 0x00, 0xca, 0x75, 0x13, 0x34, 0x16, 0x6c, 0x43, 0x40, 0x91, 0x56, 0x10, 0xe9, 0x2a, 0x00, 0x1f, 0x81, 0x35, 0x09}, // GFSK_Rb38_4Fd20
    {0x08, 0x00, 0x7b, 0x75, 0x13, 0x42, 0x1d, 0x1c, 0xc7, 0x00, 0xb2, 0xb6, 0x10, 0xea, 0x2a, 0x00, 0x1f, 0x81, 0x35, 0x09}, // GFSK_Rb76_8Fd32
    {0x08, 0x00, 0x5b, 0xf8, 0x13, 0x47, 0x1d, 0x1c, 0xc7, 0x00, 0xb2, 0xb6, 0x10, 0xea, 0x2a, 0x00, 0x1f, 0x81, 0x31, 0x09}, // GFSK_Rb100Fd47
    {0x0c, 0x00, 0x2d, 0x2f, 0x13, 0x62, 0x1d, 0x1c, 0xc7, 0x00, 0xb0, 0xb6, 0x10, 0xea, 0x2a, 0x00, 0x1f, 0x88, 0x31, 0x09}, // GFSK_Rb250Fd127
};
// These power outputs are based on the suggested optimum values for
// multilayer inductors in the 915MHz frequency band. Per table 5-15.
// Yes these are not linear.
// Caution: this table is indexed by the values of enum TransmitPower
// Do not change one without changing the other.
// If you do not like these values, use setPaTable() directly.
// Valid indices are 0 .. sizeof(paPowerValues)-1; setTxPower() bounds-checks against this.
PROGMEM static const uint8_t paPowerValues[] =
{
    0x03, // -30dBm
    0x0e, // -20dBm
    0x1e, // -15dBm
    0x27, // -10dBm
    0x8e, // 0dBm
    0xcd, // 5dBm
    0xc7, // 7dBm
    0xc0, // 10dBm
};
// Constructor: records pin assignments and crystal frequency; no hardware access
// happens until init() is called.
RH_CC110::RH_CC110(uint8_t slaveSelectPin, uint8_t interruptPin, bool is27MHz, RHGenericSPI& spi)
    :
    RHNRFSPIDriver(slaveSelectPin, spi),
    _rxBufValid(false),
    _is27MHz(is27MHz)
{
    _interruptPin = interruptPin;
    _myInterruptIndex = 0xff; // Not allocated yet
}
// Initialises the radio: resets the chip, allocates and attaches the interrupt
// handler, programs fixed registers, and applies default sync words, power,
// frequency and modem configuration.
// Returns false if the chip cannot be detected or no interrupt slot is available.
bool RH_CC110::init()
{
    if (!RHNRFSPIDriver::init())
	return false;

    // Determine the interrupt number that corresponds to the interruptPin
    int interruptNumber = digitalPinToInterrupt(_interruptPin);
    if (interruptNumber == NOT_AN_INTERRUPT)
	return false;
#ifdef RH_ATTACHINTERRUPT_TAKES_PIN_NUMBER
    interruptNumber = _interruptPin;
#endif

    // Reset the chip
    // Strobe the reset
    uint8_t val = spiCommand(RH_CC110_STROBE_30_SRES); // Reset
    delay(100);
    val = spiCommand(RH_CC110_STROBE_36_SIDLE); // IDLE
    if (val != 0x0f)
	return false; // No chip there or reset failed.

    // Add by Adrien van den Bossche <vandenbo@univ-tlse2.fr> for Teensy
    // ARM M4 requires the below. else pin interrupt doesn't work properly.
    // On all other platforms, its innocuous, belt and braces
    pinMode(_interruptPin, INPUT);

    // Set up interrupt handler
    // Since there are a limited number of interrupt glue functions isr*() available,
    // we can only support a limited number of devices simultaneously
    // ON some devices, notably most Arduinos, the interrupt pin passed in is actuallt the
    // interrupt number. You have to figure out the interruptnumber-to-interruptpin mapping
    // yourself based on knwledge of what Arduino board you are running on.
    if (_myInterruptIndex == 0xff)
    {
	// First run, no interrupt allocated yet
	// Bug fix: must be '<', not '<='. With '<=', the 4th device would get
	// _myInterruptIndex == RH_CC110_NUM_INTERRUPTS and the assignment to
	// _deviceForInterrupt below would write one slot past the end of the array.
	if (_interruptCount < RH_CC110_NUM_INTERRUPTS)
	    _myInterruptIndex = _interruptCount++;
	else
	    return false; // Too many devices, not enough interrupt vectors
    }
    _deviceForInterrupt[_myInterruptIndex] = this;
    if (_myInterruptIndex == 0)
	attachInterrupt(interruptNumber, isr0, RISING);
    else if (_myInterruptIndex == 1)
	attachInterrupt(interruptNumber, isr1, RISING);
    else if (_myInterruptIndex == 2)
	attachInterrupt(interruptNumber, isr2, RISING);
    else
	return false; // Too many devices, not enough interrupt vectors

    spiWriteRegister(RH_CC110_REG_02_IOCFG0, RH_CC110_GDO_CFG_CRC_OK_AUTORESET); // gdo0 interrupt on CRC_OK
    spiWriteRegister(RH_CC110_REG_06_PKTLEN, RH_CC110_MAX_PAYLOAD_LEN); // max packet length
    spiWriteRegister(RH_CC110_REG_07_PKTCTRL1, RH_CC110_CRC_AUTOFLUSH); // no append status, crc autoflush, no addr check
    spiWriteRegister(RH_CC110_REG_08_PKTCTRL0, RH_CC110_PKT_FORMAT_NORMAL | RH_CC110_CRC_EN | RH_CC110_LENGTH_CONFIG_VARIABLE);
    spiWriteRegister(RH_CC110_REG_13_MDMCFG1, RH_CC110_NUM_PREAMBLE_4); // 4 preamble bytes, chan spacing not used
    spiWriteRegister(RH_CC110_REG_17_MCSM1, RH_CC110_CCA_MODE_RSSI_PACKET | RH_CC110_RXOFF_MODE_RX | RH_CC110_TXOFF_MODE_IDLE);
    spiWriteRegister(RH_CC110_REG_18_MCSM0, RH_CC110_FS_AUTOCAL_FROM_IDLE | RH_CC110_PO_TIMEOUT_64); // cal when going to tx or rx
    spiWriteRegister(RH_CC110_REG_20_WORCTRL, 0xfb); // from smartrf
    spiWriteRegister(RH_CC110_REG_29_FSTEST, 0x59); // from smartrf
    spiWriteRegister(RH_CC110_REG_2A_PTEST, 0x7f); // from smartrf
    spiWriteRegister(RH_CC110_REG_2B_AGCTEST, 0x3f); // from smartrf

    // Set some reasonable default values
    uint8_t syncWords[] = { 0xd3, 0x91 };
    setSyncWords(syncWords, sizeof(syncWords));
    setTxPower(TransmitPower5dBm);
    setFrequency(915.0);
    setModemConfig(GFSK_Rb1_2Fd5_2);
    return true;
}
// Selects the crystal frequency assumed by setFrequency() and setModemConfig().
// Call before those functions if your board uses a 27MHz crystal.
void RH_CC110::setIs27MHz(bool is27MHz)
{
    _is27MHz = is27MHz;
}
// C++ level interrupt handler for this instance
// We use this to get RxDone and TxDone interrupts
// GDO0 is configured to fire only on CRC_OK, so any interrupt in RX mode means a
// packet with a valid CRC is waiting in the FIFO.
void RH_CC110::handleInterrupt()
{
    // Serial.println("I");
    if (_mode == RHModeRx)
    {
	// Radio is configured to stay in RX until we move it to IDLE after a CRC_OK message for us
	// We only get interrupts in RX mode, on CRC_OK
	// CRC OK
	_lastRssi = spiBurstReadRegister(RH_CC110_REG_34_RSSI); // Was set when sync word was detected
	_bufLen = spiReadRegister(RH_CC110_REG_3F_FIFO);        // First FIFO byte is the length
	if (_bufLen < 4)
	{
	    // Something wrong there, flush the FIFO
	    // (4 header bytes are mandatory, so anything shorter is corrupt)
	    spiCommand(RH_CC110_STROBE_3A_SFRX);
	    clearRxBuf();
	    return;
	}
	spiBurstRead(RH_CC110_REG_3F_FIFO | RH_CC110_SPI_BURST_MASK | RH_CC110_SPI_READ_MASK, _buf, _bufLen);
	// All good so far. See if its for us
	validateRxBuf();
	if (_rxBufValid)
	    setModeIdle(); // Done
    }
}
// These are low level functions that call the interrupt handler for the correct
// instance of RH_CC110.
// 3 interrupts allows us to have 3 different devices
// Each glue function forwards to the instance registered in _deviceForInterrupt,
// guarding against a spurious interrupt before init() has registered a device.
void RH_CC110::isr0()
{
    if (_deviceForInterrupt[0])
	_deviceForInterrupt[0]->handleInterrupt();
}

void RH_CC110::isr1()
{
    if (_deviceForInterrupt[1])
	_deviceForInterrupt[1]->handleInterrupt();
}

void RH_CC110::isr2()
{
    if (_deviceForInterrupt[2])
	_deviceForInterrupt[2]->handleInterrupt();
}
// SPI register access helpers. The CC110 uses the top two bits of the address
// byte as read and burst flags; these wrappers mask the 6-bit register address
// and set the appropriate flag bits.

// Single-byte read of a configuration register.
uint8_t RH_CC110::spiReadRegister(uint8_t reg)
{
    return spiRead((reg & 0x3f) | RH_CC110_SPI_READ_MASK);
}

// Read with the burst bit set; required for the 0x30..0x3d status registers.
uint8_t RH_CC110::spiBurstReadRegister(uint8_t reg)
{
    return spiRead((reg & 0x3f) | RH_CC110_SPI_READ_MASK | RH_CC110_SPI_BURST_MASK);
}

// Single-byte write of a configuration register.
uint8_t RH_CC110::spiWriteRegister(uint8_t reg, uint8_t val)
{
    return spiWrite((reg & 0x3f), val);
}

// Multi-byte (burst) write starting at the given register.
uint8_t RH_CC110::spiBurstWriteRegister(uint8_t reg, const uint8_t* src, uint8_t len)
{
    return spiBurstWrite((reg & 0x3f) | RH_CC110_SPI_BURST_MASK, src, len);
}
// Dumps all chip registers to Serial for debugging (no-op without RH_HAVE_SERIAL).
// Always returns true.
bool RH_CC110::printRegisters()
{
#ifdef RH_HAVE_SERIAL
    uint8_t i;
    // Configuration registers use plain single reads.
    for (i = 0; i <= 0x2f; i++)
    {
	Serial.print(i, HEX);
	Serial.print(": ");
	Serial.println(spiReadRegister(i), HEX);
    }
    // Burst registers
    // (status registers 0x30..0x3e must be read with the burst bit set)
    for (i = 0x30; i <= 0x3e; i++)
    {
	Serial.print(i, HEX);
	Serial.print(": ");
	Serial.println(spiBurstReadRegister(i), HEX);
    }
#endif
    return true;
}
// Check whether the latest received message is complete and uncorrupted
// Extracts the 4 RadioHead header bytes and marks the buffer valid when the
// message is addressed to this node (or we are promiscuous / it is broadcast).
void RH_CC110::validateRxBuf()
{
    if (_bufLen < 4)
	return; // Too short to be a real message
    // Extract the 4 headers
    _rxHeaderTo    = _buf[0];
    _rxHeaderFrom  = _buf[1];
    _rxHeaderId    = _buf[2];
    _rxHeaderFlags = _buf[3];
    if (_promiscuous ||
	_rxHeaderTo == _thisAddress ||
	_rxHeaderTo == RH_BROADCAST_ADDRESS)
    {
	_rxGood++;
	_rxBufValid = true;
    }
}
// Returns true if a complete, validated message is waiting in the RX buffer.
// Side effect: puts the radio into RX mode if it is not transmitting.
bool RH_CC110::available()
{
    if (_mode == RHModeTx)
	return false;
    if (_rxBufValid) // Will be set by the interrupt handler when a good message is received
	return true;
    setModeRx(); // Make sure we are receiving
    return false; // Nothing yet
}
// Discards any received message; atomic because the ISR also touches these fields.
void RH_CC110::clearRxBuf()
{
    ATOMIC_BLOCK_START;
    _rxBufValid = false;
    _bufLen = 0;
    ATOMIC_BLOCK_END;
}
// Copies the most recent valid message payload (without the 4 RadioHead header
// bytes) into buf. On entry *len is the caller's buffer capacity; on exit it is
// the number of bytes copied. Returns false if no message is available.
// The message is consumed (cleared) whether or not buf/len were supplied.
bool RH_CC110::recv(uint8_t* buf, uint8_t* len)
{
    if (!available())
	return false;

    if (buf && len)
    {
	ATOMIC_BLOCK_START;
	// Skip the 4 headers that are at the beginning of the rxBuf
	if (*len > _bufLen - RH_CC110_HEADER_LEN)
	    *len = _bufLen - RH_CC110_HEADER_LEN;
	memcpy(buf, _buf + RH_CC110_HEADER_LEN, *len);
	ATOMIC_BLOCK_END;
    }
    clearRxBuf(); // This message accepted and cleared
    return true;
}
// Queues a message for transmission: waits for any in-flight packet, performs
// channel-activity detection, loads length + 4 header bytes + payload into the
// FIFO, then starts TX. Returns false if len is too long or the channel is busy.
// Non-blocking after TX starts; use waitPacketSent() to wait for completion.
bool RH_CC110::send(const uint8_t* data, uint8_t len)
{
    if (len > RH_CC110_MAX_MESSAGE_LEN)
	return false;

    waitPacketSent(); // Make sure we dont interrupt an outgoing message
    setModeIdle();

    if (!waitCAD())
	return false; // Check channel activity

    // First FIFO byte is the frame length: headers + payload.
    spiWriteRegister(RH_CC110_REG_3F_FIFO, len + RH_CC110_HEADER_LEN);
    spiWriteRegister(RH_CC110_REG_3F_FIFO,_txHeaderTo);
    spiWriteRegister(RH_CC110_REG_3F_FIFO,_txHeaderFrom);
    spiWriteRegister(RH_CC110_REG_3F_FIFO,_txHeaderId);
    spiWriteRegister(RH_CC110_REG_3F_FIFO,_txHeaderFlags);
    spiBurstWriteRegister(RH_CC110_REG_3F_FIFO, data, len);

    // Radio returns to Idle when TX is finished
    // need waitPacketSent() to detect change of _mode and TX completion
    setModeTx();
    return true;
}
// Maximum payload length (excluding the 4 RadioHead header bytes).
uint8_t RH_CC110::maxMessageLength()
{
    return RH_CC110_MAX_MESSAGE_LEN;
}

// Strobes the chip to IDLE and records the new mode.
void RH_CC110::setModeIdle()
{
    if (_mode != RHModeIdle)
    {
	spiCommand(RH_CC110_STROBE_36_SIDLE);
	_mode = RHModeIdle;
    }
}

// Puts the chip into power-down mode. Always returns true.
bool RH_CC110::sleep()
{
    if (_mode != RHModeSleep)
    {
	spiCommand(RH_CC110_STROBE_39_SPWD);
	_mode = RHModeSleep;
    }
    return true;
}

void RH_CC110::setModeRx()
{
    if (_mode != RHModeRx)
    {
	// Radio is configured to stay in RX mode
	// only receipt of a CRC_OK wil cause us to return it to IDLE
	spiCommand(RH_CC110_STROBE_34_SRX);
	_mode = RHModeRx;
    }
}

// Starts transmitting; the chip returns to IDLE by itself when TX completes.
void RH_CC110::setModeTx()
{
    if (_mode != RHModeTx)
    {
	spiCommand(RH_CC110_STROBE_35_STX);
	_mode = RHModeTx;
    }
}

// Reads the chip status byte via a NOP strobe.
uint8_t RH_CC110::statusRead()
{
    return spiCommand(RH_CC110_STROBE_3D_SNOP);
}

// Sigh, this chip has no TXDONE type interrupt, so we have to poll
// Blocks (yielding) until the chip state returns to IDLE after a transmission.
bool RH_CC110::waitPacketSent()
{
    // If we are not currently in transmit mode, there is no packet to wait for
    if (_mode != RHModeTx)
	return false;

    // Caution: may transition through CALIBRATE
    while ((statusRead() & RH_CC110_STATUS_STATE) != RH_CC110_STATUS_IDLE)
	YIELD;

    _mode = RHModeIdle;
    return true;
}
// Sets the transmit power by loading the matching PATABLE entry.
// power indexes paPowerValues (see enum TransmitPower); returns false when out of range.
bool RH_CC110::setTxPower(TransmitPower power)
{
    // Bug fix: must be '>=', not '>'. Valid indices are 0 .. sizeof(paPowerValues)-1,
    // so power == sizeof(paPowerValues) would read one byte past the end of the table.
    if (power >= sizeof(paPowerValues))
	return false;

    uint8_t patable[2];
    memcpy_P(&patable[0], (void*)&paPowerValues[power], sizeof(uint8_t));
    patable[1] = 0x00; // Second entry unused for this modulation
    setPaTable(patable, sizeof(patable));
    return true;
}
// Writes raw entries to the chip's PATABLE; use for custom power settings
// instead of the canned setTxPower() values.
void RH_CC110::setPaTable(uint8_t* patable, uint8_t patablesize)
{
    spiBurstWriteRegister(RH_CC110_REG_3E_PATABLE, patable, patablesize);
}
// Programs the carrier frequency (centre, in MHz) into FREQ2/1/0.
// Returns false if the computed register value does not fit in 24 bits.
bool RH_CC110::setFrequency(float centre)
{
    // From section 5.21: fcarrier = fxosc / 2^16 * FREQ
    uint32_t FREQ;
    float fxosc = _is27MHz ? 27.0 : 26.0;
    FREQ = (uint32_t)(centre * 65536 / fxosc);
    // Some trivial checks
    if (FREQ & 0xff000000)
	return false;

    spiWriteRegister(RH_CC110_REG_0D_FREQ2, (FREQ >> 16) & 0xff);
    spiWriteRegister(RH_CC110_REG_0E_FREQ1, (FREQ >> 8) & 0xff);
    spiWriteRegister(RH_CC110_REG_0F_FREQ0, FREQ & 0xff);

    // Radio is configured to calibrate automatically whenever it enters RX or TX mode
    // so no need to check for PLL lock here
    return true;
}
// Sets registers from a canned modem configuration structure
// Writes each field of the ModemConfig to its corresponding chip register.
void RH_CC110::setModemRegisters(const ModemConfig* config)
{
    spiWriteRegister(RH_CC110_REG_0B_FSCTRL1,  config->reg_0b);
    spiWriteRegister(RH_CC110_REG_0C_FSCTRL0,  config->reg_0c);
    spiWriteRegister(RH_CC110_REG_10_MDMCFG4,  config->reg_10);
    spiWriteRegister(RH_CC110_REG_11_MDMCFG3,  config->reg_11);
    spiWriteRegister(RH_CC110_REG_12_MDMCFG2,  config->reg_12);
    spiWriteRegister(RH_CC110_REG_15_DEVIATN,  config->reg_15);
    spiWriteRegister(RH_CC110_REG_19_FOCCFG,   config->reg_19);
    spiWriteRegister(RH_CC110_REG_1A_BSCFG,    config->reg_1a);
    spiWriteRegister(RH_CC110_REG_1B_AGCCTRL2, config->reg_1b);
    spiWriteRegister(RH_CC110_REG_1C_AGCCTRL1, config->reg_1c);
    spiWriteRegister(RH_CC110_REG_1D_AGCCTRL0, config->reg_1d);
    spiWriteRegister(RH_CC110_REG_21_FREND1,   config->reg_21);
    spiWriteRegister(RH_CC110_REG_22_FREND0,   config->reg_22);
    spiWriteRegister(RH_CC110_REG_23_FSCAL3,   config->reg_23);
    spiWriteRegister(RH_CC110_REG_24_FSCAL2,   config->reg_24);
    spiWriteRegister(RH_CC110_REG_25_FSCAL1,   config->reg_25);
    spiWriteRegister(RH_CC110_REG_26_FSCAL0,   config->reg_26);
    spiWriteRegister(RH_CC110_REG_2C_TEST2,    config->reg_2c);
    spiWriteRegister(RH_CC110_REG_2D_TEST1,    config->reg_2d);
    spiWriteRegister(RH_CC110_REG_2E_TEST0,    config->reg_2e);
}
// Set one of the canned Modem configs
// Returns true if its a valid choice
// Copies the entry for the configured crystal (26 or 27 MHz) out of flash and
// writes it to the chip.
bool RH_CC110::setModemConfig(ModemConfigChoice index)
{
    // Bug fix: must be '>=', not '>'. Valid indices are 0 .. count-1, so
    // index == count would read one entry past the end of the table.
    if (index >= (signed int)(sizeof(MODEM_CONFIG_TABLE_27MHZ) / sizeof(ModemConfig)))
	return false;

    const RH_CC110::ModemConfig *p = _is27MHz ? MODEM_CONFIG_TABLE_27MHZ : MODEM_CONFIG_TABLE_26MHZ ;
    RH_CC110::ModemConfig cfg;
    memcpy_P(&cfg, p + index, sizeof(RH_CC110::ModemConfig));
    setModemRegisters(&cfg);

    return true;
}
// Programs the 2-byte sync word (SYNC1 is the MSB). Silently ignores any other length.
void RH_CC110::setSyncWords(const uint8_t* syncWords, uint8_t len)
{
    if (!syncWords || len != 2)
	return; // Only 2 byte sync words are supported
    spiWriteRegister(RH_CC110_REG_04_SYNC1, syncWords[0]);
    spiWriteRegister(RH_CC110_REG_05_SYNC0, syncWords[1]);
}
| apache-2.0 |
googlemaps/google-maps-services-java | src/main/java/com/google/maps/internal/DurationAdapter.java | 2242 | /*
* Copyright 2014 Google Inc. All rights reserved.
*
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this
* file except in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under
* the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
* ANY KIND, either express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.google.maps.internal;
import com.google.gson.TypeAdapter;
import com.google.gson.stream.JsonReader;
import com.google.gson.stream.JsonToken;
import com.google.gson.stream.JsonWriter;
import com.google.maps.model.Distance;
import com.google.maps.model.Duration;
import java.io.IOException;
/**
 * This class handles conversion from JSON to {@link Duration}.
 *
 * <p>Please see <a
 * href="https://google-gson.googlecode.com/svn/trunk/gson/docs/javadocs/com/google/gson/TypeAdapter.html">GSON
 * Type Adapter</a> for more detail.
 */
public class DurationAdapter extends TypeAdapter<Duration> {

  /**
   * Read a duration object from a Directions API result and convert it to a {@link Duration}.
   *
   * <p>We are expecting to receive something akin to the following:
   *
   * <pre>
   * {
   *   "value": 207,
   *   "text": "0.1 mi"
   * }
   * </pre>
   *
   * @param reader the JSON stream positioned at either a null or an object
   * @return the parsed {@link Duration}, or {@code null} for a JSON null
   * @throws IOException on malformed input
   */
  @Override
  public Duration read(JsonReader reader) throws IOException {
    if (reader.peek() == JsonToken.NULL) {
      reader.nextNull();
      return null;
    }

    Duration duration = new Duration();
    reader.beginObject();
    while (reader.hasNext()) {
      String name = reader.nextName();
      if (name.equals("text")) {
        duration.humanReadable = reader.nextString();
      } else if (name.equals("value")) {
        duration.inSeconds = reader.nextLong();
      } else {
        // Consume the value of any unrecognized key. Without this, the reader
        // is left positioned on an unread value after nextName(), and the next
        // hasNext()/nextName() call throws IllegalStateException as soon as the
        // API adds a field this adapter doesn't know about.
        reader.skipValue();
      }
    }
    reader.endObject();
    return duration;
  }

  /** This method is not implemented. */
  @Override
  public void write(JsonWriter writer, Duration value) throws IOException {
    throw new UnsupportedOperationException("Unimplemented method");
  }
}
| apache-2.0 |
williamchengit/TestRepo | solr/core/src/java/org/apache/solr/schema/FieldTypePluginLoader.java | 16938 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.schema;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.core.KeywordAnalyzer;
import org.apache.lucene.analysis.core.KeywordTokenizerFactory;
import org.apache.lucene.analysis.util.*;
import org.apache.lucene.util.Version;
import org.apache.solr.analysis.TokenizerChain;
import org.apache.solr.common.SolrException;
import org.apache.solr.util.DOMUtil;
import org.apache.solr.core.Config;
import org.apache.solr.core.SolrResourceLoader;
import org.apache.solr.util.plugin.AbstractPluginLoader;
import org.w3c.dom.*;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory;
import java.util.*;
import java.lang.reflect.Constructor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Loads {@code <fieldType>} definitions from schema.xml: instantiates the
 * FieldType itself, then wires up any nested index/query/multiterm analyzer
 * chains and an optional similarity factory.
 */
public final class FieldTypePluginLoader
  extends AbstractPluginLoader<FieldType> {

  private static final String LUCENE_MATCH_VERSION_PARAM
    = IndexSchema.LUCENE_MATCH_VERSION_PARAM;

  protected final static Logger log
    = LoggerFactory.getLogger(FieldTypePluginLoader.class);

  // Shared across all XPath evaluations below. NOTE(review): javax XPath
  // objects are not thread-safe; this assumes the loader is used from a
  // single thread — confirm against callers.
  private final XPath xpath = XPathFactory.newInstance().newXPath();

  /**
   * @param schema The schema that will be used to initialize the FieldTypes
   * @param fieldTypes All FieldTypes that are instantiated by
   *        this Plugin Loader will be added to this Map
   * @param schemaAware Any SchemaAware objects that are instantiated by
   *        this Plugin Loader will be added to this collection.
   */
  public FieldTypePluginLoader(final IndexSchema schema,
                               final Map<String, FieldType> fieldTypes,
                               final Collection<SchemaAware> schemaAware) {
    super("[schema.xml] fieldType", FieldType.class, true, true);
    this.schema = schema;
    this.fieldTypes = fieldTypes;
    this.schemaAware = schemaAware;
  }

  private final IndexSchema schema;
  private final Map<String, FieldType> fieldTypes;
  private final Collection<SchemaAware> schemaAware;

  /**
   * Builds one FieldType from its {@code <fieldType>} DOM node: reads the
   * optional query/multiterm/index analyzers and similarity, then applies
   * the fall-back rules (missing query analyzer defaults to the index
   * analyzer and vice versa).
   */
  @Override
  protected FieldType create( SolrResourceLoader loader,
                              String name,
                              String className,
                              Node node ) throws Exception {

    FieldType ft = loader.newInstance(className, FieldType.class);
    ft.setTypeName(name);

    String expression = "./analyzer[@type='query']";
    Node anode = (Node)xpath.evaluate(expression, node, XPathConstants.NODE);
    Analyzer queryAnalyzer = readAnalyzer(anode);

    expression = "./analyzer[@type='multiterm']";
    anode = (Node)xpath.evaluate(expression, node, XPathConstants.NODE);
    Analyzer multiAnalyzer = readAnalyzer(anode);

    // An analyzer without a type specified, or with type="index"
    expression = "./analyzer[not(@type)] | ./analyzer[@type='index']";
    anode = (Node)xpath.evaluate(expression, node, XPathConstants.NODE);
    Analyzer analyzer = readAnalyzer(anode);

    // a custom similarity[Factory]
    expression = "./similarity";
    anode = (Node)xpath.evaluate(expression, node, XPathConstants.NODE);
    SimilarityFactory simFactory = IndexSchema.readSimilarity(loader, anode);
    if (null != simFactory) {
      ft.setSimilarity(simFactory);
    }

    // Fall-back rules: an omitted query analyzer inherits the index analyzer,
    // and an omitted index analyzer inherits the query analyzer. The
    // "explicit" flags record which one the user actually configured.
    if (null == queryAnalyzer) {
      queryAnalyzer = analyzer;
      ft.setIsExplicitQueryAnalyzer(false);
    } else {
      ft.setIsExplicitQueryAnalyzer(true);
    }
    if (null == analyzer) {
      analyzer = queryAnalyzer;
      ft.setIsExplicitAnalyzer(false);
    } else {
      ft.setIsExplicitAnalyzer(true);
    }

    if (null != analyzer) {
      ft.setIndexAnalyzer(analyzer);
      ft.setQueryAnalyzer(queryAnalyzer);
      // Only TextFields support multiterm analysis; if none was configured,
      // derive one from the query analyzer (see constructMultiTermAnalyzer).
      if (ft instanceof TextField) {
        if (null == multiAnalyzer) {
          multiAnalyzer = constructMultiTermAnalyzer(queryAnalyzer);
          ((TextField)ft).setIsExplicitMultiTermAnalyzer(false);
        } else {
          ((TextField)ft).setIsExplicitMultiTermAnalyzer(true);
        }
        ((TextField)ft).setMultiTermAnalyzer(multiAnalyzer);
      }
    }
    if (ft instanceof SchemaAware){
      schemaAware.add((SchemaAware) ft);
    }
    return ft;
  }

  /** Passes all node attributes except "name" to the FieldType as init args. */
  @Override
  protected void init(FieldType plugin, Node node) throws Exception {
    Map<String,String> params = DOMUtil.toMapExcept( node.getAttributes(), "name");
    plugin.setArgs(schema, params);
  }

  /** Registers the FieldType in the shared map; returns any previous mapping. */
  @Override
  protected FieldType register(String name,
                               FieldType plugin) throws Exception {
    log.trace("fieldtype defined: " + plugin );
    return fieldTypes.put( name, plugin );
  }

  // The point here is that, if no multiterm analyzer was specified in the schema file, do one of several things:
  // 1> If legacyMultiTerm == false, assemble a new analyzer composed of all of the charfilters,
  //    lowercase filters and asciifoldingfilter.
  // 2> If legacyMultiTerm == true just construct the analyzer from a KeywordTokenizer. That should mimic current behavior.
  //    Do the same if they've specified that the old behavior is required (legacyMultiTerm="true")
  private Analyzer constructMultiTermAnalyzer(Analyzer queryAnalyzer) {
    if (queryAnalyzer == null) return null;

    // Non-TokenizerChain analyzers can't be decomposed, so fall back to a
    // pass-through keyword analyzer.
    if (!(queryAnalyzer instanceof TokenizerChain)) {
      return new KeywordAnalyzer();
    }

    TokenizerChain tc = (TokenizerChain) queryAnalyzer;
    MultiTermChainBuilder builder = new MultiTermChainBuilder();

    CharFilterFactory[] charFactories = tc.getCharFilterFactories();
    if (charFactories != null) {
      for (CharFilterFactory fact : charFactories) {
        builder.add(fact);
      }
    }

    builder.add(tc.getTokenizerFactory());

    for (TokenFilterFactory fact : tc.getTokenFilterFactories()) {
      builder.add(fact);
    }
    return builder.build();
  }

  /**
   * Accumulates the multiterm-aware subset of an analyzer chain.  Each
   * component offered to {@link #add} is kept only if it implements
   * MultiTermAwareComponent, in which case its multiterm counterpart is
   * sorted into the char-filter / tokenizer / token-filter slot.
   */
  private static class MultiTermChainBuilder {
    static final KeywordTokenizerFactory keyFactory = new KeywordTokenizerFactory(new HashMap<String,String>());

    ArrayList<CharFilterFactory> charFilters = null;
    ArrayList<TokenFilterFactory> filters = new ArrayList<>(2);
    // Defaults to a keyword tokenizer unless the source chain supplies one.
    TokenizerFactory tokenizer = keyFactory;

    public void add(Object current) {
      if (!(current instanceof MultiTermAwareComponent)) return;
      AbstractAnalysisFactory newComponent = ((MultiTermAwareComponent)current).getMultiTermComponent();
      if (newComponent instanceof TokenFilterFactory) {
        if (filters == null) {
          filters = new ArrayList<>(2);
        }
        filters.add((TokenFilterFactory)newComponent);
      } else if (newComponent instanceof TokenizerFactory) {
        tokenizer = (TokenizerFactory)newComponent;
      } else if (newComponent instanceof CharFilterFactory) {
        if (charFilters == null) {
          charFilters = new ArrayList<>(1);
        }
        charFilters.add( (CharFilterFactory)newComponent);
      } else {
        throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Unknown analysis component from MultiTermAwareComponent: " + newComponent);
      }
    }

    public TokenizerChain build() {
      CharFilterFactory[] charFilterArr = charFilters == null ? null : charFilters.toArray(new CharFilterFactory[charFilters.size()]);
      TokenFilterFactory[] filterArr = filters == null ? new TokenFilterFactory[0] : filters.toArray(new TokenFilterFactory[filters.size()]);
      return new TokenizerChain(charFilterArr, tokenizer, filterArr);
    }
  }

  //
  // <analyzer><tokenizer class="...."/><tokenizer class="...." arg="....">
  //
  // Either an explicit Analyzer class is named (no nested factories allowed),
  // or a chain is assembled from nested charFilter/tokenizer/filter elements.
  //
  private Analyzer readAnalyzer(Node node) throws XPathExpressionException {

    final SolrResourceLoader loader = schema.getResourceLoader();

    // parent node used to be passed in as "fieldtype"
    // if (!fieldtype.hasChildNodes()) return null;
    // Node node = DOMUtil.getChild(fieldtype,"analyzer");

    if (node == null) return null;
    NamedNodeMap attrs = node.getAttributes();
    String analyzerName = DOMUtil.getAttr(attrs,"class");

    // check for all of these up front, so we can error if used in
    // conjunction with an explicit analyzer class.
    NodeList charFilterNodes = (NodeList)xpath.evaluate
      ("./charFilter",  node, XPathConstants.NODESET);
    NodeList tokenizerNodes = (NodeList)xpath.evaluate
      ("./tokenizer", node, XPathConstants.NODESET);
    NodeList tokenFilterNodes = (NodeList)xpath.evaluate
      ("./filter", node, XPathConstants.NODESET);

    if (analyzerName != null) {

      // explicitly check for child analysis factories instead of
      // just any child nodes, because the user might have their
      // own custom nodes (ie: <description> or something like that)
      if (0 != charFilterNodes.getLength() ||
          0 != tokenizerNodes.getLength() ||
          0 != tokenFilterNodes.getLength()) {
        throw new SolrException
          ( SolrException.ErrorCode.SERVER_ERROR,
            "Configuration Error: Analyzer class='" + analyzerName +
            "' can not be combined with nested analysis factories");
      }

      try {
        // No need to be core-aware as Analyzers are not in the core-aware list
        final Class<? extends Analyzer> clazz = loader.findClass(analyzerName, Analyzer.class);
        try {
          // first try to use a ctor with version parameter
          // (needed for many new Analyzers that have no default one anymore)
          Constructor<? extends Analyzer> cnstr
            = clazz.getConstructor(Version.class);
          final String matchVersionStr
            = DOMUtil.getAttr(attrs, LUCENE_MATCH_VERSION_PARAM);
          final Version luceneMatchVersion = (matchVersionStr == null) ?
            schema.getDefaultLuceneMatchVersion() :
            Config.parseLuceneVersionString(matchVersionStr);
          if (luceneMatchVersion == null) {
            throw new SolrException
              ( SolrException.ErrorCode.SERVER_ERROR,
                "Configuration Error: Analyzer '" + clazz.getName() +
                "' needs a 'luceneMatchVersion' parameter");
          }
          return cnstr.newInstance(luceneMatchVersion);
        } catch (NoSuchMethodException nsme) {
          // otherwise use default ctor
          return clazz.newInstance();
        }
      } catch (Exception e) {
        log.error("Cannot load analyzer: "+analyzerName, e);
        throw new SolrException( SolrException.ErrorCode.SERVER_ERROR,
                                 "Cannot load analyzer: "+analyzerName, e );
      }
    }

    // Load the CharFilters
    final ArrayList<CharFilterFactory> charFilters
      = new ArrayList<>();
    AbstractPluginLoader<CharFilterFactory> charFilterLoader =
      new AbstractPluginLoader<CharFilterFactory>
      ("[schema.xml] analyzer/charFilter", CharFilterFactory.class, false, false) {

      @Override
      protected CharFilterFactory create(SolrResourceLoader loader, String name, String className, Node node) throws Exception {
        final Map<String,String> params = DOMUtil.toMap(node.getAttributes());
        // The luceneMatchVersion attribute is resolved here and re-inserted
        // as a normalized string so the factory ctor always sees it.
        String configuredVersion = params.remove(LUCENE_MATCH_VERSION_PARAM);
        params.put(LUCENE_MATCH_VERSION_PARAM, parseConfiguredVersion(configuredVersion, CharFilterFactory.class.getSimpleName()).toString());
        CharFilterFactory factory = loader.newInstance(className, CharFilterFactory.class, getDefaultPackages(), new Class[] { Map.class }, new Object[] { params });
        factory.setExplicitLuceneMatchVersion(null != configuredVersion);
        return factory;
      }

      @Override
      protected void init(CharFilterFactory plugin, Node node) throws Exception {
        if( plugin != null ) {
          charFilters.add( plugin );
        }
      }

      @Override
      protected CharFilterFactory register(String name,
                                           CharFilterFactory plugin) {
        return null; // used for map registration
      }
    };
    charFilterLoader.load( loader, charFilterNodes );

    // Load the Tokenizer
    // Although an analyzer only allows a single Tokenizer, we load a list to make sure
    // the configuration is ok
    final ArrayList<TokenizerFactory> tokenizers
      = new ArrayList<>(1);
    AbstractPluginLoader<TokenizerFactory> tokenizerLoader =
      new AbstractPluginLoader<TokenizerFactory>
      ("[schema.xml] analyzer/tokenizer", TokenizerFactory.class, false, false) {
      @Override
      protected TokenizerFactory create(SolrResourceLoader loader, String name, String className, Node node) throws Exception {
        final Map<String,String> params = DOMUtil.toMap(node.getAttributes());
        String configuredVersion = params.remove(LUCENE_MATCH_VERSION_PARAM);
        params.put(LUCENE_MATCH_VERSION_PARAM, parseConfiguredVersion(configuredVersion, TokenizerFactory.class.getSimpleName()).toString());
        TokenizerFactory factory = loader.newInstance(className, TokenizerFactory.class, getDefaultPackages(), new Class[] { Map.class }, new Object[] { params });
        factory.setExplicitLuceneMatchVersion(null != configuredVersion);
        return factory;
      }

      @Override
      protected void init(TokenizerFactory plugin, Node node) throws Exception {
        // Reject a second <tokenizer> element in the same analyzer.
        if( !tokenizers.isEmpty() ) {
          throw new SolrException( SolrException.ErrorCode.SERVER_ERROR,
              "The schema defines multiple tokenizers for: "+node );
        }
        tokenizers.add( plugin );
      }

      @Override
      protected TokenizerFactory register(String name, TokenizerFactory plugin) {
        return null; // used for map registration
      }
    };
    tokenizerLoader.load( loader, tokenizerNodes );

    // Make sure something was loaded
    if( tokenizers.isEmpty() ) {
      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,"analyzer without class or tokenizer");
    }

    // Load the Filters
    final ArrayList<TokenFilterFactory> filters
      = new ArrayList<>();

    AbstractPluginLoader<TokenFilterFactory> filterLoader =
      new AbstractPluginLoader<TokenFilterFactory>("[schema.xml] analyzer/filter", TokenFilterFactory.class, false, false)
    {
      @Override
      protected TokenFilterFactory create(SolrResourceLoader loader, String name, String className, Node node) throws Exception {
        final Map<String,String> params = DOMUtil.toMap(node.getAttributes());
        String configuredVersion = params.remove(LUCENE_MATCH_VERSION_PARAM);
        params.put(LUCENE_MATCH_VERSION_PARAM, parseConfiguredVersion(configuredVersion, TokenFilterFactory.class.getSimpleName()).toString());
        TokenFilterFactory factory = loader.newInstance
          (className, TokenFilterFactory.class, getDefaultPackages(), new Class[] { Map.class }, new Object[] { params });
        factory.setExplicitLuceneMatchVersion(null != configuredVersion);
        return factory;
      }

      @Override
      protected void init(TokenFilterFactory plugin, Node node) throws Exception {
        if( plugin != null ) {
          filters.add( plugin );
        }
      }

      @Override
      protected TokenFilterFactory register(String name, TokenFilterFactory plugin) throws Exception {
        return null; // used for map registration
      }
    };
    filterLoader.load( loader, tokenFilterNodes );

    return new TokenizerChain(charFilters.toArray(new CharFilterFactory[charFilters.size()]),
        tokenizers.get(0), filters.toArray(new TokenFilterFactory[filters.size()]));
  }

  /**
   * Resolves the effective Lucene match version for one analysis factory,
   * falling back to the schema default, and warns about pre-4.0 emulation.
   */
  private Version parseConfiguredVersion(String configuredVersion, String pluginClassName) {
    Version version = (configuredVersion != null) ?
      Config.parseLuceneVersionString(configuredVersion) : schema.getDefaultLuceneMatchVersion();

    if (!version.onOrAfter(Version.LUCENE_4_0)) {
      log.warn(pluginClassName + " is using deprecated " + version +
        " emulation. You should at some point declare and reindex to at least 4.0, because " +
        "3.x emulation is deprecated and will be removed in 5.0");
    }
    return version;
  }
}
| apache-2.0 |
AndreasRicci/firebase-continue | samples/chrome-extension/Continote/scripts/main-popup.js | 10669 | /**
* Copyright (c) 2017 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
/**
* This script is loaded and running whenever the extension browser action
* popup is open. It will not continue to run in the background, so this script
* should only be used for presenting the user with UI for immediate actions
* such as signing in (through use of a secondary popup), signing out, and
* interacting with Firebase Continue data (i.e. either continuing or
* dismissing an activity).
*/
(function() {
  'use strict';

  // NOTE(review): Utils, AuthHelper, FirebaseContinue and Constants are
  // globals supplied by other extension scripts — confirm they are loaded
  // before this one in the popup's HTML.

  /**
   * AuthHelper instance to simplify Firebase Auth usage.
   *
   * @type {?AuthHelper}
   */
  var authHelper_ = null;

  /**
   * Various UI elements which will be manipulated through the lifecycle of
   * this popup. These are organized as part of an object for clarity and a
   * cleaner namespace. All values are null until init_() looks them up.
   *
   * @type {!Object}
   * @const
   */
  var popupUi_ = {
    userSignedInContentContainer: null,
    userSignedOutContentContainer: null,
    userDisplayName: null,
    userEmail: null,
    signInButton: null,
    signOutButton: null,
    userHasNoteToContinueWritingContainer: null,
    continueWritingNoteButton: null,
    dismissWritingNoteButton: null
  };

  /**
   * Handles when the user has an Activity they wish to continue,
   * according to the Firebase Continue library.
   *
   * In the case of Continote, that means the user has a note they may wish to
   * continue writing.
   *
   * @function
   * @const
   */
  var handleUserHasNoteToContinueWriting_ = function() {
    // Show the UI for users who have a note they may wish to continue writing,
    // in case it was previously hidden.
    Utils.showElement(popupUi_.userHasNoteToContinueWritingContainer);
    Utils.enableButtonAndAddClickListener(
        popupUi_.continueWritingNoteButton,
        handleContinueWritingNoteButtonClicked_);
    Utils.enableButtonAndAddClickListener(
        popupUi_.dismissWritingNoteButton,
        handleDismissWritingNoteButtonClicked_);
  };

  /**
   * Handles when the user has no Activity they may wish to continue,
   * according to the Firebase Continue library.
   *
   * In the case of Continote, that means the user has no note they may wish to
   * continue writing.
   *
   * @function
   * @const
   */
  var handleUserHasNoNoteToContinueWriting_ = function() {
    // Hide the UI for users who have a note they may wish to continue writing.
    Utils.hideElement(popupUi_.userHasNoteToContinueWritingContainer);
    Utils.disableButtonAndRemoveClickListener(
        popupUi_.continueWritingNoteButton,
        handleContinueWritingNoteButtonClicked_);
    Utils.disableButtonAndRemoveClickListener(
        popupUi_.dismissWritingNoteButton,
        handleDismissWritingNoteButtonClicked_);
  };

  /**
   * Handles when the most recent Activity the user may wish to to continue
   * changes (i.e. is either set to an actual value, or null).
   *
   * This is also invoked right away by Firebase Continue with the most recent
   * Activity the user may wish to continue when the handler is registered.
   *
   * @type {!FirebaseContinue.ActivityChangedCallback}
   * @const
   */
  var handleUserNoteToContinueWritingChanged_ = function(user, activity) {
    // If the activity is non-null, the user may wish to continue writing a
    // note. Otherwise, the user has no note they may wish to continue writing.
    activity ? handleUserHasNoteToContinueWriting_() :
               handleUserHasNoNoteToContinueWriting_();
  };

  /**
   * Handles when the user signs in.
   *
   * See auth-helper.js for more details.
   *
   * @type {!UserSignedInCallback}
   * @const
   */
  var handleUserSignedIn_ = function(user) {
    // Hide the UI for signed out users.
    Utils.hideElement(popupUi_.userSignedOutContentContainer);
    Utils.disableButtonAndRemoveClickListener(
        popupUi_.signInButton, handleSignInButtonClicked_);

    // Show the UI for signed in users.
    Utils.showElement(popupUi_.userSignedInContentContainer);
    popupUi_.userDisplayName.textContent = user.displayName;
    popupUi_.userEmail.textContent = user.email;
    Utils.enableButtonAndAddClickListener(
        popupUi_.signOutButton, handleSignOutButtonClicked_);
  };

  /**
   * Handles when the user signs out.
   *
   * See auth-helper.js for more details.
   *
   * @type {!UserSignedOutCallback}
   * @const
   */
  var handleUserSignedOut_ = function() {
    // Hide the UI for signed in users.
    Utils.hideElement(popupUi_.userSignedInContentContainer);
    Utils.disableButtonAndRemoveClickListener(
        popupUi_.signOutButton, handleSignOutButtonClicked_);

    // Show the UI for signed out users.
    Utils.showElement(popupUi_.userSignedOutContentContainer);
    Utils.enableButtonAndAddClickListener(
        popupUi_.signInButton, handleSignInButtonClicked_);
  };

  /**
   * Handles when the sign in button is clicked.
   *
   * Displays another popup for sign in purposes if the user is signed out.
   *
   * See the documentation in signin-popup.js to understand why a secondary
   * popup is needed.
   *
   * @type {!ClickEventListener}
   * @const
   */
  var handleSignInButtonClicked_ = function(event) {
    event.preventDefault();

    // Since the click event listener is only on the sign in button when the
    // user is signed out, we can reasonably assume the user is signed out.
    // However, presenting the sign in popup will fail if the user is already
    // signed in, so we need to handle that case in the catch function
    // below - just in case.
    authHelper_.presentSignInPopup().catch(function(error) {
      switch (error) {
        case authHelper_.errorMessages.userAlreadySignedIn:
          // Do nothing, as the user is already signed in.
          break;
        default:
          console.error("Error during sign in: " + error);
      }
    });
  };

  /**
   * Handles when the sign out button is clicked.
   *
   * Signs the user out, if they are signed in.
   *
   * @type {!ClickEventListener}
   * @const
   */
  var handleSignOutButtonClicked_ = function(event) {
    event.preventDefault();

    // Since the click event listener is only on the sign out button when the
    // user is signed in, we can reasonably assume the user is signed in.
    // However, signing out will fail if the user is already signed out,
    // so we need to handle that case in the catch function
    // below - just in case.
    authHelper_.signOut().catch(function(error) {
      switch (error) {
        case authHelper_.errorMessages.userAlreadySignedOut:
          // Do nothing, as the user is already signed out.
          break;
        default:
          console.error("Error during sign out: " + error);
      }
    });
  };

  /**
   * Handles when the continue writing note button is clicked.
   *
   * Continues the latest Activity for the user, which in Continote's case
   * means continuing to write the note which the user last signaled they
   * wished to do.
   *
   * @type {!ClickEventListener}
   * @const
   */
  var handleContinueWritingNoteButtonClicked_ = function(event) {
    event.preventDefault();

    FirebaseContinue.getInstanceFor(Constants.appName)
        .then(function(firebaseContinueInstance) {
          return firebaseContinueInstance.continueLatestActivity();
        })
        .catch(function(error) {
          console.error("Error opening note to continue writing: " + error);
        });
  };

  /**
   * Handles when the dismiss writing note button is clicked.
   *
   * Dismisses the latest Activity for the user, which in Continote's case
   * means not continuing to write the note which the user last signaled they
   * wished to do.
   *
   * @type {!ClickEventListener}
   * @const
   */
  var handleDismissWritingNoteButtonClicked_ = function(event) {
    event.preventDefault();

    FirebaseContinue.getInstanceFor(Constants.appName)
        .then(function(firebaseContinueInstance) {
          return firebaseContinueInstance.dismissLatestActivity();
        })
        .catch(function(error) {
          console.error("Error dismissing note to continue writing: " + error);
        });
  };

  /**
   * Initializes this extension's browser action popup.
   *
   * This is the main entry point of this popup script.
   *
   * @function
   * @const
   */
  var init_ = function() {
    // Hold references to various UI elements for later manipulation.
    // NOTE(review): the element IDs below must match the popup's HTML;
    // getElementById returns null for any that are missing.
    popupUi_.userSignedInContentContainer = document.getElementById(
        "user-signed-in-content-container");
    popupUi_.userSignedOutContentContainer = document.getElementById(
        "user-signed-out-content-container");
    popupUi_.userDisplayName = document.getElementById("user-display-name");
    popupUi_.userEmail = document.getElementById("user-email");
    popupUi_.signInButton = document.getElementById("sign-in-button");
    popupUi_.signOutButton = document.getElementById("sign-out-button");
    popupUi_.userHasNoteToContinueWritingContainer = document.getElementById(
        "user-has-note-to-continue-writing-container");
    popupUi_.continueWritingNoteButton = document.getElementById(
        "continue-writing-note-button");
    popupUi_.dismissWritingNoteButton = document.getElementById(
        "dismiss-writing-note-button");

    // Now that the page is ready, set up the Firebase Auth helper to listen
    // for sign in state changes.
    authHelper_ = new AuthHelper(handleUserSignedIn_, handleUserSignedOut_);

    // Begin listening for Firebase Continue events. In reacting to these
    // events, we will update the UI based on whether the user wishes to
    // continue writing a note or not.
    FirebaseContinue.getInstanceFor(Constants.appName)
        .then(function(firebaseContinueInstance) {
          firebaseContinueInstance.onActivityChanged(
              handleUserNoteToContinueWritingChanged_);
        })
        .catch(function(error) {
          console.error(
              "Error registering callback with Firebase Continue: " + error);
        });
  };

  // When the popup is ready, call the init function.
  window.addEventListener("load", init_);
})();
| apache-2.0 |
osglworks/java-mvc | src/main/java/org/osgl/mvc/result/ServiceUnavailable.java | 7818 | package org.osgl.mvc.result;
/*-
* #%L
* OSGL MVC
* %%
* Copyright (C) 2014 - 2017 OSGL (Open Source General Library)
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import static org.osgl.http.H.Status.SERVICE_UNAVAILABLE;
/**
* The server is currently unavailable (because it is overloaded or down for maintenance). Generally, this
* is a temporary state.
*/
public class ServiceUnavailable extends ErrorResult {
private static final ServiceUnavailable _INSTANCE = new ServiceUnavailable() {
@Override
public String getMessage() {
return payload().message;
}
@Override
public Integer errorCode() {
return payload().errorCode;
}
@Override
public long timestamp() {
return payload().timestamp;
}
};
public ServiceUnavailable() {
super(SERVICE_UNAVAILABLE);
}
public ServiceUnavailable(String message, Object ... args) {
super(SERVICE_UNAVAILABLE, message, args);
}
public ServiceUnavailable(Throwable cause, String message, Object ... args) {
super(SERVICE_UNAVAILABLE, cause, message, args);
}
public ServiceUnavailable(Throwable cause) {
super(SERVICE_UNAVAILABLE, cause);
}
public ServiceUnavailable(int errorCode) {
super(SERVICE_UNAVAILABLE, errorCode);
}
public ServiceUnavailable(int errorCode, String message, Object ... args) {
super(SERVICE_UNAVAILABLE, errorCode, message, args);
}
public ServiceUnavailable(int errorCode, Throwable cause, String message, Object ... args) {
super(SERVICE_UNAVAILABLE, errorCode, cause, message, args);
}
public ServiceUnavailable(int errorCode, Throwable cause) {
super(SERVICE_UNAVAILABLE, errorCode, cause);
}
/**
* Returns a static ServiceUnavailable instance and set the {@link #payload} thread local
* with default message.
*
* When calling the instance on {@link #getMessage()} method, it will return whatever
* stored in the {@link #payload} thread local
*
* @return a static ServiceUnavailable instance as described above
*/
public static ServiceUnavailable get() {
if (_localizedErrorMsg()) {
return of(defaultMessage(SERVICE_UNAVAILABLE));
} else {
touchPayload();
return _INSTANCE;
}
}
/**
* Returns a static ServiceUnavailable instance and set the {@link #payload} thread local
* with message specified.
*
* When calling the instance on {@link #getMessage()} method, it will return whatever
* stored in the {@link #payload} thread local
*
* @param message the message
* @param args the message arguments
* @return a static ServiceUnavailable instance as described above
*/
public static ServiceUnavailable of(String message, Object... args) {
touchPayload().message(message, args);
return _INSTANCE;
}
/**
* Returns a static ServiceUnavailable instance and set the {@link #payload} thread local
* with cause specified.
*
* When calling the instance on {@link #getMessage()} method, it will return whatever
* stored in the {@link #payload} thread local
*
* @param cause the cause
* @return a static ServiceUnavailable instance as described above
*/
public static ServiceUnavailable of(Throwable cause) {
if (_localizedErrorMsg()) {
return of(cause, defaultMessage(SERVICE_UNAVAILABLE));
} else {
touchPayload().cause(cause);
return _INSTANCE;
}
}
/**
* Returns a static ServiceUnavailable instance and set the {@link #payload} thread local
* with cause and message specified.
*
* When calling the instance on {@link #getMessage()} method, it will return whatever
* stored in the {@link #payload} thread local
*
* @param cause the cause
* @param message the message
* @param args the message arguments
* @return a static ServiceUnavailable instance as described above
*/
public static ServiceUnavailable of(Throwable cause, String message, Object... args) {
touchPayload().message(message, args).cause(cause);
return _INSTANCE;
}
/**
* Returns a static ServiceUnavailable instance and set the {@link #payload} thread local
* with error code and default message.
*
* When calling the instance on {@link #getMessage()} method, it will return whatever
* stored in the {@link #payload} thread local
*
* @param errorCode the app defined error code
* @return a static ServiceUnavailable instance as described above
*/
public static ServiceUnavailable of(int errorCode) {
if (_localizedErrorMsg()) {
return of(errorCode, defaultMessage(SERVICE_UNAVAILABLE));
} else {
touchPayload().errorCode(errorCode);
return _INSTANCE;
}
}
/**
* Returns a static ServiceUnavailable instance and set the {@link #payload} thread local
* with error code and message specified.
*
* When calling the instance on {@link #getMessage()} method, it will return whatever
* stored in the {@link #payload} thread local
*
* @param errorCode the app defined error code
* @param message the message
* @param args the message arguments
* @return a static ServiceUnavailable instance as described above
*/
public static ServiceUnavailable of(int errorCode, String message, Object... args) {
touchPayload().errorCode(errorCode).message(message, args);
return _INSTANCE;
}
/**
* Returns a static ServiceUnavailable instance and set the {@link #payload} thread local
* with error code and cause specified
*
* When calling the instance on {@link #getMessage()} method, it will return whatever
* stored in the {@link #payload} thread local
*
* @param cause the cause
* @param errorCode the app defined error code
* @return a static ServiceUnavailable instance as described above
*/
public static ServiceUnavailable of(int errorCode, Throwable cause) {
if (_localizedErrorMsg()) {
return of(errorCode, cause, defaultMessage(SERVICE_UNAVAILABLE));
} else {
touchPayload().errorCode(errorCode).cause(cause);
return _INSTANCE;
}
}
    /**
     * Returns a static ServiceUnavailable instance and set the {@link #payload} thread local
     * with error code, cause and message specified.
     *
     * When calling the instance on {@link #getMessage()} method, it will return whatever
     * stored in the {@link #payload} thread local
     *
     * @param cause the cause
     * @param errorCode the app defined error code
     * @param message the message
     * @param args the message arguments
     * @return a static ServiceUnavailable instance as described above
     */
    public static ServiceUnavailable of(int errorCode, Throwable cause, String message, Object... args) {
        // Record all three pieces on the per-thread payload, then return the singleton.
        touchPayload().errorCode(errorCode).message(message, args).cause(cause);
        return _INSTANCE;
    }
}
| apache-2.0 |
FINRAOS/JTAF-XCore | src/main/java/org/finra/jtaf/core/parsing/ScriptParser.java | 31121 | /*
* (C) Copyright 2014 Java Test Automation Framework Contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.finra.jtaf.core.parsing;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import org.apache.log4j.Logger;
import org.finra.jtaf.core.CommandRegistry;
import org.finra.jtaf.core.exceptions.DependencyException;
import org.finra.jtaf.core.model.exceptions.NameFormatException;
import org.finra.jtaf.core.model.statement.Invocation;
import org.finra.jtaf.core.model.test.Requirement;
import org.finra.jtaf.core.model.test.TestNamespace;
import org.finra.jtaf.core.model.test.TestScript;
import org.finra.jtaf.core.model.test.TestSuite;
import org.finra.jtaf.core.model.test.digraph.Dependencies;
import org.finra.jtaf.core.model.test.digraph.DiNode;
import org.finra.jtaf.core.model.test.digraph.TestDigraph;
import org.finra.jtaf.core.parsing.exceptions.ExceptionAccumulator;
import org.finra.jtaf.core.parsing.exceptions.MissingAttributeException;
import org.finra.jtaf.core.parsing.exceptions.ParsingException;
import org.finra.jtaf.core.parsing.exceptions.UnexpectedElementException;
import org.finra.jtaf.core.parsing.helpers.AttributeHelper;
import org.finra.jtaf.core.parsing.helpers.ParserHelper;
import org.finra.jtaf.core.plugins.parsing.IPostParseSuitePlugin;
import org.finra.jtaf.core.plugins.parsing.IPostParseTestPlugin;
import org.finra.jtaf.core.plugins.parsing.PostSuiteParserPluginContext;
import org.finra.jtaf.core.plugins.parsing.PostTestParserPluginContext;
import org.finra.jtaf.core.utilities.ExcelFileParser;
import org.finra.jtaf.core.utilities.StringHelper;
import org.finra.jtaf.core.utilities.logging.MessageCollector;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.SAXException;
import au.com.bytecode.opencsv.CSVReader;
/**
* FIXME This is a "just get the job done" implementation. I plan to replace
* this class as soon as I get the chance. (I mean... ay ay ay! This is bad!)
*
*/
public class ScriptParser {
    static Logger logger = Logger.getLogger(ScriptParser.class.getPackage().getName());
    // Reusable DOM parser, built once in the constructor.
    private final DocumentBuilder db;
    // Digraph of parsed tests; used to detect duplicate test names (see handleTestSuite).
    private TestDigraph digraph;
    // Delegate parser for <teststeps> bodies and text-bearing elements.
    private StatementParser stmtParser;
    // Most recently parsed suite document; read back by handleTestSource after
    // handleTestSuite returns. NOTE(review): shared mutable state makes this
    // class non-reentrant -- confirm single-threaded use.
    private Document d;
    private CommandRegistry commandRegistry;
    // Plugins executed after a suite / a test has been parsed (may be null).
    private List<IPostParseSuitePlugin> postParseSuitePlugins;
    private List<IPostParseTestPlugin> postParseTestPlugins;
    // Builds the DOM parser and the statement-parsing delegate.
    public ScriptParser() throws ParserConfigurationException {
        db = DocumentBuilderFactory.newInstance().newDocumentBuilder();
        stmtParser = new StatementParser();
    }
    /** Injects the digraph used to track parsed tests and detect duplicates. */
    public void setDigraph(TestDigraph digraph) {
        this.digraph = digraph;
    }
    /** Registers plugins run once per parsed suite (see handleTestSource). */
    public void setPostParseSuitePlugins(List<IPostParseSuitePlugin> plugins) {
        postParseSuitePlugins = plugins;
    }
    /** Registers plugins run once per parsed test (see handleTestSuite). */
    public void setPostParseTestPlugins(List<IPostParseTestPlugin> plugins) {
        postParseTestPlugins = plugins;
    }
    /** Supplies the command registry handed to parser plugins. */
    public void setCommandRegistry(CommandRegistry commandRegistry) {
        this.commandRegistry = commandRegistry;
    }
    /**
     * Recursively walks a test source file or directory and builds the matching
     * TestNamespace tree.
     *
     * Directories recurse into this method; plain files are handed to
     * handleTestSuite. Failures are collected into an ExceptionAccumulator so
     * one bad file does not stop the walk; the accumulator is thrown at the end
     * if anything failed.
     *
     * @param f  file or directory to scan
     * @param mc collector receiving one error entry per failed child
     * @return namespace named after {@code f} containing all parsed suites
     */
    public final TestNamespace handleTestSource(File f, MessageCollector mc)
            throws NameFormatException, SAXException, IOException, ParsingException {
        if (!f.exists()) {
            throw new FileNotFoundException(f.getAbsolutePath());
        }
        TestNamespace testNamespace = new TestNamespace(f.getName());
        if (f.isDirectory() && !f.isHidden()) {
            ExceptionAccumulator acc = new ExceptionAccumulator();
            for (File child : f.listFiles()) {
                if (child.isDirectory()) {
                    try {
                        testNamespace.add(handleTestSource(child, mc));
                    } catch (Throwable th) {
                        mc.error(th.getMessage());
                        acc.add(th);
                    }
                } else { // It's a file
                    try {
                        TestSuite ts = handleTestSuite(child, mc);
                        if (ts != null) {
                            testNamespace.add(ts);
                            // run all post suite parse plugins
                            // NOTE(review): 'd' is the document parsed by the
                            // handleTestSuite call just above (shared field).
                            Node suiteRootNode = (Node) (d.getDocumentElement());
                            // TODO: This needs to check for null, otherwise it
                            // crashes
                            if (postParseSuitePlugins != null) {
                                for (IPostParseSuitePlugin p : postParseSuitePlugins) {
                                    if (suiteContainsTheTag(suiteRootNode, p.getTagName())) {
                                        p.execute(new PostSuiteParserPluginContext(commandRegistry, ts, suiteRootNode));
                                    }
                                }
                            }
                        }
                    } catch (Throwable th) {
                        mc.error(th.getMessage());
                        acc.add(th);
                    }
                }
            }
            if (!acc.isEmpty()) {
                throw acc;
            }
        }
        return testNamespace;
    }
private boolean suiteContainsTheTag(Node suiteRootNode, String tagName) {
NodeList children = suiteRootNode.getChildNodes();
for (int i = 0; i < children.getLength(); i++) {
if (children.item(i).getNodeName().equalsIgnoreCase(tagName)) {
return true;
}
}
return false;
}
    /**
     * Parses a single .xml file into a TestSuite: builds the suite shell,
     * expands its tests, registers each test in the digraph (rejecting
     * duplicate names) and runs the post-parse test plugins.
     *
     * Returns null for directories and non-xml files. SAXExceptions are
     * reported to the collector and deliberately swallowed (the suite is
     * skipped); IOExceptions are rethrown.
     *
     * @param f  candidate suite file
     * @param mc collector scoped to this file via push/pop
     * @return parsed suite, or null if {@code f} is not an xml suite file
     */
    private TestSuite handleTestSuite(File f, MessageCollector mc) throws ParsingException,
            NameFormatException, IOException {
        if (!f.exists()) {
            throw new FileNotFoundException(f.getAbsolutePath());
        }
        TestSuite testSuite = null;
        if (f.isFile() && f.getName().endsWith(".xml")) { // This should be a TestSuite
            mc.push("In test file " + f.getAbsolutePath());
            try {
                d = db.parse(f);
                testSuite = processTestSuite(d.getDocumentElement(), mc, f.getName());
                for (TestScript ts : processTestScripts(d.getDocumentElement(), mc)) {
                    try {
                        // Digraph rejects a second test with the same name anywhere in the run.
                        if (!digraph.addVertex(new DiNode(ts))) {
                            throw new DependencyException("Duplicate test name '" + ts.getName()
                                    + "' found at: "
                                    + digraph.getVertex(ts.getName()).getTestScript().getFullName());
                        }
                        ts.setFileName(f.getName());
                        testSuite.add(ts);
                        // run post parse test plugins
                        Node testRootNode = getTestRootNode(d.getDocumentElement(), ts);
                        for (IPostParseTestPlugin p : postParseTestPlugins) {
                            if (testContainsTheTag(testRootNode, p.getTagName())) {
                                p.execute(new PostTestParserPluginContext(commandRegistry, testSuite, testRootNode));
                            }
                        }
                    } catch (DependencyException de) {
                        mc.error(de.getMessage());
                        throw de;
                    } catch (Throwable th) {
                        // Other per-test failures are logged but do not abort the suite.
                        mc.error(th.getMessage());
                        logger.fatal(th.getMessage());
                    }
                }
            } catch (SAXException e) {
                // NOTE(review): parse failure returns a null suite instead of propagating.
                mc.error(e.getMessage());
                // throw e;
            } catch (IOException e) {
                mc.error(e.getMessage());
                throw e;
            } finally {
                mc.pop();
            }
        }
        return testSuite;
    }
private boolean testContainsTheTag(Node testRootNode, String tagName) {
NodeList children = testRootNode.getChildNodes();
for (int i = 0; i < children.getLength(); i++) {
if (children.item(i).getNodeName().equalsIgnoreCase(tagName)) {
return true;
}
}
return false;
}
private Node getTestRootNode(Element documentElement, TestScript ts) {
NodeList list = documentElement.getElementsByTagName("test");
for (int i = 0; i < list.getLength(); i++) {
Node n = list.item(i);
if (n.getAttributes().getNamedItem("name").getNodeValue().equals(ts.getOriginalName())) {
return n;
}
}
return null;
}
    /**
     * Builds the TestSuite shell (name, empty dependencies/exclusions) from a
     * <testsuite> root element; tests themselves are added elsewhere.
     *
     * @param element  expected to be the <testsuite> root element
     * @param fileName file the suite came from; used to construct the suite
     * @param mc       message collector for this file
     * @return suite with name and empty dependency/exclusion sets
     * @throws ParsingException if the root element is not <testsuite>
     */
    public final TestSuite processTestSuite(Element element, MessageCollector mc, String fileName)
            throws ParsingException, NameFormatException {
        TestSuite testSuite = new TestSuite(fileName);
        if (element.getNodeName().equalsIgnoreCase("testsuite")) {
            AttributeHelper testScriptAttributeHelper = new AttributeHelper(element);
            String testSuiteName = null;
            try {
                testSuiteName = testScriptAttributeHelper.getRequiredString("name");
                testSuite.setTestSuiteName(testSuiteName);
            } catch (MissingAttributeException e) {
                // NOTE(review): push() is used where error() might be expected --
                // confirm MessageCollector semantics before changing.
                mc.push(e.getMessage());
                logger.debug("Oops! Test suite has no 'name' attribute! ('" + element.toString()
                        + "')");
            }
            testSuite.setDependencies(new Dependencies(null, null));
            testSuite.setExclusions(new Dependencies(null, null));
            return testSuite;
        } else {
            UnexpectedElementException z = new UnexpectedElementException(element);
            mc.error(z.getMessage());
            throw z;
        }
    }
    /**
     * Collects the direct <test> children of a <testsuite> element and expands
     * them into TestScripts via parseTests.
     *
     * @param element expected to be the <testsuite> root element
     * @param mc      message collector for this file
     * @return all scripts produced from the suite's <test> elements
     * @throws ParsingException if the root element is not <testsuite>
     */
    private List<TestScript> processTestScripts(Element element, MessageCollector mc)
            throws ParsingException {
        if (element.getNodeName().equalsIgnoreCase("testsuite")) {
            List<TestScript> testScripts = null;
            AttributeHelper testScriptAttributeHelper = new AttributeHelper(element);
            String testSuiteName = null;
            try {
                testSuiteName = testScriptAttributeHelper.getRequiredString("name");
            } catch (MissingAttributeException e) {
                // NOTE(review): push() used for an error; see processTestSuite.
                mc.push(e.getMessage());
                logger.debug("Oops! Test suite hasn't 'name' attibite! ('" + element.toString()
                        + "')");
            }
            // MULTITHREADED 2012.4 Changes START
            // -------------------------------------
            // Separate out the <test> elements at the testsuite level.
            ArrayList<Element> testSuiteChildren = (ArrayList<Element>) ParserHelper
                    .getChildren(element);
            ArrayList<Element> tests = new ArrayList<Element>();
            for (Element e : testSuiteChildren) {
                if (e.getNodeName().equalsIgnoreCase("test")) {
                    tests.add(e);
                }
            }
            testScripts = parseTests(tests, mc, testSuiteName);
            // MULTITHREADED 2012.4 Changes END
            // -------------------------------------
            return testScripts;
        } else {
            UnexpectedElementException z = new UnexpectedElementException(element);
            mc.error(z.getMessage());
            throw z;
        }
    }
    /**
     * Expands each <test> element into one or more TestScript instances.
     *
     * A test may multiply in two independent ways: a 'loop' attribute repeats
     * it N times, and a data file (xls/xlsx/csv) clones it once per data row
     * (row 0 is the header row). When both are present every loop iteration is
     * crossed with every data row. Per-test failures are accumulated so the
     * remaining tests still parse; the accumulator is thrown at the end if
     * anything failed.
     *
     * @param elementList   the <test> elements of one suite
     * @param mc            message collector for this file
     * @param testSuiteName suite name stamped on every produced script
     * @return all scripts produced from the given elements
     * @throws ExceptionAccumulator collected per-test failures, if any
     */
    private List<TestScript> parseTests(List<Element> elementList, MessageCollector mc,
            String testSuiteName) throws ExceptionAccumulator {
        List<TestScript> testScripts = new ArrayList<TestScript>();
        ExceptionAccumulator acc = new ExceptionAccumulator();
        for (Element child : elementList) {
            try {
                AttributeHelper testAttributeHelper = new AttributeHelper(child);
                String testName = testAttributeHelper.getOptionalString("name");
                int testLoopNumber = getLoopNumber(testAttributeHelper, mc);
                String testDataFile = getTestDataFile(child);
                String sheetName = getSheetName(child);
                List<List<String>> testData = null;
                int tsNamefromFilePosition = -1;
                if (testDataFile != null) {
                    // Data file format is chosen by extension; unknown extensions
                    // are logged and the test behaves as if it had no data file.
                    if (testDataFile.endsWith(".xlsx")) {
                        testData = getExcelDataFromFile(testDataFile, sheetName, mc, true);
                    } else if (testDataFile.endsWith(".xls")) {
                        testData = getExcelDataFromFile(testDataFile, sheetName, mc, false);
                    } else if (testDataFile.endsWith(".csv")) {
                        testData = getCSVDataFromFile(testDataFile, mc);
                    } else {
                        logger.fatal("Oops! can't parse test data file ('" + testDataFile
                                + "'). Supported 'xls', 'xlsx' and 'csv' extentions.");
                    }
                    tsNamefromFilePosition = getTSNameFromDataFilePosition(testData);
                }
                for (int i = 1; i <= testLoopNumber; i++) {
                    TestScript ts = processTestScript(child, mc);
                    ts.setTestSuiteName(testSuiteName);
                    if (testData != null && testData.size() > 0) {
                        int rowNumber = 0;
                        List<String> titleRow = null;
                        for (List<String> row : testData) {
                            String tsNamefromFile = "";
                            if (tsNamefromFilePosition >= 0) {
                                tsNamefromFile = "testNameFromDataFile-"
                                        + row.get(tsNamefromFilePosition);
                            }
                            if (0 == rowNumber) {
                                // Row 0 is the header; it names the parameters.
                                titleRow = row;
                            } else {
                                if (testLoopNumber != 1) {
                                    ts.setName(testName + " [data file row #" + rowNumber
                                            + "; iteration #" + i + " of " + testLoopNumber + "]; "
                                            + tsNamefromFile);
                                } else {
                                    ts.setName(testName + " [data file row #" + rowNumber + "] ; "
                                            + tsNamefromFile);
                                }
                                ts.setDescription(ts.getDescription() + " ("
                                        + StringHelper.getZipAndConcatenated(titleRow, row, ", ")
                                        + ")");
                                setTestDataToTestScript(titleRow, row, ts);
                                testScripts.add(ts);
                            }
                            rowNumber++;
                            // Re-parse the element so the next row gets a fresh script
                            // (the last one created after the final row is discarded).
                            ts = processTestScript(child, mc);
                            ts.setTestSuiteName(testSuiteName);
                        }
                    } else {
                        if (testLoopNumber > 1) {
                            ts.setName(testName + " [iteration " + i + " of " + testLoopNumber
                                    + "]");
                        }
                        testScripts.add(ts);
                    }
                }
            } catch (Throwable th) {
                logger.fatal(th.getMessage());
                mc.error(th.getMessage());
                acc.add(th);
            }
        }
        if (!acc.isEmpty()) {
            throw acc;
        }
        return testScripts;
    }
private String getTestDataFile(Element test) {
for (Element testChild : ParserHelper.getChildren(test)) {
if (testChild.getTagName().equalsIgnoreCase("testData")) {
String file = testChild.getAttribute("file");
if (file != null && file.length() > 0) {
return file;
}
}
}
return null;
}
private String getSheetName(Element test) {
for (Element testChild : ParserHelper.getChildren(test)) {
if (testChild.getTagName().equalsIgnoreCase("testData")) {
String file = testChild.getAttribute("sheet");
if (file != null && file.length() > 0) {
return file;
}
}
}
return null;
}
private List<List<String>> getExcelDataFromFile(String testDataFile, String sheetName,
MessageCollector mc, boolean isXlsx) {
if (testDataFile != null && testDataFile.length() > 0) {
ExcelFileParser excelFileParser = null;
try {
if (sheetName != null) {
excelFileParser = new ExcelFileParser(testDataFile, sheetName, isXlsx);
} else {
excelFileParser = new ExcelFileParser(testDataFile, isXlsx);
}
return excelFileParser.parseExcelFile(isXlsx);
} catch (Exception e) {
String errorMessage = "Oops! Can't parse excel file '" + testDataFile + "'!";
logger.fatal(errorMessage);
mc.error(errorMessage);
}
}
return null;
}
private List<List<String>> getCSVDataFromFile(String testDataFile, MessageCollector mc) {
List<List<String>> result = new ArrayList<List<String>>();
CSVReader reader = null;
try {
reader = new CSVReader(new FileReader(testDataFile));
List<String> nextLine;
while ((nextLine = StringHelper.ArrayToList(reader.readNext())) != null) {
if ((nextLine != null) && (nextLine.size() > 0)
&& (!nextLine.get(0).startsWith("#"))) {
result.add(nextLine);
}
}
} catch (Exception e) {
logger.fatal("Oops! Can't open file '" + testDataFile + "'!");
return null;
} finally {
if (reader != null) {
try {
reader.close();
} catch (Exception e) {
//Dont care
logger.fatal("Oops! Can't close file '" + testDataFile + "'!");
}
}
}
return result;
}
private int getLoopNumber(AttributeHelper testAttributeHelper, MessageCollector mc) {
String testLoop = testAttributeHelper.getOptionalString("loop");
int testLoopNumber = 1;
if (testLoop != null) {
try {
testLoopNumber = Integer.parseInt(testLoop);
return testLoopNumber;
} catch (java.lang.NumberFormatException e) {
String errorMessage = "Oops! Can't parse test 'loop' property ('"
+ testLoop
+ "'). It has be number like '3'. This parameter means number of execution for this test. Fix your test case script, please!";
logger.fatal(errorMessage);
mc.error(errorMessage);
}
}
return 1;
}
    // TODO: Start with here
    // Receives the header row, one data row and the script cloned for that row;
    // copies the row's values into every invocation's parameter map. Called once
    // per data row, so each row effectively becomes its own test script.
    /**
     * Injects one data-file row into a cloned TestScript: parameter names come
     * from the header row, values from the data row; cells missing from a short
     * row become "".
     *
     * @param title header row (parameter names)
     * @param data  data row (parameter values)
     * @param ts    script clone receiving the parameters
     */
    private void setTestDataToTestScript(List<String> title, List<String> data, TestScript ts) {
        for (Invocation statement : ts.getBody()) {
            // instanceof also filters out possible null entries in the body.
            if (statement instanceof Invocation) {
                Map<String, Object> parameters = ((Invocation) statement).getParameters();
                for (int i = 0; i < title.size(); i++) {
                    if (i < data.size()) {
                        parameters.put(title.get(i), data.get(i));
                    } else {
                        parameters.put(title.get(i), "");
                    }
                }
            }
            /**
             * if (statement instanceof TryRecoverCleanup) { TryRecoverCleanup
             * trcSt = (TryRecoverCleanup) statement;
             *
             * // StatementList statementList = trcSt.getTry(); //
             * statementList.addAll(trcSt.getRecover()); //
             * statementList.addAll(trcSt.getCleanup()); // TODO: Ask author why
             * this was placed and when?
             *
             * for (IStatement currentStatement : statementList) { if
             * (currentStatement instanceof Invocation) { Map<String, Object>
             * parameters = ((Invocation) currentStatement) .getParameters();
             * for (int i = 0; i < title.size(); i++) { if (i < data.size()) {
             * parameters.put(title.get(i), data.get(i)); } else {
             * parameters.put(title.get(i), ""); } } } } }
             **/
        }
    }
    /**
     * Builds one TestScript from a <test> element: steps, dependencies,
     * exclusions, test case id, issues/CRs, status, automation value, coverage
     * and description.
     *
     * The element's <teststeps> are first rewritten by preprocessTestScript
     * (teardown handling) and then parsed by the statement parser.
     *
     * @param elem the <test> element
     * @param mc   collector scoped to this test via push/pop
     * @return the fully populated script
     * @throws ParsingException on a malformed name or missing attributes
     */
    public final TestScript processTestScript(Element elem, MessageCollector mc)
            throws ParsingException {
        preprocessTestScript(ParserHelper.getRequireElement(elem, "teststeps"), mc);
        AttributeHelper ah = new AttributeHelper(elem);
        String name = null;
        try {
            name = ah.getRequiredString("name");
        } catch (MissingAttributeException e) {
            // NOTE(review): push without a matching pop before the rethrow --
            // confirm MessageCollector balancing expectations.
            mc.push(e.getMessage());
            throw e;
        }
        // Tri-state attribute: true/1/yes, false/0/no, anything else is logged
        // and treated as false.
        boolean isCaptureSystemInformation = false;
        String isCaptureSystemInformationStr = ah.getOptionalString("isCaptureSystemInformation");
        if (isCaptureSystemInformationStr != null) {
            if (isCaptureSystemInformationStr.equalsIgnoreCase("true")
                    || isCaptureSystemInformationStr.equalsIgnoreCase("1")
                    || isCaptureSystemInformationStr.equalsIgnoreCase("yes")) {
                isCaptureSystemInformation = true;
            } else if (!isCaptureSystemInformationStr.equalsIgnoreCase("false")
                    && !isCaptureSystemInformationStr.equalsIgnoreCase("0")
                    && !isCaptureSystemInformationStr.equalsIgnoreCase("no")) {
                logger.fatal("Oops! Can't parse '"
                        + isCaptureSystemInformationStr
                        + "'. It can be only one of 'true'/'false', '1'/'0' or 'yes'/'no'. Fix your test script, please!..");
            }
        }
        try {
            mc.push("In test " + name);
            TestScript testScript = new TestScript(name, isCaptureSystemInformation);
            testScript.setBody(stmtParser.processStatementList(ParserHelper.getRequireElement(elem,
                    "teststeps"), mc));
            // Multithread 2012.4 CHANGES START------------------------
            // Collect per-test dependency declarations (<dependencies> children).
            Element dependentElement = ParserHelper.getFirstChildElementCaseInsensitive(elem,
                    "dependencies");
            Set<String> dependentListTests = new HashSet<String>();
            Set<String> dependentListTestSuites = new HashSet<String>();
            if (dependentElement != null) {
                for (Element e : ParserHelper.getChildren(dependentElement)) {
                    if (e.getNodeName().equalsIgnoreCase("test")) {
                        dependentListTests.add(new AttributeHelper(e).getRequiredString("name"));
                    } else if (e.getNodeName().equalsIgnoreCase("testsuite")) {
                        dependentListTestSuites.add(new AttributeHelper(e)
                                .getRequiredString("name"));
                    }
                }
            }
            // Collect per-test exclusion declarations (<exclusions> children).
            Element exclusionElement = ParserHelper.getFirstChildElementCaseInsensitive(elem,
                    "exclusions");
            Set<String> excludedListTests = new HashSet<String>();
            Set<String> excludedListTestSuites = new HashSet<String>();
            if (exclusionElement != null) {
                for (Element e : ParserHelper.getChildren(exclusionElement)) {
                    if (e.getNodeName().equalsIgnoreCase("test")) {
                        excludedListTests.add(new AttributeHelper(e).getRequiredString("name"));
                    } else if (e.getNodeName().equalsIgnoreCase("testsuite")) {
                        excludedListTestSuites
                                .add(new AttributeHelper(e).getRequiredString("name"));
                    }
                }
            }
            testScript.setDependencies(dependentListTestSuites, dependentListTests);
            testScript.setExclusions(excludedListTestSuites, excludedListTests);
            // Multithread 2012.4 CHANGES END------------------------
            testScript.setTestCaseID(ah.getOptionalString("testcaseid"));
            Element issuesElem = ParserHelper.getOptionalElement(elem, "issues");
            if (issuesElem != null) {
                testScript.setIssue(processIssue(issuesElem));
                testScript.setCRs(processCRs(issuesElem));
            }
            testScript.setStatus(ah.getOptionalString("status"));
            Element avElem = ParserHelper.getFirstChildElementCaseInsensitive(elem,
                    "automationvalue");
            if (avElem != null) {
                // not sure why this doesn't just call avElem.getTextContent()
                testScript.setAutomationValue(stmtParser.processString(avElem, mc).toString());
            }
            // TODO: Should we make this into a list?
            // retval.getRequirements() = new
            Element covElem = ParserHelper.getOptionalElement(elem, "coverage");
            if (covElem != null) {
                // Requirement List
                List<Element> children = ParserHelper.getChildren(covElem);
                ArrayList<Requirement> requirements = new ArrayList<Requirement>();
                StringBuffer coverage = new StringBuffer();
                if (!children.isEmpty()) {
                    for (Element child : children) {
                        Requirement requirement = new Requirement();
                        String type = child.getAttribute("type");
                        String value = child.getTextContent();
                        if (!type.equals("")) {
                            requirement.setType(child.getAttribute("type"));
                            requirement.setValue(value.trim());
                            requirements.add(requirement);
                        }
                        // to maintain coverage for tracebility matrix
                        coverage = coverage.append(value.trim() + ",");
                    }
                    testScript.setRequirements(requirements);
                    // Drop the trailing comma added in the loop above.
                    testScript.setCoverage(coverage.substring(0, coverage.length() - 1));
                } else {
                    testScript.setCoverage(stmtParser.processString(covElem, mc).toString().trim());
                }
            }
            Element descElem = ParserHelper.getOptionalElement(elem, "desc");
            if (descElem != null) {
                testScript.setDescription(stmtParser.processString(descElem, mc).toString());
            }
            return testScript;
        } catch (NameFormatException e) {
            mc.error(e.getMessage());
            throw new ParsingException(e);
        } finally {
            mc.pop();
        }
    }
    /**
     * Rewrites a trailing <teardown> element in <teststeps> into a
     * TryRecoverCleanup DOM structure: all preceding steps move under <try>
     * and the teardown's children under <cleanup isTearDown="true"> --
     * presumably so teardown still runs when a step fails (the actual
     * semantics live in the TryRecoverCleanup statement handler).
     *
     * @param elem the <teststeps> element, mutated in place
     * @param mc   message collector (currently unused here)
     */
    private void preprocessTestScript(Element elem, MessageCollector mc) {
        List<Element> children = ParserHelper.getChildren(elem);
        if (children.isEmpty()) {
            return;
        }
        // If we detect a Teardown command, then we need to create
        // a Try...Cleanup block
        final Element last = children.get(children.size() - 1);
        if (last.getNodeName().equalsIgnoreCase("teardown")) {
            final Element safety = elem.getOwnerDocument().createElement("TryRecoverCleanup");
            final Element tryBlock = elem.getOwnerDocument().createElement("try");
            final Element cleanupBlock = elem.getOwnerDocument().createElement("cleanup");
            safety.appendChild(tryBlock);
            safety.appendChild(cleanupBlock);
            // Move every step except the teardown under <try>.
            for (int i = 0, max = children.size() - 1; i < max; ++i) {
                Element next = children.get(i);
                elem.removeChild(next);
                tryBlock.appendChild(next);
            }
            elem.removeChild(last);
            cleanupBlock.setAttribute("isTearDown", "true");
            for (Element e : ParserHelper.getChildren(last)) {
                cleanupBlock.appendChild(e);
            }
            elem.appendChild(safety);
        }
    }
private String processIssue(Element elem) {
String retval = "";
NodeList nl = elem.getChildNodes();
for (int i = 0; i < nl.getLength(); ++i) {
Node n = nl.item(i);
if (n.getNodeType() == Node.TEXT_NODE) {
retval += n.getNodeValue();
}
}
if (!retval.equals("")) {
return retval.trim();
}
else {
return null;
}
}
private List<String> processCRs(Element elem) {
List<String> retval = new ArrayList<String>();
for (Element child : ParserHelper.getChildren(elem)) {
if (child.getNodeName().equalsIgnoreCase("cr")) {
String crNumber = child.getAttribute("no");
retval.add(crNumber);
}
}
return retval;
}
private int getTSNameFromDataFilePosition(List<List<String>> testData) {
// try to findout 'JTAF.test.name' column. Value from this column
// necessary to add to ts name.
if (testData != null && testData.size() > 0) {
List<String> firstLine = testData.get(0);
int pos = 0;
for (String firstLineItem : firstLine) {
if (firstLineItem != null && firstLineItem.equalsIgnoreCase("JTAF.test.name")) {
return pos;
} else {
pos++;
}
}
}
return -1;
}
}
| apache-2.0 |
apache/chukwa | core/contrib/chukwa-pig/test/src/java/org/apache/hadoop/chukwa/util/GenerateTestFile.java | 10584 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.util;
import org.apache.hadoop.chukwa.extraction.engine.ChukwaRecord;
import org.apache.hadoop.chukwa.extraction.engine.ChukwaRecordKey;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.SequenceFile.CompressionType;
public class GenerateTestFile {
/* Pig Test:
A = load './chukwaTestFile.evt' using org.apache.hadoop.chukwa.pig.ChukwaLoader() as (ts: long,fields);
Dump A;
(1242205800L,[A#7,B#3,csource#M0,C#9])
(1242205800L,[D#1,csource#M0])
(1242205800L,[A#17,csource#M1])
(1242205800L,[B#37,C#51,csource#M1])
(1242205860L,[D#12,A#8,csource#M0,C#3])
(1242205860L,[A#8,B#6,csource#M0])
(1242205860L,[D#6,A#13.2,B#23,C#8.5,csource#M1])
(1242205860L,[D#6,A#13.2,B#23,C#8.5,csource#M1])
(1242205920L,[D#6,E#48.5,A#8,B#6,C#8,csource#M0])
(1242205920L,[D#61.9,E#40.3,A#8.3,B#5.2,C#37.7,csource#M1])
(1242205980L,[A#18.3,B#1.2,csource#M1,C#7.7])
(1242205980L,[D#6.1,A#8.9,B#8.3,C#7.2,csource#M2])
(1242205920L,[A#12.5,B#26.82,csource#M3,C#89.51])
(1242205920L,[A#13.91,B#21.02,csource#M4,C#18.05])
B = group A by (ts,fields#'csource');
Dump B;
((1242205800L,M0),{(1242205800L,[A#7,B#3,csource#M0,C#9]),(1242205800L,[D#1,csource#M0])})
((1242205800L,M1),{(1242205800L,[A#17,csource#M1]),(1242205800L,[B#37,C#51,csource#M1])})
((1242205860L,M0),{(1242205860L,[D#12,A#8,csource#M0,C#3]),(1242205860L,[A#8,B#6,csource#M0])})
((1242205860L,M1),{(1242205860L,[D#6,A#13.2,B#23,C#8.5,csource#M1]),(1242205860L,[D#6,A#13.2,B#23,C#8.5,csource#M1])})
((1242205920L,M0),{(1242205920L,[D#6,E#48.5,A#8,B#6,C#8,csource#M0])})
((1242205920L,M1),{(1242205920L,[D#61.9,E#40.3,A#8.3,B#5.2,C#37.7,csource#M1])})
((1242205920L,M3),{(1242205920L,[A#12.5,B#26.82,csource#M3,C#89.51])})
((1242205920L,M4),{(1242205920L,[A#13.91,B#21.02,csource#M4,C#18.05])})
((1242205980L,M1),{(1242205980L,[A#18.3,B#1.2,csource#M1,C#7.7])})
((1242205980L,M2),{(1242205980L,[D#6.1,A#8.9,B#8.3,C#7.2,csource#M2])})
C = FOREACH B GENERATE group.$0,group.$1,org.apache.hadoop.chukwa.RecordMerger(A.fields);
Dump C;
(1242205800L,M0,[D#1,A#7,B#3,csource#M0,C#9])
(1242205800L,M1,[A#17,B#37,C#51,csource#M1])
(1242205860L,M0,[D#12,A#8,B#6,csource#M0,C#3])
(1242205860L,M1,[D#6,A#13.2,B#23,csource#M1,C#8.5])
(1242205920L,M0,[D#6,E#48.5,A#8,B#6,csource#M0,C#8])
(1242205920L,M1,[D#61.9,E#40.3,A#8.3,B#5.2,csource#M1,C#37.7])
(1242205920L,M3,[A#12.5,B#26.82,C#89.51,csource#M3])
(1242205920L,M4,[A#13.91,B#21.02,C#18.05,csource#M4])
(1242205980L,M1,[A#18.3,B#1.2,C#7.7,csource#M1])
(1242205980L,M2,[D#6.1,A#8.9,B#8.3,csource#M2,C#7.2])
*/
  // Shared Hadoop configuration/filesystem; initialized in main() and read by createFile().
  public static Configuration conf = null;
  public static FileSystem fs = null;
  // Writes chukwaTestFile.evt into the working directory of the local filesystem.
  public static void main(String[] args) throws Exception {
    conf = new Configuration();
    fs = FileSystem.getLocal(conf);
    createFile(null);
  }
public static void createFile(String path) throws Exception {
Path outputFile = null;
if (path != null) {
outputFile = new Path(path + "/chukwaTestFile.evt");
} else {
outputFile = new Path("chukwaTestFile.evt");
}
outputFile = outputFile.makeQualified(fs);
if (fs.exists(outputFile)) {
System.out.println("File already there, exit -1," + outputFile );
return;
}
System.out.println("outputFile:" + outputFile);
SequenceFile.Writer seqFileWriter = SequenceFile.createWriter(fs, conf,outputFile, ChukwaRecordKey.class,ChukwaRecord.class,CompressionType.NONE);
ChukwaRecordKey key = new ChukwaRecordKey();
key.setReduceType("TestSeqFile");
String chukwaKey = "";
String machine = "";
String TimePartion = "1242205200"; //Wed, 13 May 2009 09:00:00 GMT
{
machine = "M0";
long time = 1242205800; // Wed, 13 May 2009 09:10:00 GMT
chukwaKey = TimePartion +"/" + machine +"/" + time;
key.setKey(chukwaKey);
ChukwaRecord record = new ChukwaRecord();
record.setTime(time);
record.add("csource", machine);
record.add("A", "7");
record.add("B", "3");
record.add("C", "9");
seqFileWriter.append(key, record);
}
{
machine = "M0";
long time = 1242205800; // Wed, 13 May 2009 09:10:00 GMT
chukwaKey = TimePartion +"/" + machine +"/" + time;
key.setKey(chukwaKey);
ChukwaRecord record = new ChukwaRecord();
record.setTime(time);
record.add("csource", machine);
record.add("D", "1");
seqFileWriter.append(key, record);
}
{
machine = "M1";
long time = 1242205800; // Wed, 13 May 2009 09:10:00 GMT
chukwaKey = TimePartion +"/" + machine +"/" + time;
key.setKey(chukwaKey);
ChukwaRecord record = new ChukwaRecord();
record.setTime(time);
record.add("csource", machine);
record.add("A", "17");
seqFileWriter.append(key, record);
}
{
machine = "M1";
long time = 1242205800; // Wed, 13 May 2009 09:10:00 GMT
chukwaKey = TimePartion +"/" + machine +"/" + time;
key.setKey(chukwaKey);
ChukwaRecord record = new ChukwaRecord();
record.setTime(time);
record.add("csource", machine);
record.add("B", "37");
record.add("C", "51");
seqFileWriter.append(key, record);
}
{
machine = "M0";
long time = 1242205860; // Wed, 13 May 2009 09:10:00 GMT
chukwaKey = TimePartion +"/" + machine +"/" + time;
key.setKey(chukwaKey);
ChukwaRecord record = new ChukwaRecord();
record.setTime(time);
record.add("csource", machine);
record.add("A", "8");
record.add("C", "3");
record.add("D", "12");
seqFileWriter.append(key, record);
}
{
machine = "M0";
long time = 1242205860; // Wed, 13 May 2009 09:11:00 GMT
chukwaKey = TimePartion +"/" + machine +"/" + time;
key.setKey(chukwaKey);
ChukwaRecord record = new ChukwaRecord();
record.setTime(time);
record.add("csource", machine);
record.add("A", "8");
record.add("B", "6");
seqFileWriter.append(key, record);
}
{
machine = "M1";
long time = 1242205860; // Wed, 13 May 2009 09:11:00 GMT
chukwaKey = TimePartion +"/" + machine +"/" + time;
key.setKey(chukwaKey);
ChukwaRecord record = new ChukwaRecord();
record.setTime(time);
record.add("csource", machine);
record.add("A", "13.2");
record.add("B", "23");
record.add("C", "8.5");
record.add("D", "6");
// create duplicate
seqFileWriter.append(key, record);
seqFileWriter.append(key, record);
}
{
machine = "M0";
long time = 1242205920; // Wed, 13 May 2009 09:12:00 GMT
chukwaKey = TimePartion +"/" + machine +"/" + time;
key.setKey(chukwaKey);
ChukwaRecord record = new ChukwaRecord();
record.setTime(time);
record.add("csource", machine);
record.add("A", "8");
record.add("B", "6");
record.add("C", "8");
record.add("D", "6");
record.add("E", "48.5");
seqFileWriter.append(key, record);
}
{
machine = "M1";
long time = 1242205920; // Wed, 13 May 2009 09:12:00 GMT
chukwaKey = TimePartion +"/" + machine +"/" + time;
key.setKey(chukwaKey);
ChukwaRecord record = new ChukwaRecord();
record.setTime(time);
record.add("csource", machine);
record.add("A", "8.3");
record.add("B", "5.2");
record.add("C", "37.7");
record.add("D", "61.9");
record.add("E", "40.3");
seqFileWriter.append(key, record);
}
{
machine = "M1";
long time = 1242205980; // Wed, 13 May 2009 09:13:00 GMT
chukwaKey = TimePartion +"/" + machine +"/" + time;
key.setKey(chukwaKey);
ChukwaRecord record = new ChukwaRecord();
record.setTime(time);
record.add("csource", machine);
record.add("A", "18.3");
record.add("B", "1.2");
record.add("C", "7.7");
seqFileWriter.append(key, record);
}
{
machine = "M2";
long time = 1242205980; // Wed, 13 May 2009 09:13:00 GMT
chukwaKey = TimePartion +"/" + machine +"/" + time;
key.setKey(chukwaKey);
ChukwaRecord record = new ChukwaRecord();
record.setTime(time);
record.add("csource", machine);
record.add("A", "8.9");
record.add("B", "8.3");
record.add("C", "7.2");
record.add("D", "6.1");
seqFileWriter.append(key, record);
}
{
machine = "M3";
// late arrival T0
long time = 1242205920; // Wed, 13 May 2009 09:12:00 GMT
chukwaKey = TimePartion +"/" + machine +"/" + time;
key.setKey(chukwaKey);
ChukwaRecord record = new ChukwaRecord();
record.setTime(time);
record.add("csource", machine);
record.add("A", "12.5");
record.add("B", "26.82");
record.add("C", "89.51");
seqFileWriter.append(key, record);
}
{
machine = "M4";
// late arrival T0
long time = 1242205920; // Wed, 13 May 2009 09:12:00 GMT
chukwaKey = TimePartion +"/" + machine +"/" + time;
key.setKey(chukwaKey);
ChukwaRecord record = new ChukwaRecord();
record.setTime(time);
record.add("csource", machine);
record.add("A", "13.91");
record.add("B", "21.02");
record.add("C", "18.05");
seqFileWriter.append(key, record);
}
seqFileWriter.close();
}
}
| apache-2.0 |
gaapt/deepdive | src/test/scala/unit/inference/test_serialization/SerializationReader.scala | 1431 | package org.deepdive.test.unit
import java.io._
class SerializationReader(weightsInput: InputStream, variablesInput: InputStream,
    factorsInput: InputStream, edgesInput: InputStream, metaDataInput: FileReader) {

  // Binary readers over the four serialized factor-graph sections.
  val weightStream = new DataInputStream(weightsInput)
  val variableStream = new DataInputStream(variablesInput)
  val factorStream = new DataInputStream(factorsInput)
  val edgeStream = new DataInputStream(edgesInput)
  // The metadata section is text, so it gets a line-oriented reader.
  val metaStream = new BufferedReader(metaDataInput)

  /** Reads the next weight record (long, boolean, double fields, in that
   *  order) from the weights stream. Field semantics are defined by
   *  WeightTest. */
  def readWeights: WeightTest =
    new WeightTest(
      weightStream.readLong(),
      weightStream.readBoolean(),
      weightStream.readDouble())

  /** Reads the next variable record (long, boolean, double, short, long,
   *  long fields, in that order) from the variables stream. */
  def readVariables: VariableTest =
    new VariableTest(
      variableStream.readLong(),
      variableStream.readBoolean(),
      variableStream.readDouble(),
      variableStream.readShort(),
      variableStream.readLong(),
      variableStream.readLong())

  /** Reads the next factor record (long, long, short, long fields, in that
   *  order) from the factors stream. */
  def readFactors: FactorTest =
    new FactorTest(
      factorStream.readLong(),
      factorStream.readLong(),
      factorStream.readShort(),
      factorStream.readLong())

  /** Reads the next edge record (three longs, a boolean, and a long, in
   *  that order) from the edges stream. */
  def readEdges: EdgeTest =
    new EdgeTest(
      edgeStream.readLong(),
      edgeStream.readLong(),
      edgeStream.readLong(),
      edgeStream.readBoolean(),
      edgeStream.readLong())
}
sergecodd/FireFox-OS | B2G/gecko/content/svg/content/src/SVGOrientSMILType.cpp | 5664 | /* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "SVGOrientSMILType.h"
#include "nsSMILValue.h"
#include "nsSVGViewBox.h"
#include "nsSVGAngle.h"
#include "nsIDOMSVGMarkerElement.h"
#include "nsDebug.h"
#include <math.h>
namespace mozilla {
/*static*/ SVGOrientSMILType SVGOrientSMILType::sSingleton;
// Initializes aValue as an orient SMIL value: a 0-degree angle with
// unspecified units, tagged with the explicit-angle orient type, and typed
// as this singleton.
void
SVGOrientSMILType::Init(nsSMILValue& aValue) const
{
  NS_ABORT_IF_FALSE(aValue.IsNull(), "Unexpected value type");

  aValue.mU.mOrient.mAngle = 0.0f;
  aValue.mU.mOrient.mUnit = nsIDOMSVGAngle::SVG_ANGLETYPE_UNSPECIFIED;
  aValue.mU.mOrient.mOrientType = nsIDOMSVGMarkerElement::SVG_MARKER_ORIENT_ANGLE;
  aValue.mType = this;
}
// Clears aValue and reverts its type to the null SMIL type. No heap memory
// is owned by this value type, so "destroy" only resets the union pointer.
void
SVGOrientSMILType::Destroy(nsSMILValue& aValue) const
{
  NS_PRECONDITION(aValue.mType == this, "Unexpected SMIL value.");
  aValue.mU.mPtr = nullptr;
  aValue.mType = &nsSMILNullType::sSingleton;
}
// Copies the three orient components (angle, unit, orient type) from aSrc
// into aDest. Both values must already be of this type.
nsresult
SVGOrientSMILType::Assign(nsSMILValue& aDest, const nsSMILValue& aSrc) const
{
  NS_PRECONDITION(aDest.mType == aSrc.mType, "Incompatible SMIL types.");
  NS_PRECONDITION(aDest.mType == this, "Unexpected SMIL value.");

  aDest.mU.mOrient.mAngle = aSrc.mU.mOrient.mAngle;
  aDest.mU.mOrient.mUnit = aSrc.mU.mOrient.mUnit;
  aDest.mU.mOrient.mOrientType = aSrc.mU.mOrient.mOrientType;
  return NS_OK;
}
// Two orient SMIL values are equal only when every component — angle,
// unit, and orient type — matches exactly.
bool
SVGOrientSMILType::IsEqual(const nsSMILValue& aLeft,
                           const nsSMILValue& aRight) const
{
  NS_PRECONDITION(aLeft.mType == aRight.mType, "Incompatible SMIL types");
  NS_PRECONDITION(aLeft.mType == this, "Unexpected type for SMIL value");

  if (aLeft.mU.mOrient.mAngle != aRight.mU.mOrient.mAngle) {
    return false;
  }
  if (aLeft.mU.mOrient.mUnit != aRight.mU.mOrient.mUnit) {
    return false;
  }
  return aLeft.mU.mOrient.mOrientType == aRight.mU.mOrient.mOrientType;
}
// Adds aValueToAdd (repeated aCount times) onto aDest, for additive/repeat
// animation. Only explicit-angle orients are supported; "auto" orients make
// this fail. The result keeps aValueToAdd's angle unit.
nsresult
SVGOrientSMILType::Add(nsSMILValue& aDest, const nsSMILValue& aValueToAdd,
                       uint32_t aCount) const
{
  NS_PRECONDITION(aValueToAdd.mType == aDest.mType,
                  "Trying to add invalid types");
  NS_PRECONDITION(aValueToAdd.mType == this, "Unexpected source type");

  if (aDest.mU.mOrient.mOrientType != nsIDOMSVGMarkerElement::SVG_MARKER_ORIENT_ANGLE ||
      aValueToAdd.mU.mOrient.mOrientType != nsIDOMSVGMarkerElement::SVG_MARKER_ORIENT_ANGLE) {
    // TODO: it would be nice to be able to add to auto angles
    return NS_ERROR_FAILURE;
  }

  // We may be dealing with two different angle units, so we normalize to
  // degrees for the add:
  float currentAngle = aDest.mU.mOrient.mAngle *
                       nsSVGAngle::GetDegreesPerUnit(aDest.mU.mOrient.mUnit);
  float angleToAdd = aValueToAdd.mU.mOrient.mAngle *
                     nsSVGAngle::GetDegreesPerUnit(aValueToAdd.mU.mOrient.mUnit) *
                     aCount;

  // And then we give the resulting animated value the same units as the value
  // that we're animating to/by (i.e. the same as aValueToAdd):
  aDest.mU.mOrient.mAngle = (currentAngle + angleToAdd) /
    nsSVGAngle::GetDegreesPerUnit(aValueToAdd.mU.mOrient.mUnit);
  aDest.mU.mOrient.mUnit = aValueToAdd.mU.mOrient.mUnit;
  return NS_OK;
}
// Computes the absolute distance between two orient values, in degrees,
// for paced animation. Fails for "auto" orients, which have no numeric
// distance.
nsresult
SVGOrientSMILType::ComputeDistance(const nsSMILValue& aFrom,
                                   const nsSMILValue& aTo,
                                   double& aDistance) const
{
  NS_PRECONDITION(aFrom.mType == aTo.mType,"Trying to compare different types");
  NS_PRECONDITION(aFrom.mType == this, "Unexpected source type");

  if (aFrom.mU.mOrient.mOrientType != nsIDOMSVGMarkerElement::SVG_MARKER_ORIENT_ANGLE ||
      aTo.mU.mOrient.mOrientType != nsIDOMSVGMarkerElement::SVG_MARKER_ORIENT_ANGLE) {
    // TODO: it would be nice to be able to compute distance with auto angles
    return NS_ERROR_FAILURE;
  }

  // Normalize both to degrees in case they're different angle units:
  double from = aFrom.mU.mOrient.mAngle *
                nsSVGAngle::GetDegreesPerUnit(aFrom.mU.mOrient.mUnit);
  double to = aTo.mU.mOrient.mAngle *
              nsSVGAngle::GetDegreesPerUnit(aTo.mU.mOrient.mUnit);

  aDistance = fabs(to - from);
  return NS_OK;
}
// Linearly interpolates between aStartVal and aEndVal at aUnitDistance
// (0.0 - 1.0), normalizing both endpoints to degrees first. The result is
// expressed in aEndVal's angle unit. Fails for "auto" orients.
nsresult
SVGOrientSMILType::Interpolate(const nsSMILValue& aStartVal,
                               const nsSMILValue& aEndVal,
                               double aUnitDistance,
                               nsSMILValue& aResult) const
{
  NS_PRECONDITION(aStartVal.mType == aEndVal.mType,
                  "Trying to interpolate different types");
  NS_PRECONDITION(aStartVal.mType == this,
                  "Unexpected types for interpolation.");
  NS_PRECONDITION(aResult.mType == this, "Unexpected result type.");

  if (aStartVal.mU.mOrient.mOrientType != nsIDOMSVGMarkerElement::SVG_MARKER_ORIENT_ANGLE ||
      aEndVal.mU.mOrient.mOrientType != nsIDOMSVGMarkerElement::SVG_MARKER_ORIENT_ANGLE) {
    // TODO: it would be nice to be able to handle auto angles too.
    return NS_ERROR_FAILURE;
  }

  // Normalize both endpoints to degrees before interpolating:
  float start = aStartVal.mU.mOrient.mAngle *
                nsSVGAngle::GetDegreesPerUnit(aStartVal.mU.mOrient.mUnit);
  float end = aEndVal.mU.mOrient.mAngle *
              nsSVGAngle::GetDegreesPerUnit(aEndVal.mU.mOrient.mUnit);
  float result = (start + (end - start) * aUnitDistance);

  // Again, we use the unit of the to/by value for the result:
  aResult.mU.mOrient.mAngle = result /
    nsSVGAngle::GetDegreesPerUnit(aEndVal.mU.mOrient.mUnit);
  aResult.mU.mOrient.mUnit = aEndVal.mU.mOrient.mUnit;
  return NS_OK;
}
} // namespace mozilla
| apache-2.0 |
0359xiaodong/YiBo | YiBo/src/com/shejiaomao/weibo/service/listener/MicroBlogMoreItemClickListener.java | 4403 | package com.shejiaomao.weibo.service.listener;
import java.io.File;
import com.cattong.commons.util.StringUtil;
import com.cattong.entity.Status;
import android.app.AlertDialog;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.net.Uri;
import android.text.ClipboardManager;
import android.view.View;
import android.widget.Adapter;
import android.widget.AdapterView;
import android.widget.BaseAdapter;
import android.widget.Toast;
import android.widget.AdapterView.OnItemClickListener;
import com.shejiaomao.maobo.R;
import com.shejiaomao.weibo.activity.EditMicroBlogActivity;
import com.shejiaomao.weibo.common.EntityUtil;
import com.shejiaomao.weibo.service.adapter.AdapterUtil;
import com.shejiaomao.weibo.service.adapter.MicroBlogMoreListAdapter;
import com.shejiaomao.weibo.service.adapter.StatusUtil;
import com.shejiaomao.weibo.service.cache.ImageCache;
import com.shejiaomao.weibo.service.cache.wrap.CachedImageKey;
import com.shejiaomao.weibo.service.task.DestroyStatusTask;
import com.shejiaomao.weibo.widget.ListChooseDialog;
/**
 * Click listener for the "more actions" dialog shown for a micro-blog status.
 * Depending on the tapped item it deletes the status (after confirmation),
 * copies its text to the clipboard, or forwards it to another account via a
 * share intent.
 */
public class MicroBlogMoreItemClickListener implements OnItemClickListener {
    // Dialog hosting the action list; dismissed as soon as an item is chosen.
    private ListChooseDialog chooseDialog;

    public MicroBlogMoreItemClickListener(ListChooseDialog chooseDialog) {
        this.chooseDialog = chooseDialog;
    }

    @Override
    public void onItemClick(AdapterView<?> parent, View view, int position,
        long id) {
        Adapter adapter = parent.getAdapter();
        // Unwrap any wrapping adapter to reach the concrete list adapter.
        BaseAdapter baseAdapter = AdapterUtil.getAdapter(adapter);
        if (!(baseAdapter instanceof MicroBlogMoreListAdapter)) {
            return;
        }
        MicroBlogMoreListAdapter listAdapter = (MicroBlogMoreListAdapter)baseAdapter;
        chooseDialog.dismiss();

        final Context context = view.getContext();
        final Status status = listAdapter.getStatus();
        int itemId = (int)listAdapter.getItemId(position);
        ClipboardManager clip = (ClipboardManager)context
            .getSystemService(Context.CLIPBOARD_SERVICE);
        switch (itemId) {
        case MicroBlogMoreListAdapter.ITEM_DELETE:
            // Ask for confirmation before destroying the status remotely.
            new AlertDialog.Builder(context)
                .setTitle(R.string.title_dialog_alert)
                .setMessage(R.string.msg_blog_delete)
                .setNegativeButton(R.string.btn_cancel,
                    new AlertDialog.OnClickListener() {
                        @Override
                        public void onClick(DialogInterface dialog, int which) {
                        }
                    })
                .setPositiveButton(R.string.btn_confirm,
                    new AlertDialog.OnClickListener() {
                        @Override
                        public void onClick(DialogInterface dialog, int which) {
                            // Deletion runs off the UI thread in an AsyncTask.
                            DestroyStatusTask task = new DestroyStatusTask(context, status);
                            task.execute();
                        }
                    }).show();
            break;
        case MicroBlogMoreListAdapter.ITEM_COPY:
            // Copy a plain-text rendering of the status to the clipboard.
            String copyStatusText = StatusUtil.extraSimpleStatus(context, status);
            clip.setText(copyStatusText);
            Toast.makeText(context, R.string.msg_blog_copy, Toast.LENGTH_SHORT).show();
            break;
        case MicroBlogMoreListAdapter.ITEM_SHARE_TO_ACCOUNTS:
            // Build a share intent targeting the in-app compose screen.
            Intent intent = new Intent(Intent.ACTION_SEND);
            intent.setClass(context, EditMicroBlogActivity.class);
            if (EntityUtil.hasPicture(status)) {
                intent.setType("image/*");
                // Attach the largest locally cached image, if one exists.
                CachedImageKey info = EntityUtil.getMaxLocalCachedImageInfo(status);
                String imagePath = ImageCache.getRealPath(info);
                if (StringUtil.isNotEmpty(imagePath)) {
                    if (info.getCacheType() == CachedImageKey.IMAGE_THUMBNAIL) {
                        // Only a thumbnail is cached; warn the user about quality.
                        Toast.makeText(
                            context,
                            context.getString(R.string.msg_blog_share_picture_thumbnail),
                            Toast.LENGTH_LONG
                        ).show();
                    }
                    Uri uri = Uri.fromFile(new File(imagePath));
                    intent.putExtra(Intent.EXTRA_STREAM, uri);
                } else {
                    // No cached copy available; fall back to text-only sharing.
                    intent.setType("text/plain");
                    Toast.makeText(context, context.getString(R.string.msg_blog_share_picture), Toast.LENGTH_LONG).show();
                }
            } else {
                intent.setType("text/plain");
            }
            // The status text is both put on the clipboard and into the intent.
            String statusText = StatusUtil.extraSimpleStatus(context, status);
            clip.setText(statusText);
            intent.putExtra(Intent.EXTRA_TEXT, statusText);
            intent.putExtra(Intent.EXTRA_SUBJECT, context.getString(R.string.msg_extra_subject));
            context.startActivity(intent);
            break;
        }
    }
}
| apache-2.0 |
leventov/druid | core/src/test/java/org/apache/druid/timeline/DataSegmentTest.java | 10395 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.timeline;
import com.fasterxml.jackson.databind.InjectableValues;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.RangeSet;
import org.apache.druid.TestObjectMapper;
import org.apache.druid.indexer.partitions.DynamicPartitionsSpec;
import org.apache.druid.indexer.partitions.HashedPartitionsSpec;
import org.apache.druid.java.util.common.DateTimes;
import org.apache.druid.java.util.common.Intervals;
import org.apache.druid.java.util.common.jackson.JacksonUtils;
import org.apache.druid.timeline.DataSegment.PruneSpecsHolder;
import org.apache.druid.timeline.partition.NoneShardSpec;
import org.apache.druid.timeline.partition.NumberedShardSpec;
import org.apache.druid.timeline.partition.PartitionChunk;
import org.apache.druid.timeline.partition.ShardSpec;
import org.apache.druid.timeline.partition.ShardSpecLookup;
import org.joda.time.Interval;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
/**
*/
/**
 * Unit tests for {@link DataSegment}: JSON round-tripping, segment-id
 * construction for various shard specs, and compaction-state handling.
 */
public class DataSegmentTest
{
  private static final ObjectMapper MAPPER = new TestObjectMapper();
  // Arbitrary binary version used to verify it survives serialization.
  private static final int TEST_VERSION = 0x9;

  // Builds a minimal anonymous ShardSpec whose only meaningful property is
  // its partition number; every other method returns a neutral value.
  private static ShardSpec getShardSpec(final int partitionNum)
  {
    return new ShardSpec()
    {
      @Override
      public <T> PartitionChunk<T> createChunk(T obj)
      {
        return null;
      }

      @Override
      public int getPartitionNum()
      {
        return partitionNum;
      }

      @Override
      public int getNumCorePartitions()
      {
        return 0;
      }

      @Override
      public ShardSpecLookup getLookup(List<? extends ShardSpec> shardSpecs)
      {
        return null;
      }

      @Override
      public List<String> getDomainDimensions()
      {
        return ImmutableList.of();
      }

      @Override
      public boolean possibleInDomain(Map<String, RangeSet<String>> domain)
      {
        return true;
      }
    };
  }

  @Before
  public void setUp()
  {
    // Jackson needs a PruneSpecsHolder injected to deserialize DataSegment.
    InjectableValues.Std injectableValues = new InjectableValues.Std();
    injectableValues.addValue(PruneSpecsHolder.class, PruneSpecsHolder.DEFAULT);
    MAPPER.setInjectableValues(injectableValues);
  }

  // Serializes a fully populated segment, checks the raw JSON field map, then
  // deserializes it back and verifies equality, ordering, and hash codes.
  @Test
  public void testV1Serialization() throws Exception
  {
    final Interval interval = Intervals.of("2011-10-01/2011-10-02");
    final ImmutableMap<String, Object> loadSpec = ImmutableMap.of("something", "or_other");

    DataSegment segment = new DataSegment(
        "something",
        interval,
        "1",
        loadSpec,
        Arrays.asList("dim1", "dim2"),
        Arrays.asList("met1", "met2"),
        new NumberedShardSpec(3, 0),
        new CompactionState(
            new HashedPartitionsSpec(100000, null, ImmutableList.of("dim1")),
            ImmutableMap.of()
        ),
        TEST_VERSION,
        1
    );

    final Map<String, Object> objectMap = MAPPER.readValue(
        MAPPER.writeValueAsString(segment),
        JacksonUtils.TYPE_REFERENCE_MAP_STRING_OBJECT
    );

    Assert.assertEquals(11, objectMap.size());
    Assert.assertEquals("something", objectMap.get("dataSource"));
    Assert.assertEquals(interval.toString(), objectMap.get("interval"));
    Assert.assertEquals("1", objectMap.get("version"));
    Assert.assertEquals(loadSpec, objectMap.get("loadSpec"));
    // Dimensions and metrics serialize as comma-joined strings.
    Assert.assertEquals("dim1,dim2", objectMap.get("dimensions"));
    Assert.assertEquals("met1,met2", objectMap.get("metrics"));
    Assert.assertEquals(ImmutableMap.of("type", "numbered", "partitionNum", 3, "partitions", 0), objectMap.get("shardSpec"));
    Assert.assertEquals(TEST_VERSION, objectMap.get("binaryVersion"));
    Assert.assertEquals(1, objectMap.get("size"));

    DataSegment deserializedSegment = MAPPER.readValue(MAPPER.writeValueAsString(segment), DataSegment.class);

    Assert.assertEquals(segment.getDataSource(), deserializedSegment.getDataSource());
    Assert.assertEquals(segment.getInterval(), deserializedSegment.getInterval());
    Assert.assertEquals(segment.getVersion(), deserializedSegment.getVersion());
    Assert.assertEquals(segment.getLoadSpec(), deserializedSegment.getLoadSpec());
    Assert.assertEquals(segment.getDimensions(), deserializedSegment.getDimensions());
    Assert.assertEquals(segment.getMetrics(), deserializedSegment.getMetrics());
    Assert.assertEquals(segment.getShardSpec(), deserializedSegment.getShardSpec());
    Assert.assertEquals(segment.getSize(), deserializedSegment.getSize());
    Assert.assertEquals(segment.getId(), deserializedSegment.getId());

    // Round-tripped segments must compare equal in both directions and agree
    // on hashCode.
    deserializedSegment = MAPPER.readValue(MAPPER.writeValueAsString(segment), DataSegment.class);
    Assert.assertEquals(0, segment.compareTo(deserializedSegment));

    deserializedSegment = MAPPER.readValue(MAPPER.writeValueAsString(segment), DataSegment.class);
    Assert.assertEquals(0, deserializedSegment.compareTo(segment));

    deserializedSegment = MAPPER.readValue(MAPPER.writeValueAsString(segment), DataSegment.class);
    Assert.assertEquals(segment.hashCode(), deserializedSegment.hashCode());
  }

  // NoneShardSpec produces an id with no partition suffix.
  @Test
  public void testIdentifier()
  {
    final DataSegment segment = DataSegment.builder()
                                           .dataSource("foo")
                                           .interval(Intervals.of("2012-01-01/2012-01-02"))
                                           .version(DateTimes.of("2012-01-01T11:22:33.444Z").toString())
                                           .shardSpec(NoneShardSpec.instance())
                                           .size(0)
                                           .build();

    Assert.assertEquals(
        "foo_2012-01-01T00:00:00.000Z_2012-01-02T00:00:00.000Z_2012-01-01T11:22:33.444Z",
        segment.getId().toString()
    );
  }

  // Partition 0 is also omitted from the id suffix.
  @Test
  public void testIdentifierWithZeroPartition()
  {
    final DataSegment segment = DataSegment.builder()
                                           .dataSource("foo")
                                           .interval(Intervals.of("2012-01-01/2012-01-02"))
                                           .version(DateTimes.of("2012-01-01T11:22:33.444Z").toString())
                                           .shardSpec(getShardSpec(0))
                                           .size(0)
                                           .build();

    Assert.assertEquals(
        "foo_2012-01-01T00:00:00.000Z_2012-01-02T00:00:00.000Z_2012-01-01T11:22:33.444Z",
        segment.getId().toString()
    );
  }

  // A non-zero partition number is appended to the id as "_<partitionNum>".
  @Test
  public void testIdentifierWithNonzeroPartition()
  {
    final DataSegment segment = DataSegment.builder()
                                           .dataSource("foo")
                                           .interval(Intervals.of("2012-01-01/2012-01-02"))
                                           .version(DateTimes.of("2012-01-01T11:22:33.444Z").toString())
                                           .shardSpec(getShardSpec(7))
                                           .size(0)
                                           .build();

    Assert.assertEquals(
        "foo_2012-01-01T00:00:00.000Z_2012-01-02T00:00:00.000Z_2012-01-01T11:22:33.444Z_7",
        segment.getId().toString()
    );
  }

  // A segment serialized without dimensions/metrics deserializes with empty
  // lists rather than nulls.
  @Test
  public void testV1SerializationNullMetrics() throws Exception
  {
    final DataSegment segment =
        makeDataSegment("foo", "2012-01-01/2012-01-02", DateTimes.of("2012-01-01T11:22:33.444Z").toString());

    final DataSegment segment2 = MAPPER.readValue(MAPPER.writeValueAsString(segment), DataSegment.class);
    Assert.assertEquals("empty dimensions", ImmutableList.of(), segment2.getDimensions());
    Assert.assertEquals("empty metrics", ImmutableList.of(), segment2.getMetrics());
  }

  // withLastCompactionState() must yield a segment equal to one built with the
  // same compaction state from the start.
  @Test
  public void testWithLastCompactionState()
  {
    final CompactionState compactionState = new CompactionState(
        new DynamicPartitionsSpec(null, null),
        Collections.singletonMap("test", "map")
    );
    final DataSegment segment1 = DataSegment.builder()
                                            .dataSource("foo")
                                            .interval(Intervals.of("2012-01-01/2012-01-02"))
                                            .version(DateTimes.of("2012-01-01T11:22:33.444Z").toString())
                                            .shardSpec(getShardSpec(7))
                                            .size(0)
                                            .lastCompactionState(compactionState)
                                            .build();
    final DataSegment segment2 = DataSegment.builder()
                                            .dataSource("foo")
                                            .interval(Intervals.of("2012-01-01/2012-01-02"))
                                            .version(DateTimes.of("2012-01-01T11:22:33.444Z").toString())
                                            .shardSpec(getShardSpec(7))
                                            .size(0)
                                            .build();
    Assert.assertEquals(segment1, segment2.withLastCompactionState(compactionState));
  }

  // Convenience factory for a minimal segment with only the required fields.
  private DataSegment makeDataSegment(String dataSource, String interval, String version)
  {
    return DataSegment.builder()
                      .dataSource(dataSource)
                      .interval(Intervals.of(interval))
                      .version(version)
                      .size(1)
                      .build();
  }
}
| apache-2.0 |
bcec/opennebula3.2.1 | src/oca/ruby/OpenNebula/Group.rb | 5232 | # -------------------------------------------------------------------------- #
# Copyright 2002-2012, OpenNebula Project Leads (OpenNebula.org) #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); you may #
# not use this file except in compliance with the License. You may obtain #
# a copy of the License at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
# See the License for the specific language governing permissions and #
# limitations under the License. #
#--------------------------------------------------------------------------- #
require 'OpenNebula/Pool'
module OpenNebula
class Group < PoolElement
    #######################################################################
    # Constants and Class Methods
    #######################################################################

    # XML-RPC method names for the Group element.
    GROUP_METHODS = {
        :info     => "group.info",
        :allocate => "group.allocate",
        :delete   => "group.delete"
    }

    # Flag for requesting connected user's group info
    SELF = -1

    # Default location for group ACL's
    if ENV['ONE_LOCATION']
        GROUP_DEFAULT = ENV['ONE_LOCATION'] + "/etc/group.default"
    else
        GROUP_DEFAULT = "/etc/one/group.default"
    end

    # Creates a Group description with just its identifier
    # this method should be used to create plain Group objects.
    # +pe_id+ the id of the group
    #
    # Example:
    #   group = Group.new(Group.build_xml(3), rpc_client)
    #
    def Group.build_xml(pe_id=nil)
        if pe_id
            group_xml = "<GROUP><ID>#{pe_id}</ID></GROUP>"
        else
            group_xml = "<GROUP></GROUP>"
        end

        XMLElement.build_xml(group_xml,'GROUP')
    end

    # Class constructor
    def initialize(xml, client)
        super(xml,client)
    end

    #######################################################################
    # Group utils
    #######################################################################

    # Creates ACLs for the group. The ACL rules are described, one per
    # line, in +filename+; lines starting with '#' are skipped.
    #
    # Returns [0, info_message] on success, where info_message lists the
    # ids of the created ACL rules, or [-1, error_message] on failure.
    def create_acls(filename = GROUP_DEFAULT)
        if !File.readable?(filename)
            # NOTE: fixed typo in this user-facing message ("deafult").
            return -1, "Can not read default ACL file for group"
        end

        msg = String.new

        # File.foreach closes the file when iteration finishes, unlike
        # File.open without a block, which leaked the handle.
        File.foreach(filename) { |l|
            next if l.match(/^#/)

            # Rules are scoped to this group with the '@<gid>' prefix.
            rule  = "@#{@pe_id} #{l}"
            parse = OpenNebula::Acl.parse_rule(rule)
            if OpenNebula.is_error?(parse)
                return -1, "Error parsing rule #{rule}: #{parse.message}"
            end

            xml = OpenNebula::Acl.build_xml
            acl = OpenNebula::Acl.new(xml, @client)

            rc = acl.allocate(*parse)
            if OpenNebula.is_error?(rc)
                return -1, "Error creating rule #{rule}: #{rc.message}"
            else
                msg << "ACL_ID: #{acl.id}\n"
            end
        }

        return 0, msg
    end

    #######################################################################
    # XML-RPC Methods for the Group Object
    #######################################################################

    # Retrieves the information of the given Group.
    def info()
        super(GROUP_METHODS[:info], 'GROUP')
    end

    # Allocates a new Group in OpenNebula
    #
    # +groupname+ A string containing the name of the Group.
    def allocate(groupname)
        super(GROUP_METHODS[:allocate], groupname)
    end

    # Deletes the Group
    def delete()
        super(GROUP_METHODS[:delete])
    end

    # ---------------------------------------------------------------------
    # Helpers to get information
    # ---------------------------------------------------------------------

    # Returns whether or not the user with id 'uid' is part of this group
    def contains(uid)
        #This doesn't work in ruby 1.8.5
        #return self["USERS/ID[.=#{uid}]"] != nil

        id_array = retrieve_elements('USERS/ID')
        return id_array != nil && id_array.include?(uid.to_s)
    end

    # Returns an array with the numeric user ids
    def user_ids
        array = Array.new

        self.each("USERS/ID") do |id|
            array << id.text.to_i
        end

        return array
    end
end
end
| apache-2.0 |
GDGLima/AppDevFestLima | android/src/net/abcdroid/devfest12/calendar/SessionAlarmReceiver.java | 1373 | /*
* Copyright 2012 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.abcdroid.devfest12.calendar;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import static net.abcdroid.devfest12.util.LogUtils.makeLogTag;
/**
* {@link BroadcastReceiver} to reinitialize {@link android.app.AlarmManager} for all starred
* session blocks.
*/
/**
 * {@link BroadcastReceiver} that kicks off {@link SessionAlarmService} so it
 * can re-register alarms for all starred session blocks.
 */
public class SessionAlarmReceiver extends BroadcastReceiver {
    public static final String TAG = makeLogTag(SessionAlarmReceiver.class);

    @Override
    public void onReceive(Context context, Intent intent) {
        // Delegate all scheduling work to the alarm service.
        context.startService(new Intent(
                SessionAlarmService.ACTION_SCHEDULE_ALL_STARRED_BLOCKS,
                null, context, SessionAlarmService.class));
    }
}
| apache-2.0 |
quarkusio/quarkus | integration-tests/virtual-http/src/test/java/io/quarkus/it/virtual/HttpResponseMessageMock.java | 2404 | package io.quarkus.it.virtual;
import java.util.HashMap;
import java.util.Map;
import com.microsoft.azure.functions.HttpResponseMessage;
import com.microsoft.azure.functions.HttpStatus;
import com.microsoft.azure.functions.HttpStatusType;
/**
* The mock for HttpResponseMessage, can be used in unit tests to verify if the
* returned response by HTTP trigger function is correct or not.
*/
public class HttpResponseMessageMock implements HttpResponseMessage {
    private int httpStatusCode;
    private HttpStatusType httpStatus;
    private Object body;
    private Map<String, String> headers;

    public HttpResponseMessageMock(final HttpStatusType status, final Map<String, String> headers, final Object body) {
        this.httpStatus = status;
        this.httpStatusCode = status.value();
        this.headers = headers;
        this.body = body;
    }

    @Override
    public HttpStatusType getStatus() {
        return this.httpStatus;
    }

    @Override
    public int getStatusCode() {
        return httpStatusCode;
    }

    @Override
    public String getHeader(String key) {
        return this.headers.get(key);
    }

    @Override
    public Object getBody() {
        return this.body;
    }

    // Builder counterpart of the mock; mirrors the fluent API of the real
    // HttpResponseMessage.Builder so trigger functions can be tested as-is.
    public static class HttpResponseMessageBuilderMock implements HttpResponseMessage.Builder {
        private Object body;
        private int httpStatusCode;
        private Map<String, String> headers = new HashMap<>();
        private HttpStatusType httpStatus;

        // Convenience overload for the HttpStatus enum (which implements
        // HttpStatusType); not part of the Builder interface.
        public Builder status(HttpStatus status) {
            this.httpStatusCode = status.value();
            this.httpStatus = status;
            return this;
        }

        @Override
        public Builder status(final HttpStatusType httpStatusType) {
            this.httpStatusCode = httpStatusType.value();
            this.httpStatus = httpStatusType;
            return this;
        }

        @Override
        public HttpResponseMessage.Builder header(final String key, final String value) {
            this.headers.put(key, value);
            return this;
        }

        @Override
        public HttpResponseMessage.Builder body(final Object body) {
            this.body = body;
            return this;
        }

        @Override
        public HttpResponseMessage build() {
            return new HttpResponseMessageMock(this.httpStatus, this.headers, this.body);
        }
    }
}
| apache-2.0 |
vincentpoon/hbase | hbase-it/src/test/java/org/apache/hadoop/hbase/DistributedHBaseCluster.java | 18340 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
import java.util.Set;
import java.util.TreeSet;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ClusterManager.ServiceType;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ClusterConnection;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.RegionLocator;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ServerInfo;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MasterService;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Threads;
/**
* Manages the interactions with an already deployed distributed cluster (as opposed to
* a pseudo-distributed, or mini/local cluster). This is used by integration and system tests.
*/
@InterfaceAudience.Private
public class DistributedHBaseCluster extends HBaseCluster {
private Admin admin;
private final Connection connection;
private ClusterManager clusterManager;
  /**
   * Connects to an already-running cluster and snapshots its initial status
   * so tests can later restore/compare against it.
   */
  public DistributedHBaseCluster(Configuration conf, ClusterManager clusterManager)
      throws IOException {
    super(conf);
    this.clusterManager = clusterManager;
    this.connection = ConnectionFactory.createConnection(conf);
    this.admin = this.connection.getAdmin();
    this.initialClusterStatus = getClusterStatus();
  }

  public void setClusterManager(ClusterManager clusterManager) {
    this.clusterManager = clusterManager;
  }

  public ClusterManager getClusterManager() {
    return clusterManager;
  }
  /**
   * Returns a live ClusterStatus for this HBase cluster, fetched via the
   * admin API.
   * @throws IOException
   */
  @Override
  public ClusterStatus getClusterStatus() throws IOException {
    return admin.getClusterStatus();
  }

  // Status captured at construction time, before any chaos was applied.
  @Override
  public ClusterStatus getInitialClusterStatus() throws IOException {
    return initialClusterStatus;
  }

  // Releases the admin handle first, then the underlying connection.
  @Override
  public void close() throws IOException {
    if (this.admin != null) {
      admin.close();
    }
    if (this.connection != null && !this.connection.isClosed()) {
      this.connection.close();
    }
  }

  // RPC stub for the region-server admin protocol on the given server.
  @Override
  public AdminProtos.AdminService.BlockingInterface getAdminProtocol(ServerName serverName)
      throws IOException {
    return ((ClusterConnection)this.connection).getAdmin(serverName);
  }

  // RPC stub for the client (data) protocol on the given server.
  @Override
  public ClientProtos.ClientService.BlockingInterface getClientProtocol(ServerName serverName)
      throws IOException {
    return ((ClusterConnection)this.connection).getClient(serverName);
  }
  // --- Region server lifecycle: delegates to the ClusterManager, which
  // --- runs the actual start/stop/kill on the remote host.

  @Override
  public void startRegionServer(String hostname, int port) throws IOException {
    LOG.info("Starting RS on: " + hostname);
    clusterManager.start(ServiceType.HBASE_REGIONSERVER, hostname, port);
  }

  @Override
  public void killRegionServer(ServerName serverName) throws IOException {
    LOG.info("Aborting RS: " + serverName.getServerName());
    clusterManager.kill(ServiceType.HBASE_REGIONSERVER,
        serverName.getHostname(), serverName.getPort());
  }

  @Override
  public void stopRegionServer(ServerName serverName) throws IOException {
    LOG.info("Stopping RS: " + serverName.getServerName());
    clusterManager.stop(ServiceType.HBASE_REGIONSERVER,
        serverName.getHostname(), serverName.getPort());
  }

  @Override
  public void waitForRegionServerToStop(ServerName serverName, long timeout) throws IOException {
    waitForServiceToStop(ServiceType.HBASE_REGIONSERVER, serverName, timeout);
  }
@Override
public void startZkNode(String hostname, int port) throws IOException {
LOG.info("Starting ZooKeeper node on: " + hostname);
clusterManager.start(ServiceType.ZOOKEEPER_SERVER, hostname, port);
}
@Override
public void killZkNode(ServerName serverName) throws IOException {
LOG.info("Aborting ZooKeeper node on: " + serverName.getServerName());
clusterManager.kill(ServiceType.ZOOKEEPER_SERVER,
serverName.getHostname(), serverName.getPort());
}
/** Gracefully stops the ZooKeeper process running at the given server's address. */
@Override
public void stopZkNode(ServerName serverName) throws IOException {
  LOG.info("Stopping ZooKeeper node: " + serverName.getServerName());
  final String host = serverName.getHostname();
  final int port = serverName.getPort();
  clusterManager.stop(ServiceType.ZOOKEEPER_SERVER, host, port);
}
/** Blocks until the ZooKeeper process is running, or throws after {@code timeout} ms. */
@Override
public void waitForZkNodeToStart(ServerName serverName, long timeout) throws IOException {
  waitForServiceToStart(ServiceType.ZOOKEEPER_SERVER, serverName, timeout);
}
/** Blocks until the ZooKeeper process is no longer running, or throws after {@code timeout} ms. */
@Override
public void waitForZkNodeToStop(ServerName serverName, long timeout) throws IOException {
  waitForServiceToStop(ServiceType.ZOOKEEPER_SERVER, serverName, timeout);
}
/** Starts the HDFS data node process at the given server's address. */
@Override
public void startDataNode(ServerName serverName) throws IOException {
  LOG.info("Starting data node on: " + serverName.getServerName());
  final String host = serverName.getHostname();
  final int port = serverName.getPort();
  clusterManager.start(ServiceType.HADOOP_DATANODE, host, port);
}
/** Hard-kills (aborts) the HDFS data node process at the given server's address. */
@Override
public void killDataNode(ServerName serverName) throws IOException {
  LOG.info("Aborting data node on: " + serverName.getServerName());
  final String host = serverName.getHostname();
  final int port = serverName.getPort();
  clusterManager.kill(ServiceType.HADOOP_DATANODE, host, port);
}
/** Gracefully stops the HDFS data node process at the given server's address. */
@Override
public void stopDataNode(ServerName serverName) throws IOException {
  LOG.info("Stopping data node on: " + serverName.getServerName());
  final String host = serverName.getHostname();
  final int port = serverName.getPort();
  clusterManager.stop(ServiceType.HADOOP_DATANODE, host, port);
}
/** Blocks until the data node process is running, or throws after {@code timeout} ms. */
@Override
public void waitForDataNodeToStart(ServerName serverName, long timeout) throws IOException {
  waitForServiceToStart(ServiceType.HADOOP_DATANODE, serverName, timeout);
}
/** Blocks until the data node process is no longer running, or throws after {@code timeout} ms. */
@Override
public void waitForDataNodeToStop(ServerName serverName, long timeout) throws IOException {
  waitForServiceToStop(ServiceType.HADOOP_DATANODE, serverName, timeout);
}
/**
 * Polls the cluster manager every 100 ms until the given service on the given
 * server is no longer running.
 *
 * @throws IOException if the service is still running after {@code timeout} ms
 */
private void waitForServiceToStop(ServiceType service, ServerName serverName, long timeout)
    throws IOException {
  LOG.info("Waiting for service: " + service + " to stop: " + serverName.getServerName());
  final long deadline = System.currentTimeMillis() + timeout;
  while (System.currentTimeMillis() < deadline) {
    if (!clusterManager.isRunning(service, serverName.getHostname(), serverName.getPort())) {
      return;
    }
    Threads.sleep(100);
  }
  throw new IOException("did timeout waiting for service to stop:" + serverName);
}
/**
 * Polls the cluster manager every 100 ms until the given service on the given
 * server is running.
 *
 * @throws IOException if the service has not started after {@code timeout} ms
 */
private void waitForServiceToStart(ServiceType service, ServerName serverName, long timeout)
    throws IOException {
  LOG.info("Waiting for service: " + service + " to start: " + serverName.getServerName());
  final long deadline = System.currentTimeMillis() + timeout;
  while (System.currentTimeMillis() < deadline) {
    if (clusterManager.isRunning(service, serverName.getHostname(), serverName.getPort())) {
      return;
    }
    Threads.sleep(100);
  }
  throw new IOException("did timeout waiting for service to start:" + serverName);
}
/**
 * Returns a blocking RPC stub for the active master.
 * Throws if no active master is reachable.
 */
@Override
public MasterService.BlockingInterface getMasterAdminService()
    throws IOException {
  return ((ClusterConnection)this.connection).getMaster();
}
@Override
public void startMaster(String hostname, int port) throws IOException {
  LOG.info("Starting Master on: " + hostname + ":" + port);
  // Delegates process management to the external cluster manager.
  clusterManager.start(ServiceType.HBASE_MASTER, hostname, port);
}
/** Hard-kills (aborts) the master process running at the given server's address. */
@Override
public void killMaster(ServerName serverName) throws IOException {
  LOG.info("Aborting Master: " + serverName.getServerName());
  final String host = serverName.getHostname();
  final int port = serverName.getPort();
  clusterManager.kill(ServiceType.HBASE_MASTER, host, port);
}
/** Gracefully stops the master process running at the given server's address. */
@Override
public void stopMaster(ServerName serverName) throws IOException {
  LOG.info("Stopping Master: " + serverName.getServerName());
  final String host = serverName.getHostname();
  final int port = serverName.getPort();
  clusterManager.stop(ServiceType.HBASE_MASTER, host, port);
}
/** Blocks until the master process is no longer running, or throws after {@code timeout} ms. */
@Override
public void waitForMasterToStop(ServerName serverName, long timeout) throws IOException {
  waitForServiceToStop(ServiceType.HBASE_MASTER, serverName, timeout);
}
/**
 * Polls once a second until an active master RPC stub can be obtained.
 * Success of getMasterAdminService() is taken as "active and ready".
 *
 * @return true if an active master became available within {@code timeout} ms
 */
@Override
public boolean waitForActiveAndReadyMaster(long timeout) throws IOException {
  final long deadline = System.currentTimeMillis() + timeout;
  while (System.currentTimeMillis() < deadline) {
    try {
      getMasterAdminService();
      return true;
    } catch (MasterNotRunningException m) {
      LOG.warn("Master not started yet " + m);
    } catch (ZooKeeperConnectionException e) {
      LOG.warn("Failed to connect to ZK " + e);
    }
    Threads.sleep(1000);
  }
  return false;
}
/**
 * Finds the region server currently hosting the given region and returns its
 * ServerName as reported by that server itself (via a getServerInfo RPC),
 * or null if no location could be found.
 */
@Override
public ServerName getServerHoldingRegion(TableName tn, byte[] regionName) throws IOException {
  HRegionLocation regionLoc = null;
  try (RegionLocator locator = connection.getRegionLocator(tn)) {
    // second argument presumably forces a fresh (non-cached) lookup — TODO confirm
    regionLoc = locator.getRegionLocation(regionName, true);
  }
  if (regionLoc == null) {
    LOG.warn("Cannot find region server holding region " + Bytes.toString(regionName) +
      ", start key [" + Bytes.toString(HRegionInfo.getStartKey(regionName)) + "]");
    return null;
  }
  // Ask the hosting server directly for its own identity rather than trusting
  // the cached location's server name.
  AdminProtos.AdminService.BlockingInterface client =
    ((ClusterConnection)this.connection).getAdmin(regionLoc.getServerName());
  ServerInfo info = ProtobufUtil.getServerInfo(null, client);
  return ProtobufUtil.toServerName(info.getServerName());
}
/** Not supported on a distributed cluster; always throws. */
@Override
public void waitUntilShutDown() {
  // Simply wait for a few seconds for now (after issuing serverManager.kill
  throw new RuntimeException(HConstants.NOT_IMPLEMENTED);
}
/** Not supported on a distributed cluster; always throws. */
@Override
public void shutdown() throws IOException {
  // not sure we want this
  throw new RuntimeException(HConstants.NOT_IMPLEMENTED);
}
/** This implementation always represents a real (distributed) cluster. */
@Override
public boolean isDistributedCluster() {
  return true;
}
/**
 * Best-effort restore of the cluster to the given initial layout: masters,
 * then region servers, then the admin handle.
 *
 * @return true only if every restore step succeeded
 */
@Override
public boolean restoreClusterStatus(ClusterStatus initial) throws IOException {
  ClusterStatus current = getClusterStatus();
  LOG.info("Restoring cluster - started");
  // do a best effort restore
  boolean success = true;
  // Non-short-circuit '&' is deliberate: each restore step must run even if
  // an earlier one failed, so we recover as much of the cluster as possible.
  success = restoreMasters(initial, current) & success;
  success = restoreRegionServers(initial, current) & success;
  success = restoreAdmin() & success;
  LOG.info("Restoring cluster - done");
  return success;
}
/**
 * Best-effort restore of the master topology (active master plus backups) to
 * the layout captured in {@code initial}. Failures are collected and logged
 * instead of aborting, so as much of the topology as possible is restored.
 *
 * @return true if no errors were encountered
 */
protected boolean restoreMasters(ClusterStatus initial, ClusterStatus current) {
  List<IOException> deferred = new ArrayList<>();
  //check whether current master has changed
  final ServerName initMaster = initial.getMaster();
  if (!ServerName.isSameAddress(initMaster, current.getMaster())) {
    LOG.info("Restoring cluster - Initial active master : "
        + initMaster.getHostAndPort()
        + " has changed to : "
        + current.getMaster().getHostAndPort());
    // If initial master is stopped, start it, before restoring the state.
    // It will come up as a backup master, if there is already an active master.
    try {
      if (!clusterManager.isRunning(ServiceType.HBASE_MASTER,
          initMaster.getHostname(), initMaster.getPort())) {
        LOG.info("Restoring cluster - starting initial active master at:"
            + initMaster.getHostAndPort());
        startMaster(initMaster.getHostname(), initMaster.getPort());
      }
      // master has changed, we would like to undo this.
      // 1. Kill the current backups
      // 2. Stop current master
      // 3. Start backup masters
      for (ServerName currentBackup : current.getBackupMasters()) {
        if (!ServerName.isSameAddress(currentBackup, initMaster)) {
          LOG.info("Restoring cluster - stopping backup master: " + currentBackup);
          stopMaster(currentBackup);
        }
      }
      LOG.info("Restoring cluster - stopping active master: " + current.getMaster());
      stopMaster(current.getMaster());
      waitForActiveAndReadyMaster(); // wait so that active master takes over
    } catch (IOException ex) {
      // if we fail to start the initial active master, we do not want to continue stopping
      // backup masters. Just keep what we have now
      deferred.add(ex);
    }
    //start backup masters
    for (ServerName backup : initial.getBackupMasters()) {
      try {
        //these are not started in backup mode, but we should already have an active master
        if (!clusterManager.isRunning(ServiceType.HBASE_MASTER,
            backup.getHostname(),
            backup.getPort())) {
          LOG.info("Restoring cluster - starting initial backup master: "
              + backup.getHostAndPort());
          startMaster(backup.getHostname(), backup.getPort());
        }
      } catch (IOException ex) {
        deferred.add(ex);
      }
    }
  } else {
    //current master has not changed, match up backup masters
    // toStart: initial backups missing now; toKill: current backups not in the
    // initial layout. Start codes are ignored when comparing addresses.
    Set<ServerName> toStart = new TreeSet<>(new ServerNameIgnoreStartCodeComparator());
    Set<ServerName> toKill = new TreeSet<>(new ServerNameIgnoreStartCodeComparator());
    toStart.addAll(initial.getBackupMasters());
    toKill.addAll(current.getBackupMasters());
    for (ServerName server : current.getBackupMasters()) {
      toStart.remove(server);
    }
    for (ServerName server: initial.getBackupMasters()) {
      toKill.remove(server);
    }
    for (ServerName sn:toStart) {
      try {
        if(!clusterManager.isRunning(ServiceType.HBASE_MASTER, sn.getHostname(), sn.getPort())) {
          LOG.info("Restoring cluster - starting initial backup master: " + sn.getHostAndPort());
          startMaster(sn.getHostname(), sn.getPort());
        }
      } catch (IOException ex) {
        deferred.add(ex);
      }
    }
    for (ServerName sn:toKill) {
      try {
        if(clusterManager.isRunning(ServiceType.HBASE_MASTER, sn.getHostname(), sn.getPort())) {
          LOG.info("Restoring cluster - stopping backup master: " + sn.getHostAndPort());
          stopMaster(sn);
        }
      } catch (IOException ex) {
        deferred.add(ex);
      }
    }
  }
  if (!deferred.isEmpty()) {
    // Fixed copy-paste: this method restores masters, not region servers.
    LOG.warn("Restoring cluster - restoring masters reported "
        + deferred.size() + " errors:");
    for (int i=0; i<deferred.size() && i < 3; i++) {
      LOG.warn(deferred.get(i));
    }
  }
  return deferred.isEmpty();
}
/**
 * Orders ServerNames by case-insensitive hostname, then port, deliberately
 * ignoring the start code so restarted processes at the same address compare
 * equal. Integer.compare avoids the subtraction idiom (and its overflow risk).
 */
private static class ServerNameIgnoreStartCodeComparator implements Comparator<ServerName> {
  @Override
  public int compare(ServerName o1, ServerName o2) {
    int compare = o1.getHostname().compareToIgnoreCase(o2.getHostname());
    if (compare != 0) {
      return compare;
    }
    return Integer.compare(o1.getPort(), o2.getPort());
  }
}
/**
 * Best-effort restore of the region server set to the layout in {@code initial}:
 * starts initial servers that are no longer running and stops running servers
 * that were not part of the initial layout. Errors are collected and logged.
 *
 * @return true if no errors were encountered
 */
protected boolean restoreRegionServers(ClusterStatus initial, ClusterStatus current) {
  // toStart: initial servers missing now; toKill: current servers not in the
  // initial layout. Start codes are ignored when comparing addresses.
  Set<ServerName> toStart = new TreeSet<>(new ServerNameIgnoreStartCodeComparator());
  Set<ServerName> toKill = new TreeSet<>(new ServerNameIgnoreStartCodeComparator());
  toStart.addAll(initial.getServers());
  toKill.addAll(current.getServers());
  ServerName master = initial.getMaster();
  for (ServerName server : current.getServers()) {
    toStart.remove(server);
  }
  for (ServerName server: initial.getServers()) {
    toKill.remove(server);
  }
  List<IOException> deferred = new ArrayList<>();
  // NOTE(review): the master is excluded by comparing ports only
  // (master.getPort() != sn.getPort()); a region server on a different host
  // that happens to share the master's port would be skipped — confirm
  // whether a host+port comparison was intended.
  for(ServerName sn:toStart) {
    try {
      if (!clusterManager.isRunning(ServiceType.HBASE_REGIONSERVER,
          sn.getHostname(),
          sn.getPort())
          && master.getPort() != sn.getPort()) {
        LOG.info("Restoring cluster - starting initial region server: " + sn.getHostAndPort());
        startRegionServer(sn.getHostname(), sn.getPort());
      }
    } catch (IOException ex) {
      deferred.add(ex);
    }
  }
  for(ServerName sn:toKill) {
    try {
      if (clusterManager.isRunning(ServiceType.HBASE_REGIONSERVER,
          sn.getHostname(),
          sn.getPort())
          && master.getPort() != sn.getPort()){
        LOG.info("Restoring cluster - stopping initial region server: " + sn.getHostAndPort());
        stopRegionServer(sn);
      }
    } catch (IOException ex) {
      deferred.add(ex);
    }
  }
  if (!deferred.isEmpty()) {
    LOG.warn("Restoring cluster - restoring region servers reported "
        + deferred.size() + " errors:");
    // Log at most the first three errors to keep the output bounded.
    for (int i=0; i<deferred.size() && i < 3; i++) {
      LOG.warn(deferred.get(i));
    }
  }
  return deferred.isEmpty();
}
/**
 * Replaces the admin handle with a fresh one from the shared connection.
 * Needed after a restore because an admin created against the old active
 * master may return stale information. Always returns true; a failure to
 * close the old admin is only logged. A null admin is tolerated.
 */
protected boolean restoreAdmin() throws IOException {
  // While restoring above, if the HBase Master which was initially the Active one, was down
  // and the restore put the cluster back to Initial configuration, HAdmin instance will need
  // to refresh its connections (otherwise it will return incorrect information) or we can
  // point it to new instance.
  if (admin != null) {
    try {
      admin.close();
    } catch (IOException ioe) {
      LOG.warn("While closing the old connection", ioe);
    }
  }
  this.admin = this.connection.getAdmin();
  LOG.info("Added new HBaseAdmin");
  return true;
}
}
| apache-2.0 |
infochimps-forks/ezbake-platform-services | groups/service/src/test/java/ezbake/groups/service/EzGroupsIT.java | 10747 | /* Copyright (C) 2013-2015 Computer Sciences Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License. */
package ezbake.groups.service;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.thrift.TException;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import ezbake.base.thrift.EzSecurityToken;
import ezbake.groups.graph.UserGroupPermissionsWrapper;
import ezbake.groups.thrift.EzGroups;
import ezbake.groups.thrift.EzGroupsConstants;
import ezbake.groups.thrift.Group;
import ezbake.groups.thrift.GroupInheritancePermissions;
import ezbake.groups.thrift.User;
import ezbake.groups.thrift.UserType;
import ezbake.security.test.MockEzSecurityToken;
/**
 * Integration tests for the EzGroups thrift service, driven through a real
 * client from the shared client pool. Most calls act as the mock admin user
 * "steve"; expected group indices reflect the service's internal numbering.
 */
public class EzGroupsIT extends GroupsServiceCommonITSetup {
  private EzGroups.Client client;
  private final String adminId = "steve";
  // Token for "steve" with the admin flag set; created fresh per test.
  private EzSecurityToken adminToken;
  @Before
  public void setUp() throws TException {
    client = clientPool.getClient(EzGroupsConstants.SERVICE_NAME, EzGroups.Client.class);
    adminToken = MockEzSecurityToken.getMockUserToken(
        adminId, "", Sets.<String>newHashSet(), Maps.<String, List<String>>newHashMap(), true);
  }
  @Test
  public void testPing() throws TException {
    assertTrue(client.ping());
  }
  @Test
  public void testCreateUserAndGetUser() throws TException {
    client.createUser(adminToken, adminId, adminId);
    User user = client.getUser(adminToken, UserType.USER, adminId);
    User expectedUser = new User();
    expectedUser.setPrincipal(adminId);
    expectedUser.setName(adminId);
    expectedUser.setIsActive(true);
    assertEquals(expectedUser, user);
  }
  @Test
  public void testCreateGroup() throws TException {
    client.createUser(adminToken, adminId, adminId);
    // create the group
    final String createGroupId = "someGroup";
    long groupId = client.createGroup(adminToken, null, createGroupId, new GroupInheritancePermissions());
    // get the group
    final Group createGroup = client.getGroup(adminToken, createGroupId);
    // verify it is the group we created
    final Group expectedCreateGroup = getExpectedGroup(groupId, createGroupId, createGroupId);
    assertEquals(createGroup, expectedCreateGroup);
  }
  @Test
  public void testCreateAndGetGroup() throws TException {
    client.createUser(adminToken, adminId, adminId);
    // Hard-coded index depends on how many vertices the service has already
    // allocated at this point — presumably user + system groups; TODO confirm.
    final long expectedIndex = 4; // ?
    final String createAndGetGroupId = "alexGroup";
    final Group myGroup =
        client.createAndGetGroup(adminToken, null, createAndGetGroupId, new GroupInheritancePermissions());
    final Group expectedMyGroup = getExpectedGroup(expectedIndex, createAndGetGroupId, createAndGetGroupId);
    assertEquals(expectedMyGroup, myGroup);
  }
  @Test
  public void testGetGroupsMask() throws TException {
    long userIndex = client.createUser(adminToken, adminId, adminId);
    // IDs referred to when calling getGroupsMask
    final String groupId1 = "aGroup";
    final String groupId3 = "cGroup";
    final String appUserId = "appUser";
    // Create groups, create one in the middle we wont get the index for
    long groupIndex1 = client.createGroup(adminToken, null, groupId1, new GroupInheritancePermissions());
    client.createGroup(adminToken, null, "bGroup", new GroupInheritancePermissions());
    long groupIndex3 = client.createGroup(adminToken, null, groupId3, new GroupInheritancePermissions());
    // create app user we'll get the index for
    long appUserIndex = client.createAppUser(adminToken, appUserId, appUserId);
    Set<Long> expectedIndicess = Sets.newHashSet(userIndex, groupIndex1, groupIndex3, appUserIndex);
    Set<Long> actualIndices = client.getGroupsMask(
        adminToken, Sets.newHashSet(groupId1, groupId3), Sets.newHashSet(adminId), Sets.newHashSet(appUserId));
    assertEquals(expectedIndicess, actualIndices);
  }
  @Test
  public void testRenameGroup() throws TException {
    client.createUser(adminToken, adminId, adminId);
    // create several groups and track their names
    final String renameThisGroupName = "renameMe";
    final String childGroupOneFriendlyName = "c1";
    final String childGroupOneFullyQualifiedName =
        String.format("%s.%s", renameThisGroupName, childGroupOneFriendlyName);
    final String childGroupTwoName = "c2";
    client.createGroup(adminToken, null, renameThisGroupName, new GroupInheritancePermissions());
    client.createGroup(
        adminToken, renameThisGroupName, childGroupOneFriendlyName, new GroupInheritancePermissions());
    final long expectedId = client.createGroup(
        adminToken, childGroupOneFullyQualifiedName, childGroupTwoName, new GroupInheritancePermissions());
    // use the client to change the group name of the parent of the groups created in this section
    final String myNewName = "myNewName";
    client.changeGroupName(adminToken, renameThisGroupName, myNewName);
    // Renaming the root must cascade into descendants' fully-qualified names.
    final String expectedNewName =
        childGroupOneFullyQualifiedName.replace(renameThisGroupName, myNewName) + '.' + childGroupTwoName;
    // verify it is the renamed group we expected
    final Group renamedGrandChild = client.getGroup(adminToken, expectedNewName);
    final Group expectedRenamedGroup = getExpectedGroup(expectedId, expectedNewName, childGroupTwoName);
    assertEquals(expectedRenamedGroup, renamedGrandChild);
  }
  /**
   * Add some groups, users, and app users. Test the different combinations of app and user return the expected values
   * based on their users' permissions on the groups.
   */
  @Test
  public void testGetGroupNamesByIndices() throws Exception{
    // mock token validity.issuedTo() field comes populated with 'client' EzGroups looks for a group name of
    // 'client' if we don't add this we get an exception due to group not found
    client.createAppUser(adminToken, "client", "client");
    final Long userId1 = client.createUser(adminToken, adminId, adminId);
    final String gropu1Id = "group1";
    final String gropu2Id = "group2";
    final String user = "jar_jar";
    final Long userId2 = client.createUser(adminToken, user, user);
    final String appUser = "AnAppUser";
    client.createAppUser(adminToken, appUser, appUser);
    final EzSecurityToken userToken = MockEzSecurityToken.getMockUserToken(user);
    // create two groups we will use in tests
    final Group group1 = client.createAndGetGroup(adminToken, null, gropu1Id, new GroupInheritancePermissions());
    final Group group2 = client.createAndGetGroup(
        adminToken, gropu1Id, gropu2Id, new GroupInheritancePermissions(true, true, false, false, false));
    // Group two inherits from group one. If we add the user to group one then the user will have permission on both
    // group one and group two
    client.addUserToGroup(adminToken, group1.getGroupName(), user, UserGroupPermissionsWrapper.ownerPermissions());
    // We only add the app to group two, since group two does not inherit from group one, the app will only have
    // permissions on group two.
    client.addAppUserToGroup(
        adminToken, group2.getGroupName(), appUser, UserGroupPermissionsWrapper.ownerPermissions());
    // EzGroups indices we are going to request. We add in the indices Users here which, while map to something
    // in EzGroups, they do not map to a group in EzGroups so should be ignored.
    final Set<Long> requested = Sets.newHashSet(Sets.newHashSet(group1.getId(), group2.getId(), userId1, userId2));
    final Map<Long, String> expected = EzGroupsService.getUnloadedGroupIndexToNameMap(requested);
    // shouldn't get any groups back without the issuedTo field set on the token (identifies requesting app)
    assertEquals(expected, client.getGroupNamesByIndices(userToken, requested));
    // set token so that is issued to the app we created, this means that both the app
    // and the user will have permission on group two, but only the user will have permission on group one.
    // we should only get group two back.
    userToken.getValidity().setIssuedTo(appUser);
    expected.put(group2.getId(), group2.getGroupName());
    assertEquals(expected, client.getGroupNamesByIndices(userToken, requested));
    // If we add the app to the group its missing, then we can expect to get both groups back.
    client.addAppUserToGroup(
        adminToken, group1.getGroupName(), appUser, UserGroupPermissionsWrapper.ownerPermissions());
    expected.put(group1.getId(), group1.getGroupName());
    assertEquals(expected, client.getGroupNamesByIndices(userToken, requested));
    // use the admin token to get all groups.
    assertEquals(expected, client.getGroupNamesByIndices(adminToken, requested));
  }
  /**
   * Builds a group object for comparison in tests.
   *
   * @param expectedIndex expected index of the group
   * @param groupName group name
   * @param friendlyGroupName friendly group name
   * @return a newly build Group object with default values
   */
  private Group getExpectedGroup(long expectedIndex, String groupName, String friendlyGroupName) {
    final Group expectedMyGroup = new Group();
    expectedMyGroup.setGroupName(groupName);
    expectedMyGroup.setInheritancePermissions(new GroupInheritancePermissions());
    expectedMyGroup.setFriendlyName(friendlyGroupName);
    expectedMyGroup.setRequireOnlyUser(true);
    expectedMyGroup.setRequireOnlyAPP(false);
    expectedMyGroup.setIsActive(true);
    expectedMyGroup.setId(expectedIndex);
    return expectedMyGroup;
  }
}
| apache-2.0 |
jvelilla/hypermedia-client-java | src/main/java/com/comcast/cim/rest/client/xhtml/XhtmlHttpClient.java | 1924 | /*
Copyright (C) 2011 Comcast Interactive Media, LLC ("Licensor").
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.comcast.cim.rest.client.xhtml;
import java.io.IOException;
import java.net.URL;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpUriRequest;
/**
 * Convenience wrapper around {@link HttpClient} that asks for and parses
 * XHTML responses from the server.
 */
public class XhtmlHttpClient {
    /** Accept header preferring XHTML but allowing any content type as a fallback. */
    private static final String ACCEPT_HEADER = "application/xhtml+xml,*/*;q=0.9";
    // Collaborators are immutable after construction.
    private final HttpClient httpClient;
    private final XhtmlResponseHandlerFactory xhtmlResponseHandlerFactory;
    public XhtmlHttpClient(HttpClient hc, XhtmlResponseHandlerFactory xrhf) {
        this.httpClient = hc;
        this.xhtmlResponseHandlerFactory = xrhf;
    }
    /**
     * Executes the given HTTP request and returns the next
     * application state.
     * @param req HTTP request to execute
     * @return new application state
     * @throws ClientProtocolException if the response cannot be handled
     * @throws IOException on a transport-level failure
     */
    public XhtmlApplicationState execute(HttpUriRequest req)
        throws ClientProtocolException, IOException {
        req.setHeader("Accept",ACCEPT_HEADER);
        // The request URI serves as the base URL when the handler resolves
        // relative links in the parsed response.
        final URL context = new URL(req.getURI().toString());
        final XhtmlResponseHandler rh = xhtmlResponseHandlerFactory.get(context);
        return httpClient.execute(req, rh);
    }
}
| apache-2.0 |
MichaelEvans/assertj-android | assertj-android-support-v4/src/main/java/org/assertj/android/support/v4/api/Assertions.java | 5766 | // Copyright 2014 Square, Inc.
//
// This class is generated. Do not modify directly!
package org.assertj.android.support.v4.api;
/**
 * Assertions for testing Android classes. Each {@code assertThat} overload
 * wraps an actual support-v4 object in its corresponding *Assert type.
 * This class is generated; do not edit method bodies by hand.
 */
@SuppressWarnings("deprecation")
public final class Assertions {
  public static org.assertj.android.support.v4.api.app.ActionBarDrawerToggleAssert assertThat(
      android.support.v4.app.ActionBarDrawerToggle actual) {
    return new org.assertj.android.support.v4.api.app.ActionBarDrawerToggleAssert(actual);
  }
  public static org.assertj.android.support.v4.api.app.DialogFragmentAssert assertThat(
      android.support.v4.app.DialogFragment actual) {
    return new org.assertj.android.support.v4.api.app.DialogFragmentAssert(actual);
  }
  public static org.assertj.android.support.v4.api.app.FragmentAssert assertThat(
      android.support.v4.app.Fragment actual) {
    return new org.assertj.android.support.v4.api.app.FragmentAssert(actual);
  }
  public static org.assertj.android.support.v4.api.app.FragmentManagerAssert assertThat(
      android.support.v4.app.FragmentManager actual) {
    return new org.assertj.android.support.v4.api.app.FragmentManagerAssert(actual);
  }
  public static org.assertj.android.support.v4.api.app.FragmentTransactionAssert assertThat(
      android.support.v4.app.FragmentTransaction actual) {
    return new org.assertj.android.support.v4.api.app.FragmentTransactionAssert(actual);
  }
  public static org.assertj.android.support.v4.api.app.ListFragmentAssert assertThat(
      android.support.v4.app.ListFragment actual) {
    return new org.assertj.android.support.v4.api.app.ListFragmentAssert(actual);
  }
  public static org.assertj.android.support.v4.api.app.LoaderManagerAssert assertThat(
      android.support.v4.app.LoaderManager actual) {
    return new org.assertj.android.support.v4.api.app.LoaderManagerAssert(actual);
  }
  public static org.assertj.android.support.v4.api.content.CursorLoaderAssert assertThat(
      android.support.v4.content.CursorLoader actual) {
    return new org.assertj.android.support.v4.api.content.CursorLoaderAssert(actual);
  }
  public static org.assertj.android.support.v4.api.content.LoaderAssert assertThat(
      android.support.v4.content.Loader actual) {
    return new org.assertj.android.support.v4.api.content.LoaderAssert(actual);
  }
  public static org.assertj.android.support.v4.api.media.TransportControllerAssert assertThat(
      android.support.v4.media.TransportController actual) {
    return new org.assertj.android.support.v4.api.media.TransportControllerAssert(actual);
  }
  public static org.assertj.android.support.v4.api.media.TransportMediatorAssert assertThat(
      android.support.v4.media.TransportMediator actual) {
    return new org.assertj.android.support.v4.api.media.TransportMediatorAssert(actual);
  }
  public static org.assertj.android.support.v4.api.print.PrintHelperAssert assertThat(
      android.support.v4.print.PrintHelper actual) {
    return new org.assertj.android.support.v4.api.print.PrintHelperAssert(actual);
  }
  public static org.assertj.android.support.v4.api.util.AtomicFileAssert assertThat(
      android.support.v4.util.AtomicFile actual) {
    return new org.assertj.android.support.v4.api.util.AtomicFileAssert(actual);
  }
  public static <E> org.assertj.android.support.v4.api.util.CircularArrayAssert<E> assertThat(
      android.support.v4.util.CircularArray<E> actual) {
    return new org.assertj.android.support.v4.api.util.CircularArrayAssert<>(actual);
  }
  public static org.assertj.android.support.v4.api.util.LongSparseArrayAssert assertThat(
      android.support.v4.util.LongSparseArray actual) {
    return new org.assertj.android.support.v4.api.util.LongSparseArrayAssert(actual);
  }
  public static <K, V> org.assertj.android.support.v4.api.util.LruCacheAssert<K, V> assertThat(
      android.support.v4.util.LruCache<K, V> actual) {
    return new org.assertj.android.support.v4.api.util.LruCacheAssert<>(actual);
  }
  public static <E> org.assertj.android.support.v4.api.util.SparseArrayCompatAssert<E> assertThat(
      android.support.v4.util.SparseArrayCompat<E> actual) {
    return new org.assertj.android.support.v4.api.util.SparseArrayCompatAssert<>(actual);
  }
  public static org.assertj.android.support.v4.api.view.PagerAdapterAssert assertThat(
      android.support.v4.view.PagerAdapter actual) {
    return new org.assertj.android.support.v4.api.view.PagerAdapterAssert(actual);
  }
  public static org.assertj.android.support.v4.api.view.ViewPagerAssert assertThat(
      android.support.v4.view.ViewPager actual) {
    return new org.assertj.android.support.v4.api.view.ViewPagerAssert(actual);
  }
  public static org.assertj.android.support.v4.api.widget.CursorAdapterAssert assertThat(
      android.support.v4.widget.CursorAdapter actual) {
    return new org.assertj.android.support.v4.api.widget.CursorAdapterAssert(actual);
  }
  public static org.assertj.android.support.v4.api.widget.SimpleCursorAdapterAssert assertThat(
      android.support.v4.widget.SimpleCursorAdapter actual) {
    return new org.assertj.android.support.v4.api.widget.SimpleCursorAdapterAssert(actual);
  }
  public static org.assertj.android.support.v4.api.widget.SlidingPaneLayoutAssert assertThat(
      android.support.v4.widget.SlidingPaneLayout actual) {
    return new org.assertj.android.support.v4.api.widget.SlidingPaneLayoutAssert(actual);
  }
  public static org.assertj.android.support.v4.api.widget.SwipeRefreshLayoutAssert assertThat(
      android.support.v4.widget.SwipeRefreshLayout actual) {
    return new org.assertj.android.support.v4.api.widget.SwipeRefreshLayoutAssert(actual);
  }
  // Static utility holder; instantiation is a programming error.
  private Assertions() {
    throw new AssertionError("No instances.");
  }
}
| apache-2.0 |
nickperez1285/truck-hunt-hackathon | client/stemapp/themes/TabTheme/widgets/Header/nls/et/strings.js | 235 | define(
({
_widgetLabel: "Päis",
signin: "Logi sisse",
signout: "Logi välja",
about: "Info",
signInTo: "Logi sisse",
cantSignOutTip: "See funktsioon pole eelvaaterežiimis rakendatav."
})
);
| apache-2.0 |
Elopteryx/paint-upload | upload-parser-tests/src/test/java/com/github/elopteryx/upload/internal/Base64EncodingTest.java | 1596 | package com.github.elopteryx.upload.internal;
import static java.nio.charset.StandardCharsets.US_ASCII;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;
import org.junit.jupiter.api.Test;
import java.io.IOException;
import java.nio.ByteBuffer;
/**
 * Unit tests for the multipart parser's base64 decoder, using the RFC 4648
 * section 10 test vectors.
 */
class Base64EncodingTest {
    @Test
    void these_values_should_work() throws IOException {
        // RFC 4648 test vectors: decoding the right column must yield the left.
        checkEncoding("", "");
        checkEncoding("f", "Zg==");
        checkEncoding("fo", "Zm8=");
        checkEncoding("foo", "Zm9v");
        checkEncoding("foob", "Zm9vYg==");
        checkEncoding("fooba", "Zm9vYmE=");
        checkEncoding("foobar", "Zm9vYmFy");
    }
    @Test
    void must_throw_exception_on_invalid_data() {
        // The embedded character is not a valid base64 symbol, so decoding must fail.
        assertThrows(IOException.class, () -> checkEncoding("f", "Zg=�="));
    }
    /**
     * Decodes {@code encoded} with the parser's base64 decoder and asserts the
     * decoded text equals {@code original}.
     */
    private static void checkEncoding(final String original, final String encoded) throws IOException {
        final var encoding = new MultipartParser.Base64Encoding(1024);
        encoding.handle(new MultipartParser.PartHandler() {
            @Override
            public void beginPart(final Headers headers) {
                // No-op
            }
            @Override
            public void data(final ByteBuffer buffer) {
                final var parserResult = new String(buffer.array(), US_ASCII).trim();
                // Fixed argument order: JUnit's assertEquals is (expected, actual).
                assertEquals(original, parserResult);
            }
            @Override
            public void endPart() {
                // No-op
            }
        }, ByteBuffer.wrap(encoded.getBytes(US_ASCII)));
    }
}
| apache-2.0 |
justinleet/incubator-metron | metron-platform/metron-common/src/test/java/org/apache/metron/common/utils/RuntimeErrorsTest.java | 2546 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.metron.common.utils;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import java.io.IOException;
import static org.hamcrest.CoreMatchers.instanceOf;
import static org.hamcrest.CoreMatchers.nullValue;
/**
 * Unit tests for RuntimeErrors: each enum value must throw its corresponding
 * unchecked exception carrying the given reason and, when supplied, the cause.
 */
public class RuntimeErrorsTest {
  // NOTE(review): JUnit 4's ExpectedException rule is deprecated in newer
  // JUnit releases; consider migrating to Assert.assertThrows — confirm the
  // JUnit version in use before changing.
  @Rule
  public ExpectedException exception = ExpectedException.none();
  @Test
  public void illegal_arg_throws_exception_with_reason() throws Exception {
    exception.expect(IllegalArgumentException.class);
    exception.expectMessage("illegal arg happened");
    // No cause was supplied, so none must be attached.
    exception.expectCause(nullValue(Throwable.class));
    RuntimeErrors.ILLEGAL_ARG.throwRuntime("illegal arg happened");
  }
  @Test
  public void illegal_arg_throws_exception_with_reason_and_cause() throws Exception {
    exception.expect(IllegalArgumentException.class);
    exception.expectMessage("illegal arg happened");
    exception.expectCause(instanceOf(IOException.class));
    RuntimeErrors.ILLEGAL_ARG.throwRuntime("illegal arg happened", new IOException("bad io"));
  }
  @Test
  public void illegal_state_throws_exception_with_reason() throws Exception {
    exception.expect(IllegalStateException.class);
    exception.expectMessage("illegal state happened");
    exception.expectCause(nullValue(Throwable.class));
    RuntimeErrors.ILLEGAL_STATE.throwRuntime("illegal state happened");
  }
  @Test
  public void illegal_state_throws_exception_with_reason_and_cause() throws Exception {
    exception.expect(IllegalStateException.class);
    exception.expectMessage("illegal state happened");
    exception.expectCause(instanceOf(IOException.class));
    RuntimeErrors.ILLEGAL_STATE.throwRuntime("illegal state happened", new IOException("bad io"));
  }
}
| apache-2.0 |
tungvx/deploy | .google_appengine/lib/protorpc/protorpc/protobuf.py | 9491 | #!/usr/bin/env python
#
# Copyright 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Protocol buffer support for message types.
For more details about protocol buffer encoding and decoding please see:
http://code.google.com/apis/protocolbuffers/docs/encoding.html
Public Exceptions:
DecodeError: Raised when a decode error occurs from incorrect protobuf format.
Public Functions:
encode_message: Encodes a message in to a protocol buffer string.
decode_message: Decode from a protocol buffer string to a message.
"""
__author__ = 'rafek@google.com (Rafe Kaplan)'
import array
import cStringIO
from protorpc import messages
# TODO(rafek): Do something about this dependency maybe.
from google.net.proto import ProtocolBuffer
# Public API of this module.
__all__ = ['CONTENT_TYPE',
           'encode_message',
           'decode_message',
          ]

# MIME content type advertised for protobuf-encoded message bodies.
CONTENT_TYPE = 'application/x-google-protobuf'
class _Encoder(ProtocolBuffer.Encoder):
  """Extension of protocol buffer encoder.

  The stock ProtocolBuffer.Encoder lacks some of the write operations that
  message serialization needs; this subclass supplies them.
  """

  # TODO(rafek): Implement the missing encoding types.

  def no_encoding(self, value):
    """Placeholder for variants with no supported encoding.

    Args:
      value: Value that would have been encoded.

    Raises:
      NotImplementedError: always.
    """
    raise NotImplementedError()

  def encode_enum(self, value):
    """Write an enum as its numeric value.

    Args:
      value: Enum to encode.
    """
    self.putVarInt32(value.number)

  def encode_message(self, value):
    """Write a nested Message as a length-prefixed encoding.

    Args:
      value: Message instance to encode.
    """
    self.putPrefixedString(encode_message(value))

  def encode_unicode_string(self, value):
    """Write a string, UTF-8 encoding unicode values first.

    Args:
      value: String value to encode.
    """
    data = value
    if isinstance(data, unicode):
      data = data.encode('utf-8')
    self.putPrefixedString(data)
class _Decoder(ProtocolBuffer.Decoder):
  """Extension of protocol buffer decoder.

  The stock ProtocolBuffer.Decoder lacks some of the read operations that
  message deserialization needs; this subclass supplies them.
  """

  # TODO(rafek): Implement the missing encoding types.

  def no_decoding(self):
    """Placeholder for variants with no supported decoding.

    Raises:
      NotImplementedError: always.
    """
    raise NotImplementedError()

  def decode_string(self):
    """Read a length-prefixed string and return it decoded as UTF-8.

    Returns:
      Next value in stream as a unicode string.
    """
    raw = self.getPrefixedString()
    return raw.decode('UTF-8')

  def decode_boolean(self):
    """Read the next value in the stream as a boolean.

    Returns:
      Next value in stream as a boolean.
    """
    return bool(self.getBoolean())
# Number of low-order bits in an encoded tag that hold the wire type; the
# field number occupies the remaining high bits.
_WIRE_TYPE_BITS = 3
_WIRE_TYPE_MASK = 7  # == (1 << _WIRE_TYPE_BITS) - 1

# Maps variant to underlying wire type.  Many variants map to same type.
_VARIANT_TO_WIRE_TYPE = {
    messages.Variant.DOUBLE: _Encoder.DOUBLE,
    messages.Variant.FLOAT: _Encoder.FLOAT,
    messages.Variant.INT64: _Encoder.NUMERIC,
    messages.Variant.UINT64: _Encoder.NUMERIC,
    messages.Variant.INT32: _Encoder.NUMERIC,
    messages.Variant.BOOL: _Encoder.NUMERIC,
    messages.Variant.STRING: _Encoder.STRING,
    messages.Variant.MESSAGE: _Encoder.STRING,
    messages.Variant.BYTES: _Encoder.STRING,
    messages.Variant.UINT32: _Encoder.NUMERIC,
    messages.Variant.ENUM: _Encoder.NUMERIC,
    messages.Variant.SINT32: _Encoder.NUMERIC,
    messages.Variant.SINT64: _Encoder.NUMERIC,
}

# Maps variant to encoder method (unbound; called as fn(encoder, value)).
_VARIANT_TO_ENCODER_MAP = {
    messages.Variant.DOUBLE: _Encoder.putDouble,
    messages.Variant.FLOAT: _Encoder.putFloat,
    messages.Variant.INT64: _Encoder.putVarInt64,
    messages.Variant.UINT64: _Encoder.putVarUint64,
    messages.Variant.INT32: _Encoder.putVarInt32,
    messages.Variant.BOOL: _Encoder.putBoolean,
    messages.Variant.STRING: _Encoder.encode_unicode_string,
    messages.Variant.MESSAGE: _Encoder.encode_message,
    messages.Variant.BYTES: _Encoder.encode_unicode_string,
    messages.Variant.UINT32: _Encoder.no_encoding,
    messages.Variant.ENUM: _Encoder.encode_enum,
    messages.Variant.SINT32: _Encoder.no_encoding,
    messages.Variant.SINT64: _Encoder.no_encoding,
}

# Basic wire format decoders.  Used for skipping unknown values.
_WIRE_TYPE_TO_DECODER_MAP = {
    _Encoder.NUMERIC: _Decoder.getVarInt32,
    _Encoder.DOUBLE: _Decoder.getDouble,
    _Encoder.STRING: _Decoder.getPrefixedString,
    _Encoder.FLOAT: _Decoder.getFloat,
}

# Wire type to name mapping for error messages.
_WIRE_TYPE_NAME = {
    _Encoder.NUMERIC: 'NUMERIC',
    _Encoder.DOUBLE: 'DOUBLE',
    _Encoder.STRING: 'STRING',
    _Encoder.FLOAT: 'FLOAT',
}

# Maps variant to decoder method (unbound; called as fn(decoder)).
_VARIANT_TO_DECODER_MAP = {
    messages.Variant.DOUBLE: _Decoder.getDouble,
    messages.Variant.FLOAT: _Decoder.getFloat,
    messages.Variant.INT64: _Decoder.getVarInt64,
    messages.Variant.UINT64: _Decoder.getVarUint64,
    messages.Variant.INT32: _Decoder.getVarInt32,
    messages.Variant.BOOL: _Decoder.decode_boolean,
    messages.Variant.STRING: _Decoder.decode_string,
    messages.Variant.MESSAGE: _Decoder.getPrefixedString,
    messages.Variant.BYTES: _Decoder.getPrefixedString,
    messages.Variant.UINT32: _Decoder.no_decoding,
    messages.Variant.ENUM: _Decoder.getVarInt32,
    messages.Variant.SINT32: _Decoder.no_decoding,
    messages.Variant.SINT64: _Decoder.no_decoding,
}
def encode_message(message):
  """Encode Message instance to protocol buffer.

  Args:
    message: Message instance to encode in to protocol buffer.

  Returns:
    String encoding of Message instance in protocol buffer format.

  Raises:
    messages.ValidationError if message is not initialized.
  """
  message.check_initialized()
  encoder = _Encoder()

  # Fields are written in ascending tag-number order.
  for field in sorted(message.all_fields(), key=lambda f: f.number):
    value = message.get_assigned_value(field.name)
    if value is not None:
      # A tag packs the field number above the wire-type bits.
      tag = ((field.number << _WIRE_TYPE_BITS) |
             _VARIANT_TO_WIRE_TYPE[field.variant])

      # Normalize so repeated and singular fields share one write path.
      if field.repeated:
        values = value
      else:
        values = [value]

      # Encoder lookup is loop-invariant; hoist it out of the inner loop.
      field_encoder = _VARIANT_TO_ENCODER_MAP[field.variant]
      for item in values:  # renamed from 'next', which shadowed the builtin
        encoder.putVarInt32(tag)
        field_encoder(encoder, item)

  return encoder.buffer().tostring()
def decode_message(message_type, encoded_message):
  """Decode protocol buffer to Message instance.

  Args:
    message_type: Message type to decode data to.
    encoded_message: Encoded version of message as string.

  Returns:
    Decoded instance of message_type.

  Raises:
    DecodeError if an error occurs during decoding, such as incompatible
      wire format for a field.
    messages.ValidationError if merged message is not initialized.
  """
  message = message_type()
  # View the raw bytes as an array of unsigned bytes for the decoder.
  message_array = array.array('B')
  message_array.fromstring(encoded_message)
  try:
    decoder = _Decoder(message_array, 0, len(message_array))

    while decoder.avail() > 0:
      # Decode tag and variant information.
      encoded_tag = decoder.getVarInt32()
      tag = encoded_tag >> _WIRE_TYPE_BITS
      wire_type = encoded_tag & _WIRE_TYPE_MASK
      try:
        found_wire_type_decoder = _WIRE_TYPE_TO_DECODER_MAP[wire_type]
      except:
        # NOTE(review): bare except; a KeyError is the expected failure here.
        raise messages.DecodeError('No such wire type %d' % wire_type)

      if tag < 1:
        raise messages.DecodeError('Invalid tag value %d' % tag)

      try:
        field = message.field_by_number(tag)
      except KeyError:
        # Unexpected tags are ok, just ignored unless below 0.
        # Still decode the value with the generic wire-type decoder so the
        # stream position stays in sync.
        field = None
        wire_type_decoder = found_wire_type_decoder
      else:
        # Known field: its declared variant must match the wire type seen.
        expected_wire_type = _VARIANT_TO_WIRE_TYPE[field.variant]
        if expected_wire_type != wire_type:
          raise messages.DecodeError('Expected wire type %s but found %s' % (
              _WIRE_TYPE_NAME[expected_wire_type],
              _WIRE_TYPE_NAME[wire_type]))

        wire_type_decoder = _VARIANT_TO_DECODER_MAP[field.variant]

      value = wire_type_decoder(decoder)

      # Skip additional processing if unknown field.
      if not field:
        continue

      # Special case Enum and Message types.
      if isinstance(field, messages.EnumField):
        value = field.type(value)
      elif isinstance(field, messages.MessageField):
        # Nested messages arrive as length-prefixed strings; decode recursively.
        nested_message = decode_message(field.type, value)
        value = nested_message

      # Merge value in to message.
      if field.repeated:
        values = getattr(message, field.name)
        if values is None:
          setattr(message, field.name, [value])
        else:
          values.append(value)
      else:
        setattr(message, field.name, value)
  except ProtocolBuffer.ProtocolBufferDecodeError, err:
    raise messages.DecodeError('Decoding error: %s' % str(err))

  message.check_initialized()
  return message
| apache-2.0 |
airhorns/CitrusMobile | Resources/vendor/spazcore/libs/spaztemplate.js | 1250 | /*jslint
browser: true,
nomen: false,
debug: true,
forin: true,
plusplus: false,
undef: true,
white: false,
onevar: false
*/
var sc;

/**
 * SpazTemplate -- a minimal registry of named templating functions,
 * designed for fast templating.
 * @class SpazTemplate
 * @constructor
 */
function SpazTemplate() {
	this._tpls = {};
}

/**
 * Registers a template function under a name.
 * @param string name the name to call the method with in parseTemplate
 * @param method function the template method; takes one data param, returns a string
 */
SpazTemplate.prototype.addTemplateMethod = function(name, method) {
	this._tpls[name] = method;
};

/**
 * Runs a registered template function against the given data.
 * @param string methodname the template method to call
 * @param mixed data data to be used by the template method
 * @return string
 */
SpazTemplate.prototype.parseTemplate = function(methodname, data) {
	return this._tpls[methodname](data);
};

/**
 * Runs a registered template once per array element and concatenates
 * the results in order.
 * @param string methodname the template method to call
 * @param array data_array an array of objects to pass to the template method
 * @return string
 */
SpazTemplate.prototype.parseArray = function(methodname, data_array) {
	var pieces = [];
	for (var i = 0; i < data_array.length; i++) {
		pieces.push(this.parseTemplate(methodname, data_array[i]));
	}
	return pieces.join('');
};
| apache-2.0 |
golang/mock | mockgen/internal/tests/import_embedded_interface/bugreport_test.go | 952 | // Copyright 2020 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package bugreport
import (
"testing"
"github.com/golang/mock/gomock"
)
// TestValidInterface assesses whether or not the generated mock is valid
func TestValidInterface(t *testing.T) {
	mockCtrl := gomock.NewController(t)
	defer mockCtrl.Finish()

	// Both embedded-interface methods must be reachable through the mock.
	source := NewMockSource(mockCtrl)
	source.EXPECT().Ersatz().Return("")
	source.EXPECT().OtherErsatz().Return("")

	CallForeignMethod(source)
}
| apache-2.0 |
arenadata/ambari | ambari-server/src/main/java/org/apache/ambari/server/topology/HostGroupImpl.java | 8232 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ambari.server.topology;
import com.google.gson.Gson;
import org.apache.ambari.server.controller.internal.ProvisionAction;
import org.apache.ambari.server.controller.internal.Stack;
import org.apache.ambari.server.orm.entities.HostGroupComponentEntity;
import org.apache.ambari.server.orm.entities.HostGroupConfigEntity;
import org.apache.ambari.server.orm.entities.HostGroupEntity;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
/**
 * Host Group implementation.
 *
 * Wraps either a persisted HostGroupEntity or an explicit set of components,
 * and tracks the per-service component membership of the group.
 */
public class HostGroupImpl implements HostGroup {

  /**
   * host group name
   */
  private String name;

  /**
   * blueprint name
   */
  private String blueprintName;

  /**
   * components contained in the host group
   */
  private Map<String, Component> components = new HashMap<String, Component>();

  /**
   * map of service to components for the host group
   */
  private Map<String, Set<String>> componentsForService = new HashMap<String, Set<String>>();

  /**
   * configuration
   */
  private Configuration configuration = null;

  // true once any component the stack classifies as a master has been added
  private boolean containsMasterComponent = false;

  // stack metadata used to classify components and map them to services
  private Stack stack;

  // cardinality expression from the blueprint; default used when unspecified
  private String cardinality = "NOT SPECIFIED";

  /**
   * Construct a host group from its persisted entity, parsing its
   * components and configurations.
   */
  public HostGroupImpl(HostGroupEntity entity, String blueprintName, Stack stack) {
    this.name = entity.getName();
    this.cardinality = entity.getCardinality();
    this.blueprintName = blueprintName;
    this.stack = stack;
    parseComponents(entity);
    parseConfigurations(entity);
  }

  /**
   * Construct a host group from explicit components and configuration.
   */
  public HostGroupImpl(String name, String bpName, Stack stack, Collection<Component> components, Configuration configuration, String cardinality) {
    this.name = name;
    this.blueprintName = bpName;
    this.stack = stack;

    // process each component
    for (Component component : components) {
      addComponent(component.getName(), component.getProvisionAction());
    }

    this.configuration = configuration;
    // the literal string "null" can appear here; treat it like absent
    if (cardinality != null && ! cardinality.equals("null")) {
      this.cardinality = cardinality;
    }
  }

  @Override
  public String getName() {
    return name;
  }

  //todo: currently not qualifying host group name
  @Override
  public String getFullyQualifiedName() {
    return String.format("%s:%s", blueprintName, getName());
  }

  //todo: currently not qualifying host group name
  public static String formatAbsoluteName(String bpName, String hgName) {
    return String.format("%s:%s", bpName, hgName);
  }

  @Override
  public Collection<Component> getComponents() {
    return components.values();
  }

  @Override
  public Collection<String> getComponentNames() {
    return components.keySet();
  }

  @Override
  public Collection<String> getComponentNames(ProvisionAction provisionAction) {
    // filter components down to those with the requested provision action
    Set<String> setOfComponentNames = new HashSet<String>();
    for (String componentName : components.keySet()) {
      Component component = components.get(componentName);
      if ( (component.getProvisionAction() != null) && (component.getProvisionAction() == provisionAction) ) {
        setOfComponentNames.add(componentName);
      }
    }
    return setOfComponentNames;
  }

  /**
   * Get the services which are deployed to this host group.
   *
   * @return collection of services which have components in this host group
   */
  @Override
  public Collection<String> getServices() {
    return componentsForService.keySet();
  }

  /**
   * Add a component to the host group.
   *
   * @param component  component to add
   *
   * @return true if component was added; false if component already existed
   */
  @Override
  public boolean addComponent(String component) {
    return this.addComponent(component, null);
  }

  /**
   * Add a component with the specified provision action to the
   * host group.
   *
   * @param component        component name
   * @param provisionAction  provision action for this component
   *
   * @return true if component was added; false if component already existed
   */
  public boolean addComponent(String component, ProvisionAction provisionAction) {
    boolean added;
    if (!components.containsKey(component)) {
      components.put(component, new Component(component, provisionAction));
      added = true;
    } else {
      added = false;
    }

    // master flag is (re)checked even when the component already existed
    if (stack.isMasterComponent(component)) {
      containsMasterComponent = true;
    }
    if (added) {
      String service = stack.getServiceForComponent(component);
      if (service != null) {
        // an example of a component without a service in the stack is AMBARI_SERVER
        Set<String> serviceComponents = componentsForService.get(service);
        if (serviceComponents == null) {
          serviceComponents = new HashSet<String>();
          componentsForService.put(service, serviceComponents);
        }
        serviceComponents.add(component);
      }
    }
    return added;
  }

  /**
   * Get the components for the specified service which are associated with the host group.
   *
   * @param service  service name
   *
   * @return set of component names
   */
  @Override
  public Collection<String> getComponents(String service) {
    // defensive copy so callers cannot mutate internal state
    return componentsForService.containsKey(service) ?
        new HashSet<String>(componentsForService.get(service)) :
        Collections.<String>emptySet();
  }

  /**
   * Get this host groups configuration.
   *
   * @return configuration instance
   */
  @Override
  public Configuration getConfiguration() {
    return configuration;
  }

  /**
   * Get the associated blueprint name.
   *
   * @return associated blueprint name
   */
  @Override
  public String getBlueprintName() {
    return blueprintName;
  }

  @Override
  public boolean containsMasterComponent() {
    return containsMasterComponent;
  }

  @Override
  public Stack getStack() {
    return stack;
  }

  @Override
  public String getCardinality() {
    return cardinality;
  }

  /**
   * Parse component information.
   */
  private void parseComponents(HostGroupEntity entity) {
    for (HostGroupComponentEntity componentEntity : entity.getComponents() ) {
      if (componentEntity.getProvisionAction() != null) {
        addComponent(componentEntity.getName(), ProvisionAction.valueOf(componentEntity.getProvisionAction()));
      } else {
        addComponent(componentEntity.getName());
      }
    }
  }

  /**
   * Parse host group configurations.
   */
  //todo: use ConfigurationFactory
  private void parseConfigurations(HostGroupEntity entity) {
    Map<String, Map<String, String>> config = new HashMap<String, Map<String, String>>();
    Gson jsonSerializer = new Gson();

    for (HostGroupConfigEntity configEntity : entity.getConfigurations()) {
      String type = configEntity.getType();
      Map<String, String> typeProperties = config.get(type);
      if ( typeProperties == null) {
        typeProperties = new HashMap<String, String>();
        config.put(type, typeProperties);
      }
      // config data is stored as a JSON blob of property name -> value
      Map<String, String> propertyMap = jsonSerializer.<Map<String, String>>fromJson(
          configEntity.getConfigData(), Map.class);
      if (propertyMap != null) {
        typeProperties.putAll(propertyMap);
      }
    }
    //todo: parse attributes
    Map<String, Map<String, Map<String, String>>> attributes = new HashMap<String, Map<String, Map<String, String>>>();
    configuration = new Configuration(config, attributes);
  }
}
| apache-2.0 |
CodeSmell/camel | core/camel-core/src/test/java/org/apache/camel/component/bean/MethodCallBeanRefMethodNotFoundTest.java | 2302 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.bean;
import org.apache.camel.ContextTestSupport;
import org.apache.camel.FailedToCreateRouteException;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.impl.JndiRegistry;
import org.junit.Test;
public class MethodCallBeanRefMethodNotFoundTest extends ContextTestSupport {

    @Override
    protected JndiRegistry createRegistry() throws Exception {
        JndiRegistry jndi = super.createRegistry();
        // Register the bean under the name the routes reference ("foo").
        jndi.bind("foo", new MyFooBean());
        return jndi;
    }

    @Test
    public void testMethodCallBeanRefMethodNotFound() throws Exception {
        context.addRoutes(new RouteBuilder() {
            @Override
            public void configure() throws Exception {
                // Route "a" calls a method expected to exist on MyFooBean,
                // while route "b" calls "bye", which the assertions below
                // show does not exist -- so context startup must fail on "b".
                from("direct:a").routeId("a").split().method("foo", "hello").to("mock:a");
                from("direct:b").routeId("b").split().method("foo", "bye").to("mock:b");
            }
        });
        try {
            context.start();
            fail("Should have thrown exception");
        } catch (Exception e) {
            // The failure must identify the broken route ("b") and carry a
            // MethodNotFoundException naming the missing method.
            FailedToCreateRouteException failed = assertIsInstanceOf(FailedToCreateRouteException.class, e);
            assertEquals("b", failed.getRouteId());
            MethodNotFoundException cause = assertIsInstanceOf(MethodNotFoundException.class, e.getCause());
            assertEquals("bye", cause.getMethodName());
        }
    }

    @Override
    public boolean isUseRouteBuilder() {
        // Routes are added inside the test so startup failure can be asserted.
        return false;
    }
}
| apache-2.0 |
fkolacek/FIT-VUT | bp-jenkins-scripts/tools/eval.php | 1942 | #!/usr/bin/php
<?php
require "./db.php";
if(!isset($argv[1]))
die("Usage: ".$argv[0]." [scanner] [path]\n");
$NAME = $argv[1];
$PATH = (isset($argv[2])? $argv[2] : "results");
$dirHandler = opendir($PATH) or die("Cannot open input dir: ".$PATH);
$attacks = Array();
while($file = readdir($dirHandler)){
if(preg_match("#".$NAME."#", $file)){
$fileHandler = fopen($PATH."/".$file, "r") or die("Cannot open input file: ".$file);
while($line = fgets($fileHandler)){
switch($NAME){
case "wapiti":
if(preg_match("#^.+path\": \"/([^\"]+)\"#", $line, $parts) == 1)
$attacks[] = $parts[1];
break;
case "w3af":
if(preg_match("#,http://.+/(wavsep.+\.jsp)#", $line, $parts) == 1)
$attacks[] = $parts[1];
break;
case "revok":
if(preg_match("#^.+(wavsep.+\.jsp)#", $line, $parts) == 1)
$attacks[] = $parts[1];
default:
break;
}
}
fclose($fileHandler);
}
}
$attacksDB = Array();
$dbQuery = mysql_query("SELECT id, link FROM attacks") or die(mysql_error($dbHandler));
while($dbResult = mysql_fetch_array($dbQuery)){
$attacksDB[($dbResult['link'])] = $dbResult['id'];
}
echo "[*] Grabbed ".count($attacks)." vulnerabilities\n";
mysql_query(sprintf("INSERT INTO tests (scanner, date) VALUES ('%s', '%s')", mysql_real_escape_string($NAME, $dbHandler), date('Y-m-d H:i:s')), $dbHandler) or die(mysql_error($dbHandler));
$testID = mysql_insert_id($dbHandler);
echo "[*] Updating db with test ID: ".$testID."\n";
foreach($attacksDB as $link=>$id){
if(in_array($link, $attacks)){
$query = sprintf("INSERT INTO hits (scannerID, attackID) VALUES (%d, %d)", $testID, $id);
mysql_query($query, $dbHandler) or die(mysql_error($dbHandler));
//echo $query."\n";
}
}
echo "[*] Done\n";
//echo implode("\n", $attacks);
//echo "\n";
closedir($dirHandler);
| apache-2.0 |
prepare/FO.NET | src/Core/Fo/ListProperty.cs | 1082 | //Apache2, 2017, WinterDev
//Apache2, 2009, griffm, FO.NET
using System.Collections;
namespace Fonet.Fo
{
/// <summary>
/// A property whose value is an ordered list of other properties.
/// </summary>
internal class ListProperty : Property
{
    /// <summary>
    /// Maker that passes ListProperty values through unchanged and wraps
    /// any other property into a single-element list.
    /// </summary>
    internal class Maker : PropertyMaker
    {
        public Maker(string name) : base(name) { }

        public override Property ConvertProperty(
            Property p, PropertyList propertyList, FObj fo)
        {
            // Already a list: return as-is; otherwise wrap it.
            return (p is ListProperty) ? p : new ListProperty(p);
        }
    }

    // Backing storage; protected so subclasses can access the raw list.
    protected ArrayList list;

    public ListProperty(Property prop)
    {
        list = new ArrayList { prop };
    }

    /// <summary>Appends another property to the list.</summary>
    public void addProperty(Property prop)
    {
        list.Add(prop);
    }

    public override ArrayList GetList()
    {
        return list;
    }

    public override object GetObject()
    {
        return list;
    }
}
} | apache-2.0 |
garora/WebHooks | src/Microsoft.AspNet.WebHooks.Receivers.VSTS/Payloads/BasePayload.cs | 2652 | // Copyright (c) .NET Foundation. All rights reserved.
// Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System;
using Newtonsoft.Json;
namespace Microsoft.AspNet.WebHooks.Payloads
{
/// <summary>
/// Root object of payload sent for all types of events.
/// </summary>
/// <typeparam name="T">Type of resource within payload which differs depending on '<c>eventType</c>' field</typeparam>
public abstract class BasePayload<T> where T : BaseResource
{
    /// <summary>
    /// Gets or sets the subscription identifier which triggered the event.
    /// </summary>
    [JsonProperty("subscriptionId")]
    public string SubscriptionId { get; set; }

    /// <summary>
    /// Gets or sets the notification identifier within subscription.
    /// </summary>
    [JsonProperty("notificationId")]
    public int NotificationId { get; set; }

    /// <summary>
    /// Gets or sets the identifier of HTTP request.
    /// </summary>
    [JsonProperty("id")]
    public string Id { get; set; }

    /// <summary>
    /// Gets or sets the type of the event.
    /// </summary>
    [JsonProperty("eventType")]
    public string EventType { get; set; }

    /// <summary>
    /// Gets or sets the publisher identifier.
    /// </summary>
    [JsonProperty("publisherId")]
    public string PublisherId { get; set; }

    /// <summary>
    /// Gets or sets the message which describes the event.
    /// </summary>
    [JsonProperty("message")]
    public PayloadMessage Message { get; set; }

    /// <summary>
    /// Gets or sets the detailed message which describes the event.
    /// </summary>
    [JsonProperty("detailedMessage")]
    public PayloadMessage DetailedMessage { get; set; }

    /// <summary>
    /// Gets or sets the resource itself - data associated with corresponding event.
    /// </summary>
    [JsonProperty("resource")]
    public T Resource { get; set; }

    /// <summary>
    /// Gets or sets the resource version.
    /// </summary>
    [JsonProperty("resourceVersion")]
    public string ResourceVersion { get; set; }

    /// <summary>
    /// Gets or sets the resource containers.
    /// </summary>
    [JsonProperty("resourceContainers")]
    public PayloadResourceContainers ResourceContainers { get; set; }

    /// <summary>
    /// Gets or sets the date when HTTP request was created.
    /// </summary>
    [JsonProperty("createdDate")]
    public DateTime CreatedDate { get; set; }
}
}
| apache-2.0 |
serendipiddy/openflow-performance-visualizer | testingScripts/generate_offline_topology.py | 6359 | import sys, getopt, random, simplejson as json, copy, binascii
# MACs handed out so far; generate_mac() records into this to guarantee uniqueness.
existing_macs = list()
debug = False

def generate_mac():
    """Return a random, colon-separated MAC address unique across calls.

    Returns:
        str: e.g. 'a1:b2:c3:d4:e5:f6' (six two-digit lowercase hex octets).
    """
    new_mac = list()
    while (not new_mac) or (new_mac in existing_macs):
        new_mac = list()
        for i in range(6):
            # '%02x' yields the same two lowercase hex digits per octet as the
            # old binascii.b2a_hex(bytearray(...)) round-trip, but is portable
            # across Python 2 and 3 (joining bytes chunks breaks on Python 3).
            new_mac.append('%02x' % random.getrandbits(8))
        new_mac = ':'.join(new_mac)
    # The original checked membership in existing_macs but never populated it,
    # so uniqueness was not actually enforced; record the address here.
    existing_macs.append(new_mac)
    return new_mac
class Switch:
    """A mock switch with a zero-padded 16-digit dpid, ports and links."""

    def __init__(self, dpid):
        self.dpid = "%0.16d" % dpid
        self.port_count = 0
        self.ports = list()
        self.links = list()
        if debug:
            print (' new Switch %s' % dpid)

    def addPort(self):
        """Create the next sequentially-numbered port and return it."""
        self.port_count += 1
        port_id = '%0.8d' % self.port_count
        new_port = Port(port_id, self.dpid)
        self.ports.append(new_port)
        if debug:
            print (' adding port_no:%s on dpid:%s' % (self.port_count, self.dpid.replace('0', '')))
        return new_port

    def connectTo(self, switch):
        """Link this switch to another, creating one new port on each end."""
        local_port = self.addPort()
        remote_port = switch.addPort()
        new_link = Link(local_port, remote_port)
        self.links.append(new_link)
        switch.links.append(new_link)

    def __str__(self):
        return self.dpid
class Port:
    """One switch port: its number, owning dpid, display name and random MAC."""

    def __init__(self, id, dpid):
        self.port_no = id
        self.dpid = dpid
        # Strip only the zero PADDING from the padded ids.  The original
        # .replace('0', '') removed every '0' digit, so ids such as
        # '00000010' became '1' instead of '10' in the generated name.
        self.name = 's%s-eth%s' % (dpid.lstrip('0'), id.lstrip('0'))
        self.mac = generate_mac()
        if (debug): print (' new Port %s' % self.name)
class Link:
    """An undirected link joining two Port objects on different switches."""

    def __init__(self, port1, port2):
        self.port1 = port1
        self.port2 = port2
        self.write_count = 0
        if (debug): print (' new Link (%s, %s)' % (self.port1.name, self.port2.name))

    def getOther(self, switch):
        """Given a Switch object on this link, return the far end's dpid."""
        if switch.dpid == self.port1.dpid:
            return self.port2.dpid
        if switch.dpid == self.port2.dpid:
            return self.port1.dpid
        return ' switch %s not on link' % switch.dpid

    def getSrcPort(self, switch):
        """Given a dpid string, return this link's port on that switch."""
        if switch == self.port1.dpid:
            return self.port1
        if switch == self.port2.dpid:
            return self.port2
        return ' switch %s not on link' % switch

    def getDstPort(self, switch):
        """Given a dpid string, return this link's port on the far switch."""
        if switch == self.port1.dpid:
            return self.port2
        if switch == self.port2.dpid:
            return self.port1
        return ' switch %s not on link' % switch
def build_tree(nodes, breadth):
    '''Builds a tree from the list of nodes given. Each node
    has 'breadth' children, until nodes run out'''
    # Children are handed out in list order: nodes[0] is the root, and the
    # remaining nodes are consumed 'breadth' at a time as each parent
    # (including nodes already assigned as children) is visited.
    next_peer = copy.copy(nodes)
    next_peer.pop(0) # remove root
    if (debug): print('\n=== Building a tree topology ===')
    for parent in nodes: # step through nodes
        if (debug): print('current node %s' % parent.dpid)
        for i in range(breadth):
            if (next_peer):
                child = next_peer.pop(0) # peer with next unpaired node
                if (debug): print(' current child %s' % child.dpid)
                parent.connectTo(child)
            else:
                # every node has been attached somewhere; tree complete
                if (debug): print(' no more nodes')
                return
def build_linear(nodes):
    '''Builds a line of switches'''
    # Walk the list pairwise, connecting each switch to its successor.
    remaining = copy.copy(nodes)
    current = remaining.pop(0)
    if (debug): print('\n=== Building a linear topology ===')
    while remaining:
        if (debug): print('current node %s' % current.dpid)
        current.connectTo(remaining[0])
        current = remaining.pop(0)
    if (debug): print (' no more nodes')
    return
def getData(switches):
    """Build the per-switch port-statistics stub, keyed by dpid.

    Args:
        switches: iterable of Switch-like objects with .dpid and .ports.

    Returns:
        dict: dpid -> list of per-port stat dicts (counters are fixed stubs).
    """
    s_dict = dict()
    for s in switches:
        ports = list()
        for p in s.ports:
            # lstrip('0') removes only the zero padding; the original
            # .replace('0', '') deleted every '0' digit, turning e.g.
            # port '00000010' into '1' instead of '10'.
            ports.append({'port_no': p.port_no.lstrip('0'),
                          "rx_packets": 0, "tx_packets": 0,
                          "arrival_rate": 100.1, "depart_rate": 101.1,
                          "total_tx": 100, "total_rx": 100, "uptime": 0})
        s_dict[s.dpid] = ports
    return s_dict
def getController(switches):
    """Build the controller-statistics stub for the given switches.

    packet_in_total is scaled by the total number of ports; the other
    counters are fixed stub values.
    """
    switch_entries = [{'dpid': s.dpid, 'total_packet_in': 4} for s in switches]
    total_ports = sum(len(s.ports) for s in switches)
    return {"packet_in_delta": 3,
            "packet_in_total": total_ports * 10,
            "duration": 1,
            "up_time": 20,
            "switches": switch_entries}
def getSws(switches):
    """Serialize each switch and its ports into plain dicts for JSON output."""
    result = []
    for sw in switches:
        port_dicts = [{"hw_addr": p.mac, "name": p.name,
                       "port_no": p.port_no, "dpid": p.dpid}
                      for p in sw.ports]
        result.append({'dpid': sw.dpid, 'ports': port_dicts})
    return result
def getLinks(switches):
    """Flatten every switch's links into src/dst endpoint dicts.

    Each link is emitted once per switch that carries it (so a link shared by
    two visible switches appears twice, once from each side's perspective).
    """
    def _endpoint(port):
        return {"hw_addr": port.mac, "name": port.name,
                "port_no": port.port_no, "dpid": port.dpid}

    result = []
    for sw in switches:
        for link in sw.links:
            result.append({"src": _endpoint(link.getSrcPort(sw.dpid)),
                           "dst": _endpoint(link.getDstPort(sw.dpid))})
    return result
def main(argv):
    # Parse CLI options, build the requested topology, print a summary and
    # append the serialized topology as a JS variable to <type>_<n>[_<s>].txt.
    # NOTE: uses Python 2 'print help' statements; this script is Python 2 only.
    help = 'generate_offline_topology.py \n -n <num_nodes> \n -t <topology> \n -s <num_children_per_node> '
    try:
        opts, args = getopt.getopt(argv,"ht:n:s:",[]) # pulls out the specified options, ":" means followed by an argument
    except getopt.GetoptError:
        print help
        sys.exit(2)
    # defaults: 3-node tree with 2 children per node
    nodes = 3
    type = 'tree' # type = 'linear'
    split = 2
    for opt, arg in opts:
        if opt == '-h':
            print help
            sys.exit()
        elif opt == '-n': # opt in ("-i","--ifile"):
            nodes = int(arg)
        elif opt == '-t':
            type = arg
        elif opt == '-s':
            split = int(arg)
        else:
            print help
            sys.exit()
    filename = '%s_%s_%s.txt' % (type,nodes,split)
    # create the switches (dpids are assigned sequentially from 1)
    switches = list()
    dpid = 0
    for i in range(nodes):
        dpid += 1
        switches.append(Switch(dpid))
    if (type == 'tree'):
        build_tree(switches, split)
    elif (type == 'linear'):
        build_linear(switches)
        # linear topologies ignore the split factor, so drop it from the name
        filename = '%s_%s.txt' % (type,nodes)
    # print a human-readable summary of the generated topology
    print ('')
    for s in switches:
        print('dpid:%s ports:%d' % (s.dpid,len(s.ports)))
        for p in s.ports:
            print(' port:%s mac:%s' % (p.name, p.mac))
        for l in s.links:
            print(' linkTo:%s' % (l.getOther(s)))
    try:
        fd = open(filename,'a')
    except IOError:
        print('cannot open %s' % filename)
        sys.exit(2)
    # out.write(json.dumps(s_dict, indent=2 * ' '))
    # assemble the JSON document consumed by the visualizer
    data = getData(switches)
    controller = getController(switches)
    sws = getSws(switches)
    links = getLinks(switches)
    out = {'data':data,'controller': controller, 'switches':sws, 'links':links}
    fd.write('var scale_test_%s = ' % filename[0:-4])
    fd.write(json.dumps(out, indent=2 * ' '))
    print('\noutput to: %s' % filename[0:-4])
    fd.close()

if __name__ == "__main__":
    main(sys.argv[1:])
cbeust/testng | testng-core/src/test/java/test/configuration/Base3.java | 444 | package test.configuration;
import org.testng.Assert;
import org.testng.annotations.BeforeGroups;
public class Base3 {
  /** Flips to true once the before-groups configuration method has run. */
  private static boolean beforeGroupsInvoked = false;

  /** @return whether {@link #anotherBefore1()} has already executed */
  public static boolean getBefore() {
    return beforeGroupsInvoked;
  }

  /**
   * Before-groups configuration for group "cg34-1". Asserts that it runs at
   * most once and before the flag in ConfigurationGroups3SampleTest is set,
   * then records that it has run.
   */
  @BeforeGroups("cg34-1")
  public void anotherBefore1() {
    Assert.assertFalse(beforeGroupsInvoked);
    Assert.assertFalse(ConfigurationGroups3SampleTest.getF1());
    beforeGroupsInvoked = true;
  }
}
| apache-2.0 |
projectodd/stilts | stomp-server-core/src/main/java/org/projectodd/stilts/stomp/server/protocol/http/HttpResponder.java | 2358 | /*
* Copyright 2011 Red Hat, Inc, and individual contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.projectodd.stilts.stomp.server.protocol.http;
import org.jboss.logging.Logger;
import org.jboss.netty.channel.ChannelEvent;
import org.jboss.netty.channel.ChannelHandlerContext;
import org.jboss.netty.channel.ChannelUpstreamHandler;
import org.jboss.netty.channel.Channels;
import org.jboss.netty.channel.DownstreamMessageEvent;
import org.jboss.netty.channel.MessageEvent;
import org.jboss.netty.handler.codec.http.DefaultHttpResponse;
import org.jboss.netty.handler.codec.http.HttpResponse;
import org.jboss.netty.handler.codec.http.HttpResponseStatus;
import org.jboss.netty.handler.codec.http.HttpVersion;
import org.projectodd.stilts.stomp.protocol.StompFrame;
import org.projectodd.stilts.stomp.protocol.StompFrame.Command;
public class HttpResponder implements ChannelUpstreamHandler {

    public HttpResponder() {
    }

    /**
     * Intercepts upstream STOMP frames: every frame other than CONNECT is
     * acknowledged immediately with an empty HTTP 204 (No Content) response
     * sent back downstream, and the frame continues no further on this path.
     * CONNECT frames and all non-STOMP events are passed upstream untouched.
     */
    @Override
    public void handleUpstream(ChannelHandlerContext ctx, ChannelEvent e) throws Exception {
        StompFrame frame = null;
        if (e instanceof MessageEvent) {
            Object message = ((MessageEvent) e).getMessage();
            if (message instanceof StompFrame) {
                frame = (StompFrame) message;
            }
        }
        if (frame == null || frame.getCommand() == Command.CONNECT) {
            // Not a STOMP frame we answer for; let the pipeline continue.
            ctx.sendUpstream( e );
            return;
        }
        // Complete the pending HTTP request with an empty 204 response.
        HttpResponse httpResp = new DefaultHttpResponse( HttpVersion.HTTP_1_1, HttpResponseStatus.NO_CONTENT );
        httpResp.setHeader( "Content-Length", "0" );
        ctx.sendDownstream( new DownstreamMessageEvent( ctx.getChannel(), Channels.future( ctx.getChannel() ), httpResp, ctx.getChannel().getRemoteAddress() ) );
    }

    @SuppressWarnings("unused")
    private static Logger log = Logger.getLogger( HttpResponder.class );
}
| apache-2.0 |
Si-elegans/Web-based_GUI_Tools | static-src/cenet/scripts/layer.js | 2035 | // Copyright 2011 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/**
* @fileoverview Generic opacity slider manager - allows for multiple clients
* to control the opacity of layers.
*/
/**
 * Generic opacity manager: holds one opacity value per layer and notifies
 * every registered view callback whenever any value changes, so multiple
 * UI clients stay in sync.
 * @constructor
 */
o3v.LayerOpacityManager = function() {
  // If not null, an array of opacities sorted by outside-first.
  // Remains null until init() is called.
  this.layerOpacities_ = null;
  // View callbacks invoked (with no arguments) on any opacity change.
  this.callbacks = [];
};
/**
 * Resets the manager to track the given number of layers, each starting
 * fully opaque (1.0).
 * @param {number} numLayers Number of layers to manage.
 */
o3v.LayerOpacityManager.prototype.init = function(numLayers) {
  var opacities = [];
  for (var layer = 0; layer < numLayers; ++layer) {
    opacities.push(1.0);
  }
  this.layerOpacities_ = opacities;
};
/**
 * @return {?Array.<number>} The live per-layer opacity array (outside-first),
 *     or null if init() has not been called. Note: not a defensive copy.
 */
o3v.LayerOpacityManager.prototype.getLayerOpacities = function () {
  return this.layerOpacities_;
};

/**
 * Sets one layer's opacity and notifies all views except the originator.
 * @param {number} layer Index of the layer to change.
 * @param {number} value New opacity value.
 * @param {Function} from Callback of the originating view; skipped when
 *     notifying so a view does not redundantly update itself.
 */
o3v.LayerOpacityManager.prototype.setLayerOpacity =
    function (layer, value, from) {
  this.layerOpacities_[layer] = value;
  this.updateAllBut(from);
};

/**
 * Replaces every layer opacity at once and notifies all views except the
 * originator.
 * @param {Array.<number>} values New opacities, one per layer; copied.
 * @param {Function} from Callback of the originating view; skipped.
 */
o3v.LayerOpacityManager.prototype.setLayerOpacities = function(values, from) {
  this.layerOpacities_ = values.slice(); // makes copy
  this.updateAllBut(from);
};
/**
 * Registers a view callback to be invoked whenever any layer opacity
 * changes. Adding the same callback twice is a no-op.
 * @param {Function} callback Invoked with no arguments on opacity changes.
 */
o3v.LayerOpacityManager.prototype.addView = function(callback) {
  // indexOf performs the same identity comparison as the original manual
  // scan, but with strict equality and without the hand-rolled loop.
  if (this.callbacks.indexOf(callback) === -1) {
    this.callbacks.push(callback);
  }
};
/**
 * Invokes every registered view callback except the one that originated the
 * change, so the originating view is not redundantly refreshed.
 * @param {Function} from Callback to skip; pass a value that matches no
 *     registered callback (e.g. null) to notify every view.
 */
o3v.LayerOpacityManager.prototype.updateAllBut = function (from) {
  var numViews = this.callbacks.length;
  for (var i = 0; i < numViews; ++i) {
    var callback = this.callbacks[i];
    // Strict inequality: function identity comparison never needs coercion.
    if (callback !== from) {
      callback();
    }
  }
};
| apache-2.0 |
JayanthyChengan/dataverse | src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/DTAFileReader.java | 93031 | /*
Copyright (C) 2005-2012, by the President and Fellows of Harvard College.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Dataverse Network - A web application to share, preserve and analyze research data.
Developed at the Institute for Quantitative Social Science, Harvard University.
Version 3.0.
*/
package edu.harvard.iq.dataverse.ingest.tabulardata.impl.plugins.dta;
import java.io.*;
import java.nio.*;
import java.util.logging.*;
import java.util.*;
import java.util.regex.*;
import java.text.*;
import org.apache.commons.lang.*;
import org.apache.commons.codec.binary.Hex;
import javax.inject.Inject;
import javax.naming.Context;
import javax.naming.InitialContext;
import javax.naming.NamingException;
import edu.harvard.iq.dataverse.DataTable;
import edu.harvard.iq.dataverse.datavariable.DataVariable;
import edu.harvard.iq.dataverse.datavariable.VariableCategory;
//import edu.harvard.iq.dataverse.datavariable.VariableFormatType;
//import edu.harvard.iq.dataverse.datavariable.VariableServiceBean;
import edu.harvard.iq.dataverse.ingest.plugin.spi.*;
import edu.harvard.iq.dataverse.ingest.tabulardata.TabularDataFileReader;
import edu.harvard.iq.dataverse.ingest.tabulardata.spi.TabularDataFileReaderSpi;
import edu.harvard.iq.dataverse.ingest.tabulardata.TabularDataIngest;
/**
* ingest plugin for Stata DTA file format.
*
* This reader plugin has been fully re-implemented for the DVN 4.0;
* It is still borrows heavily from, and builds on the basis of the
* old implementation by Akio Sone, that was in use in the versions
* 2-3 of the DVN.
*
* @author Akio Sone at UNC-Odum
* @author landreev
*/
public class DTAFileReader extends TabularDataFileReader{
    private static final Logger logger = Logger.getLogger(DTAFileReader.class.getCanonicalName());
    //@Inject
    //VariableServiceBean varService;
    // static fields, STATA-specific constants, etc.
    // (should it all be isolated in some other class?)

    // Maps the release byte from the .dta "magic number" to a human-readable
    // version label; doubles as the registry of supported releases.
    private static Map<Integer, String> STATA_RELEASE_NUMBER =
            new HashMap<>();
    // Per-release variable-type codes: pre-111 releases use printable type
    // characters; release 111+ uses small negative integers.
    private static Map<String, Integer> release105type = new LinkedHashMap<>();
    private static Map<String, Integer> release111type = new LinkedHashMap<>();
    // Release number -> table of that release's layout constants.
    // (sic: "CONSTATNT" typo is preserved; renaming could break out-of-view code.)
    private static Map<Integer, Map<String, Integer>> CONSTATNT_TABLE = new LinkedHashMap<>();
    // One layout-constant table per supported release; populated in the
    // static initializer below and registered in CONSTATNT_TABLE.
    private static Map<String, Integer> release104constant = new LinkedHashMap<>();
    private static Map<String, Integer> release105constant = new LinkedHashMap<>();
    private static Map<String, Integer> release108constant = new LinkedHashMap<>();
    private static Map<String, Integer> release110constant = new LinkedHashMap<>();
    private static Map<String, Integer> release111constant = new LinkedHashMap<>();
    private static Map<String, Integer> release113constant = new LinkedHashMap<>();
    private static Map<String, Integer> release114constant = new LinkedHashMap<>();
    private static Map<String, Integer> release115constant = new LinkedHashMap<>();
    // Type byte -> storage width in bytes, per release family.
    private static Map<Byte, Integer> byteLengthTable105 = new HashMap<>();
    private static Map<Byte, Integer> byteLengthTable111 = new HashMap<>();
    // Type byte -> human-readable type name, per release family.
    private static Map<Byte, String> variableTypeTable105 = new LinkedHashMap<>();
    private static Map<Byte, String> variableTypeTable111 = new LinkedHashMap<>();
    private static Map<String, Integer> variableTypeMap = new LinkedHashMap<>();
    // Paired layout constants: earlier indices apply to older releases,
    // later indices to newer ones (see the per-release tables above).
    private static final int[] LENGTH_HEADER = {60, 109};
    private static final int[] LENGTH_LABEL = {32, 81};
    private static final int[] LENGTH_NAME = {9, 33};
    private static final int[] LENGTH_FORMAT_FIELD = {7, 12, 49};
    private static final int[] LENGTH_EXPANSION_FIELD ={0, 2, 4};
    private static final int[] DBL_MV_PWR = {333, 1023};
    // Populates the release registry, the per-release type/offset codes,
    // the per-release layout-constant tables, and the byte-length and
    // type-name lookup tables used while decoding.
    static {
        STATA_RELEASE_NUMBER.put(104, "rel_3");
        STATA_RELEASE_NUMBER.put(105, "rel_4or5");
        STATA_RELEASE_NUMBER.put(108, "rel_6");
        STATA_RELEASE_NUMBER.put(110, "rel_7first");
        STATA_RELEASE_NUMBER.put(111, "rel_7scnd");
        STATA_RELEASE_NUMBER.put(113, "rel_8_or_9");
        STATA_RELEASE_NUMBER.put(114, "rel_10"); // reading stata docs suggests
        // 114 means release 11 - ?
        // confused. -- L.A.
        STATA_RELEASE_NUMBER.put(115, "rel_12");
        // 115 is the *last* development of the "classic" Stata format.
        // STATA v. 13 introduced format 117 (116 was an in-house,
        // experimental version that was never released), it is a completely
        // new development, incompatible with the old format.

        // Type codes for pre-111 releases (printable characters: b/i/l/f/d).
        release105type.put("STRING", 127);
        release105type.put("BYTE", 98);
        release105type.put("INT", 105);
        release105type.put("LONG", 108);
        release105type.put("FLOAT", 102);
        release105type.put("DOUBLE0", 100);
        // Type codes for release 111+ (small negative integers; 0 = string).
        release111type.put("STRING", 0);
        release111type.put("BYTE", -5);
        release111type.put("INT", -4);
        release111type.put("LONG", -3);
        release111type.put("FLOAT", -2);
        release111type.put("DOUBLE", -1);

        // Per-release layout constants: header/label/name/format field
        // lengths, expansion-field length, and double missing-value exponent.
        release104constant.put("HEADER", LENGTH_HEADER[0]);
        release104constant.put("LABEL", LENGTH_LABEL[0]);
        release104constant.put("NAME", LENGTH_NAME[0]);
        release104constant.put("FORMAT", LENGTH_FORMAT_FIELD[0]);
        release104constant.put("EXPANSION", LENGTH_EXPANSION_FIELD[0]);
        release104constant.put("DBL_MV_PWR",DBL_MV_PWR[0]);
        CONSTATNT_TABLE.put(104, release104constant);
        release105constant.put("HEADER", LENGTH_HEADER[0]);
        release105constant.put("LABEL", LENGTH_LABEL[0]);
        release105constant.put("NAME", LENGTH_NAME[0]);
        release105constant.put("FORMAT", LENGTH_FORMAT_FIELD[1]);
        release105constant.put("EXPANSION", LENGTH_EXPANSION_FIELD[1]);
        release105constant.put("DBL_MV_PWR",DBL_MV_PWR[0]);
        CONSTATNT_TABLE.put(105, release105constant);
        release108constant.put("HEADER", LENGTH_HEADER[1]);
        release108constant.put("LABEL", LENGTH_LABEL[1]);
        release108constant.put("NAME", LENGTH_NAME[0]);
        release108constant.put("FORMAT", LENGTH_FORMAT_FIELD[1]);
        release108constant.put("EXPANSION", LENGTH_EXPANSION_FIELD[1]);
        release108constant.put("DBL_MV_PWR",DBL_MV_PWR[1]);
        CONSTATNT_TABLE.put(108, release108constant);
        release110constant.put("HEADER", LENGTH_HEADER[1]);
        release110constant.put("LABEL", LENGTH_LABEL[1]);
        release110constant.put("NAME", LENGTH_NAME[1]);
        release110constant.put("FORMAT", LENGTH_FORMAT_FIELD[1]);
        release110constant.put("EXPANSION", LENGTH_EXPANSION_FIELD[2]);
        release110constant.put("DBL_MV_PWR",DBL_MV_PWR[1]);
        CONSTATNT_TABLE.put(110, release110constant);
        release111constant.put("HEADER", LENGTH_HEADER[1]);
        release111constant.put("LABEL", LENGTH_LABEL[1]);
        release111constant.put("NAME", LENGTH_NAME[1]);
        release111constant.put("FORMAT", LENGTH_FORMAT_FIELD[1]);
        release111constant.put("EXPANSION", LENGTH_EXPANSION_FIELD[2]);
        release111constant.put("DBL_MV_PWR",DBL_MV_PWR[1]);
        CONSTATNT_TABLE.put(111, release111constant);
        release113constant.put("HEADER", LENGTH_HEADER[1]);
        release113constant.put("LABEL", LENGTH_LABEL[1]);
        release113constant.put("NAME", LENGTH_NAME[1]);
        release113constant.put("FORMAT", LENGTH_FORMAT_FIELD[1]);
        release113constant.put("EXPANSION", LENGTH_EXPANSION_FIELD[2]);
        release113constant.put("DBL_MV_PWR",DBL_MV_PWR[1]);
        CONSTATNT_TABLE.put(113, release113constant);
        release114constant.put("HEADER", LENGTH_HEADER[1]);
        release114constant.put("LABEL", LENGTH_LABEL[1]);
        release114constant.put("NAME", LENGTH_NAME[1]);
        release114constant.put("FORMAT", LENGTH_FORMAT_FIELD[2]);
        release114constant.put("EXPANSION", LENGTH_EXPANSION_FIELD[2]);
        release114constant.put("DBL_MV_PWR",DBL_MV_PWR[1]);
        CONSTATNT_TABLE.put(114, release114constant);
        release115constant.put("HEADER", LENGTH_HEADER[1]);
        release115constant.put("LABEL", LENGTH_LABEL[1]);
        release115constant.put("NAME", LENGTH_NAME[1]);
        release115constant.put("FORMAT", LENGTH_FORMAT_FIELD[2]);
        release115constant.put("EXPANSION", LENGTH_EXPANSION_FIELD[2]);
        release115constant.put("DBL_MV_PWR",DBL_MV_PWR[1]);
        CONSTATNT_TABLE.put(115, release115constant);

        // Storage widths (bytes) keyed by the raw type byte.
        byteLengthTable105.put((byte) 98,1);
        byteLengthTable105.put((byte)105,2);
        byteLengthTable105.put((byte)108,4);
        byteLengthTable105.put((byte)102,4);
        byteLengthTable105.put((byte)100,8);
        byteLengthTable111.put((byte)-5,1);
        byteLengthTable111.put((byte)-4,2);
        byteLengthTable111.put((byte)-3,4);
        byteLengthTable111.put((byte)-2,4);
        byteLengthTable111.put((byte)-1,8);

        // Human-readable type names keyed by the raw type byte.
        variableTypeTable105.put((byte) 98,"Byte");
        variableTypeTable105.put((byte)105,"Integer");
        variableTypeTable105.put((byte)108,"Long");
        variableTypeTable105.put((byte)102,"Float");
        variableTypeTable105.put((byte)100,"Double");
        variableTypeTable111.put((byte)-5,"Byte");
        variableTypeTable111.put((byte)-4,"Integer");
        variableTypeTable111.put((byte)-3,"Long");
        variableTypeTable111.put((byte)-2,"Float");
        variableTypeTable111.put((byte)-1,"Double");

        // Reverse lookup: type name -> release-111 type code.
        variableTypeMap.put("Byte", -5);
        variableTypeMap.put("Integer",-4);
        variableTypeMap.put("Long", -3);
        variableTypeMap.put("Float", -2);
        variableTypeMap.put("Double", -1);
        variableTypeMap.put("String", 0);
    }
    private static String[] MIME_TYPE = {"application/x-stata"};

    /** format-related constants */
    private static final int DTA_MAGIC_NUMBER_LENGTH = 4;
    private static final int NVAR_FIELD_LENGTH = 2;
    private static final int NOBS_FIELD_LENGTH = 4;
    private static final int TIME_STAMP_LENGTH = 18;
    private static final int VAR_SORT_FIELD_LENGTH = 2;
    private static final int VALUE_LABEL_HEADER_PADDING_LENGTH = 3;
    private static int MISSING_VALUE_BIAS = 26;

    // Missing-value sentinels; init() shifts these down by MISSING_VALUE_BIAS
    // for release 111+ files.
    private byte BYTE_MISSING_VALUE = Byte.MAX_VALUE;
    private short INT_MISSIG_VALUE = Short.MAX_VALUE;
    private int LONG_MISSING_VALUE = Integer.MAX_VALUE;

    // The 27 float missing-value codes (".", ".a" .. ".z") as hex literals.
    private static final List<Float> FLOAT_MISSING_VALUES = Arrays.asList(
        0x1.000p127f, 0x1.001p127f, 0x1.002p127f, 0x1.003p127f,
        0x1.004p127f, 0x1.005p127f, 0x1.006p127f, 0x1.007p127f,
        0x1.008p127f, 0x1.009p127f, 0x1.00ap127f, 0x1.00bp127f,
        0x1.00cp127f, 0x1.00dp127f, 0x1.00ep127f, 0x1.00fp127f,
        0x1.010p127f, 0x1.011p127f, 0x1.012p127f, 0x1.013p127f,
        0x1.014p127f, 0x1.015p127f, 0x1.016p127f, 0x1.017p127f,
        0x1.018p127f, 0x1.019p127f, 0x1.01ap127f);
    private Set<Float> FLOAT_MISSING_VALUE_SET = new HashSet<>(FLOAT_MISSING_VALUES);

    // The 27 double missing-value codes, analogous to the float list above.
    private static final List<Double> DOUBLE_MISSING_VALUE_LIST = Arrays.asList(
        0x1.000p1023, 0x1.001p1023, 0x1.002p1023, 0x1.003p1023, 0x1.004p1023,
        0x1.005p1023, 0x1.006p1023, 0x1.007p1023, 0x1.008p1023, 0x1.009p1023,
        0x1.00ap1023, 0x1.00bp1023, 0x1.00cp1023, 0x1.00dp1023, 0x1.00ep1023,
        0x1.00fp1023, 0x1.010p1023, 0x1.011p1023, 0x1.012p1023, 0x1.013p1023,
        0x1.014p1023, 0x1.015p1023, 0x1.016p1023, 0x1.017p1023, 0x1.018p1023,
        0x1.019p1023, 0x1.01ap1023);
    private Set<Double> DOUBLE_MISSING_VALUE_SET = new HashSet<>(DOUBLE_MISSING_VALUE_LIST);

    // Date/time formatters (all pinned to GMT in the static block below).
    // NOTE: SimpleDateFormat is not thread-safe; these are shared statics.
    private static SimpleDateFormat sdf_ymdhmsS = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS"); // sdf
    private static SimpleDateFormat sdf_ymd = new SimpleDateFormat("yyyy-MM-dd"); // sdf2
    private static SimpleDateFormat sdf_hms = new SimpleDateFormat("HH:mm:ss"); // stf
    private static SimpleDateFormat sdf_yw = new SimpleDateFormat("yyyy-'W'ww");

    // stata's calendar
    private static Calendar GCO_STATA = new GregorianCalendar(TimeZone.getTimeZone("GMT"));

    // Recognized Stata date/time display formats and their categories.
    private static String[] DATE_TIME_FORMAT= {
        "%tc", "%td", "%tw", "%tq","%tm", "%th", "%ty",
        "%d", "%w", "%q", "%m", "h", "%tb"
    };
    // New "business calendar format" has been added in Stata 12. -- L.A.
    private static String[] DATE_TIME_CATEGORY={
        "time", "date", "date", "date", "date", "date", "date",
        "date", "date", "date", "date", "date", "date"
    };
    private static Map<String, String> DATE_TIME_FORMAT_TABLE= new LinkedHashMap<>();
    private static long MILLISECONDS_PER_DAY = 24L * 60 * 60 * 1000;
    // Milliseconds from the Unix epoch to Stata's epoch (1960-01-01 GMT).
    private static long STATA_BIAS_TO_EPOCH;

    static {
        sdf_ymdhmsS.setTimeZone(TimeZone.getTimeZone("GMT"));
        sdf_ymd.setTimeZone(TimeZone.getTimeZone("GMT"));
        sdf_hms.setTimeZone(TimeZone.getTimeZone("GMT"));
        sdf_yw.setTimeZone(TimeZone.getTimeZone("GMT"));

        // set stata's calendar to 1960-01-01 00:00:00.000 GMT
        // (numeric Calendar field ids: 1=YEAR, 2=MONTH, 5=DAY_OF_MONTH, ...)
        GCO_STATA.set(1, 1960);// year
        GCO_STATA.set(2, 0); // month
        GCO_STATA.set(5, 1);// day of month
        GCO_STATA.set(9, 0);// AM(0) or PM(1)
        GCO_STATA.set(10, 0);// hh
        GCO_STATA.set(12, 0);// mm
        GCO_STATA.set(13, 0);// ss
        GCO_STATA.set(14, 0); // SS millisecond
        STATA_BIAS_TO_EPOCH = GCO_STATA.getTimeInMillis(); // = -315619200000

        for (int i=0; i<DATE_TIME_FORMAT.length; i++){
            DATE_TIME_FORMAT_TABLE.put(DATE_TIME_FORMAT[i],DATE_TIME_CATEGORY[i]);
        }
    }
    // instance fields //
    private static Logger dbgLog = Logger.getLogger(DTAFileReader.class.getPackage().getName());
    // NOTE(review): two loggers exist ("logger" above and "dbgLog"); both are
    // used throughout this class.

    // TODO:
    // add a comment explaining what this table is for:
    // -- L.A. 4.0
    private String[] valueLabelsLookupTable = null;

    /*
     * StrinLengthTable stores the byte lengths of string variables (these are
     * the same fixed values for every string column).
     * -- L.A. 4.0
     */
    private Map<Integer, Integer> StringLengthTable = new LinkedHashMap<>();

    // Release-specific lookup tables; init() points these at the appropriate
    // static 105/111 tables once the release number is known.
    private Map<String, Integer> typeOffsetTable ;
    private Map<String, Integer> constantTable ;
    private Map<Byte, Integer> byteLengthTable;
    private Map<Byte, String> variableTypeTable;

    private NumberFormat twoDigitFormatter = new DecimalFormat("00");
    private NumberFormat doubleNumberFormatter = new DecimalFormat();

    // The decode result handed back from read().
    TabularDataIngest ingesteddata = new TabularDataIngest();
    private DataTable dataTable = new DataTable();

    // Populated by decodeHeader()/init() while parsing the file header:
    private int releaseNumber;
    private int headerLength;
    private int dataLabelLength;
    private boolean isLittleEndian = false;
    private int bytes_per_row;

    /* variableTypes is a list of string values representing the type of
     * data values *stored* in the file - "byte", "integer", "float", "string",
     * etc. We need this information as we're reading the data, to know how
     * many bytes to read for every object type and how to convert the binary
     * data into the proper Java type.
     * It's important to note that these types are *Stata* types - the types
     * of the variables on the DVN side may change (see below).
     * The variableTypesFinal will describe the data values once they have
     * been read and stored in the tab. file. This is an important distinction:
     * for example, the time/data values are stored as binary numeric values
     * in Stata files, but we'll be storing them as strings in the DVN tabular
     * files.
     */
    private String[] variableTypes=null;
    private String[] dateVariableFormats=null;
    private int value_label_table_length;
    private static final String MissingValueForTabDelimitedFile = "";
// Constructor -----------------------------------------------------------//
    /**
     * Constructs a <code>DTAFileReader</code> instance with a
     * <code>TabularDataFileReaderSpi</code> object.
     *
     * @param originator the <code>TabularDataFileReaderSpi</code> service
     *        provider that created this reader.
     */
    public DTAFileReader(TabularDataFileReaderSpi originator){
        super(originator);
    }
// Methods ---------------------------------------------------------------//
/*
* This method configures Stata's release-specific parameters:
*/
    /**
     * Configures Stata's release-specific parameters once releaseNumber has
     * been read from the file header (called from decodeHeader()): selects
     * the 105- or 111-family type/length tables, adjusts the missing-value
     * sentinels for release 111+, and derives header/label lengths from the
     * per-release constant table.
     *
     * @throws IOException declared for historic reasons (the JNDI lookup
     *         below is commented out); no longer thrown directly here.
     */
    private void init() throws IOException {
        //
        if (dbgLog.isLoggable(Level.INFO)) dbgLog.info("release number="+releaseNumber);

        if (releaseNumber < 111) {
            typeOffsetTable = release105type;
            variableTypeTable = variableTypeTable105;
            byteLengthTable = byteLengthTable105;
        } else {
            typeOffsetTable = release111type;
            variableTypeTable = variableTypeTable111;
            byteLengthTable = byteLengthTable111;
            // Release 111+ shifts the missing-value sentinels down by the bias
            // to make room for the extended missing codes (.a - .z).
            BYTE_MISSING_VALUE -= MISSING_VALUE_BIAS;
            INT_MISSIG_VALUE -= MISSING_VALUE_BIAS;
            LONG_MISSING_VALUE -= MISSING_VALUE_BIAS;
        }
        // Value-label length fields are 2 bytes through release 105, 4 after.
        if (releaseNumber <= 105){
            value_label_table_length = 2;
        } else {
            value_label_table_length = 4;
        }
        if (dbgLog.isLoggable(Level.FINE)) dbgLog.fine("type-offset table to be used:\n"+typeOffsetTable);

        constantTable = CONSTATNT_TABLE.get(releaseNumber);
        // Header length excludes the 4-byte magic number already consumed.
        headerLength = constantTable.get("HEADER") - DTA_MAGIC_NUMBER_LENGTH;
        dataLabelLength = headerLength - (NVAR_FIELD_LENGTH +
            NOBS_FIELD_LENGTH + TIME_STAMP_LENGTH);
        if (dbgLog.isLoggable(Level.FINE)) dbgLog.fine("data_label_length="+dataLabelLength);
        if (dbgLog.isLoggable(Level.FINE)) dbgLog.fine("constant table to be used:\n"+constantTable);

        // Plain (ungrouped) full-precision formatting for double output.
        doubleNumberFormatter.setGroupingUsed(false);
        doubleNumberFormatter.setMaximumFractionDigits(340);
        /*
         * it's no longer necessary to use the variable service to look up
         * various type entities:
         * -- L.A. 4.0 beta 9
        Context ctx = null;
        try {
            ctx = new InitialContext();
            varService = (VariableServiceBean) ctx.lookup("java:global/dataverse-4.0/VariableServiceBean");
        } catch (NamingException nex) {
            try {
                ctx = new InitialContext();
                varService = (VariableServiceBean) ctx.lookup("java:global/dataverse/VariableServiceBean");
            } catch (NamingException nex2) {
                if (dbgLog.isLoggable(Level.INFO)) dbgLog.info("Could not look up initial context, or the variable service in JNDI!");
                throw new IOException ("Could not look up initial context, or the variable service in JNDI!");
            }
        }
        */
    }
@Override
public TabularDataIngest read(BufferedInputStream stream, File dataFile) throws IOException {
dbgLog.info("***** DTAFileReader: read() start *****");
if (dataFile != null) {
throw new IOException ("this plugin does not support external raw data files");
}
try {
decodeHeader(stream);
decodeDescriptors(stream);
decodeVariableLabels(stream);
if (releaseNumber!=104) {
decodeExpansionFields(stream);
}
decodeData(stream);
decodeValueLabels(stream);
ingesteddata.setDataTable(dataTable);
} catch (IllegalArgumentException iaex) {
throw new IOException(iaex.getMessage());
}
dbgLog.info("***** DTAFileReader: read() end *****");
return ingesteddata;
}
    /**
     * Decodes the fixed-length .dta header: the 4-byte "magic number"
     * (release byte, byte-order byte, file-type byte, padding), then the
     * variable count, observation count, data label and - for releases
     * after 104 - the time stamp.
     * Side effects: sets releaseNumber and isLittleEndian, calls init(),
     * and populates dataTable with format info, var/case counts and empty
     * DataVariable objects.
     *
     * @param stream positioned at the very start of the file
     * @throws IOException on read failure
     * @throws IllegalArgumentException if the file is not a supported
     *         classic-format Stata file
     */
    private void decodeHeader(BufferedInputStream stream) throws IOException {
        dbgLog.fine("***** decodeHeader(): start *****");

        if (stream == null) {
            throw new IllegalArgumentException("stream == null!");
        }

        dbgLog.fine("reading the header segument 1: 4 byte\n");
        byte[] magic_number = new byte[DTA_MAGIC_NUMBER_LENGTH];
        int nbytes = stream.read(magic_number, 0, DTA_MAGIC_NUMBER_LENGTH);

        if (nbytes == 0) {
            // NOTE(review): only a zero-length read is rejected; a short
            // (1-3 byte) or -1 read would slip past this check.
            throw new IOException();
        }

        if (dbgLog.isLoggable(Level.FINE)) {
            dbgLog.fine("hex dump: 1st 4bytes =>"
                    + new String(Hex.encodeHex(magic_number)) + "<-");
        }
        logger.info("magic_number[0]: " + magic_number[0]);
        logger.info("magic_number[1]: " + magic_number[1]);
        logger.info("magic_number[2]: " + magic_number[2]);
        // Byte [2] is the file-type flag (must be 1); byte [1] is the byte
        // order (1=big-endian, 2=little-endian); byte [0] is the release.
        if (magic_number[2] != 1) {
            dbgLog.fine("3rd byte is not 1: given file is not stata-dta type");
            // FIXME: Figure out the magic number for Stata 14.
            // FIXME: Figure out the magic number for Stata 15.
            throw new IllegalArgumentException("The file is not in a STATA format that we can read or support.");
        } else if ((magic_number[1] != 1) && (magic_number[1] != 2)) {
            dbgLog.fine("2nd byte is neither 0 nor 1: this file is not stata-dta type");
            throw new IllegalArgumentException("given file is not stata-dta type");
        } else if (!STATA_RELEASE_NUMBER.containsKey((int) magic_number[0])) {
            dbgLog.fine("1st byte (" + magic_number[0]
                    + ") is not within the ingestable range [rel. 3-10]:"
                    + "we cannot ingest this Stata file.");
            throw new IllegalArgumentException("given file is not stata-dta type");
        } else {
            releaseNumber = magic_number[0];
            // Configure all release-specific tables before reading further.
            init();

            dataTable.setOriginalFileFormat(MIME_TYPE[0]);
            /*
             * releaseNumber:
             * for storing in the datatable, we are converting the numeric Stata
             * release number into a more user friendly "version number";
             * e.g., "release number 115" = "Stata v. 12"
             * -- L.A. 4.0
             */
            dataTable.setOriginalFormatVersion(STATA_RELEASE_NUMBER.get(releaseNumber));
            // NOTE(review): placeholder UNF - presumably replaced later in the
            // ingest pipeline; confirm before relying on this value.
            dataTable.setUnf("UNF:6:FILEFILEFILEFILE");

            if (dbgLog.isLoggable(Level.FINE)) {
                dbgLog.fine("this file is stata-dta type: "
                        + STATA_RELEASE_NUMBER.get(releaseNumber)
                        + " (that means Stata version " + releaseNumber + ")");
            }
            if (dbgLog.isLoggable(Level.FINE)) {
                dbgLog.fine("Endian(file)(Big: 1; Little:2)=" + magic_number[1]);
            }

            /*
             * byte order: defined in the second byte of the "magic number":
             */
            if (magic_number[1] == 2) {
                isLittleEndian = true;
                dbgLog.fine("Reversal of the bytes is necessary to decode "
                        + "multi-byte fields");
            }
            if (dbgLog.isLoggable(Level.FINE)) {
                dbgLog.fine("Endian of this platform:" + ByteOrder.nativeOrder().toString());
            }
        }

        dbgLog.fine("reading the remaining header segument 2: 60 or 109-byte");

        byte[] header = new byte[headerLength];
        nbytes = stream.read(header, 0, headerLength);

        // 1. number of variables: short (2 bytes)
        ByteBuffer bbnvar = ByteBuffer.wrap(header, 0, NVAR_FIELD_LENGTH);
        ByteBuffer dupnvar = bbnvar.duplicate();
        short short_nvar = dupnvar.getShort();

        if (dbgLog.isLoggable(Level.FINE)) {
            dbgLog.fine("get original short view(nvar)=" + short_nvar);
        }
        if (isLittleEndian) {
            bbnvar.order(ByteOrder.LITTLE_ENDIAN);
        }
        short shrt_nvar = bbnvar.getShort();
        dataTable.setVarQuantity(new Long(shrt_nvar));
        int nvar = shrt_nvar;

        if (dbgLog.isLoggable(Level.INFO)) {
            dbgLog.info("number of variables(nvar)=" + nvar);
        }

        // 4.0 Initialize dataverse variable objects:
        List<DataVariable> variableList = new ArrayList<>();

        for (int i = 0; i < nvar; i++) {
            DataVariable dv = new DataVariable(i, dataTable);
            variableList.add(dv);
        }

        dataTable.setDataVariables(variableList);

        // setup variableTypeList
        variableTypes = new String[nvar];
        // and the date/time format list:
        dateVariableFormats = new String[nvar];

        // 2. number of observations: int (4 bytes)
        ByteBuffer nobs = ByteBuffer.wrap(header, NVAR_FIELD_LENGTH,
                NOBS_FIELD_LENGTH);
        ByteBuffer dupnobs = nobs.duplicate();
        int int_dupnobs = dupnobs.getInt();
        if (dbgLog.isLoggable(Level.FINE)) {
            dbgLog.fine("raw nobs=" + int_dupnobs);
        }
        if (isLittleEndian) {
            nobs.order(ByteOrder.LITTLE_ENDIAN);
        }
        int int_nobs = nobs.getInt();
        if (dbgLog.isLoggable(Level.FINE)) {
            dbgLog.fine("reversed nobs=" + int_nobs);
        }

        // smd.getFileInformation().put("caseQnty", new Integer(int_nobs));
        dataTable.setCaseQuantity(new Long(int_nobs));

        /*
         the "data label" -
         note that we are not using this label for anything
         (wonder what it is though? can we use it somewhere?)
         but we still need to extract it from the byte stream,
         since the offsets of the objects stored further up
         are calculated relative to it. -- L.A., 4.0
         */
        // 3. data_label: 32 or 81 bytes
        int dl_offset = NVAR_FIELD_LENGTH + NOBS_FIELD_LENGTH;
        if (dbgLog.isLoggable(Level.FINE)) {
            dbgLog.fine("dl_offset=" + dl_offset);
        }
        if (dbgLog.isLoggable(Level.FINE)) {
            dbgLog.fine("data_label_length=" + dataLabelLength);
        }

        String data_label = new String(Arrays.copyOfRange(header, dl_offset,
                (dl_offset + dataLabelLength)), "ISO-8859-1");

        if (dbgLog.isLoggable(Level.FINE)) {
            dbgLog.fine("data_label_length=" + data_label.length());
        }
        if (dbgLog.isLoggable(Level.FINE)) {
            dbgLog.fine("loation of the null character=" + data_label.indexOf(0));
        }

        String dataLabel = getNullStrippedString(data_label);
        if (dbgLog.isLoggable(Level.FINE)) {
            dbgLog.fine("data_label_length=" + dataLabel.length());
        }
        if (dbgLog.isLoggable(Level.FINE)) {
            dbgLog.fine("data_label=[" + dataLabel + "]");
        }

        // smd.getFileInformation().put("dataLabel", dataLabel);

        /* end of "data label" */
        // 4. time_stamp: ASCII String (18 bytes)
        // added after release 4
        if (releaseNumber > 104) {
            int ts_offset = dl_offset + dataLabelLength;
            String time_stamp = new String(Arrays.copyOfRange(header, ts_offset,
                    ts_offset + TIME_STAMP_LENGTH), "ISO-8859-1");
            if (dbgLog.isLoggable(Level.FINE)) {
                dbgLog.fine("time_stamp_length=" + time_stamp.length());
            }
            if (dbgLog.isLoggable(Level.FINE)) {
                dbgLog.fine("loation of the null character=" + time_stamp.indexOf(0));
            }

            String timeStamp = getNullStrippedString(time_stamp);
            if (dbgLog.isLoggable(Level.FINE)) {
                dbgLog.fine("timeStamp_length=" + timeStamp.length());
            }
            if (dbgLog.isLoggable(Level.FINE)) {
                dbgLog.fine("timeStamp=[" + timeStamp + "]");
            }
        }
    }
    /**
     * Decodes the five descriptor sections that follow the header, in the
     * fixed order mandated by the .dta format: variable types, variable
     * names, the sort list, display formats, and value-label names.
     *
     * @param stream positioned immediately after the header
     * @throws IOException on read failure or malformed descriptors
     */
    private void decodeDescriptors(BufferedInputStream stream) throws IOException {

        dbgLog.fine("decodeDescriptors(): start");

        if (stream == null) {
            throw new IllegalArgumentException("stream == null!");
        }

        int nvar = dataTable.getVarQuantity().intValue();

        // part 1: variable type list
        decodeDescriptorVarTypeList(stream, nvar);

        // part 2: Variable_Name List
        // name length= 9(release 105) or 33 (release 111) each null terminated
        decodeDescriptorVarNameList(stream, nvar);

        // Part 3: variable sort list
        // length of this field = short(2bytes)*(nvar +1)
        decodeDescriptorVarSortList(stream, nvar);

        // Part 4: variable format list
        // VAR_FORMAT_FIELD_LENGTH (7,12, 49 bytes) * navar
        // null-terminated string
        decodeDescriptorVariableFormat(stream, nvar);

        // Part 5: value-label list
        // variable_name * nvar null-terminated String
        decodeDescriptorValueLabel(stream, nvar);

        if (dbgLog.isLoggable(Level.FINE)) {
            dbgLog.fine("decodeDescriptors(): end");
        }
    }
    /**
     * Reads the nvar-byte variable-type list and, for each variable, records
     * its Stata storage type in variableTypes[], accumulates bytes_per_row,
     * fixed string lengths in StringLengthTable, and sets the corresponding
     * DataVariable's type/interval on the Dataverse side.
     *
     * @param stream positioned at the start of the type list
     * @param nvar   number of variables declared in the header
     * @throws IOException on read failure or an unrecognized type byte
     */
    private void decodeDescriptorVarTypeList(BufferedInputStream stream, int nvar) throws IOException {
        byte[] typeList = new byte[nvar];
        // note: the offset param of read() is relative to
        // the current position, not absolute position
        int nbytes = stream.read(typeList, 0, nvar);
        //printHexDump(typeList, "variable type list");
        if (nbytes == 0) {
            throw new IOException("reading the descriptior: no byte was read");
        }
        /*
         Stata internal constants representing variable type information;
         these were kindly provided by Akio:
         111 type
         Type:   b   i   l   f   d (byte, int, long, float, double)
         byte:  -5  -4  -3  -2  -1 (signed byte = java's byte type)
         byte: 251 252 253 254 255 (unsigned byte)
         HEX:   FB  FC  FD  FE  FF
         105 type(type chars correspond to their hex/decimal expressions
         Type:   b   i   l   f   d (byte, int, long, float, double)
         byte:  98 105 108 102 100 (signed byte = java's byte type)
         byte:  98 105 108 102 100 (unsigned byte)
         HEX:   62  69  6C  66  64
         */
        if (dbgLog.isLoggable(Level.FINE)) dbgLog.fine("type_offset_table:\n" + typeOffsetTable);

        bytes_per_row = 0;

        for (int i = 0; i < typeList.length; i++) {
            if (dbgLog.isLoggable(Level.FINE)) dbgLog.fine(i + "-th value=" + typeList[i]);

            DataVariable dataVariable = dataTable.getDataVariables().get(i);
            /*
             * How Stata types correspond to the DVN types:
             * "Byte", "Integer" and "Long" become Numeric, Discrete (unless date value);
             * "Float" and "Double" become Numeric, Continuous (unless date value);
             * "String" becomes String;
             * Date/time values stored as numeric types above, are converted into
             * Strings.
             * -- L.A. 4.0
             */
            if (byteLengthTable.containsKey(typeList[i])) {
                // A known fixed-width numeric type for this release family.
                bytes_per_row += byteLengthTable.get(typeList[i]);
                variableTypes[i] = variableTypeTable.get(typeList[i]);
                String typeLabel = variableTypes[i];

                if (typeLabel != null) {
                    dataVariable.setTypeNumeric();
                    if (typeLabel.equals("Byte") || typeLabel.equals("Integer") || typeLabel.equals("Long")) {
                        // these are treated as discrete:
                        dataVariable.setIntervalDiscrete();
                    } else if (typeLabel.equals("Float") || typeLabel.equals("Double")) {
                        // these are treated as contiuous:
                        dataVariable.setIntervalContinuous();
                    } else {
                        throw new IOException("Unrecognized type label: "+typeLabel+" for Stata type value byte "+typeList[i]+".");
                    }
                } else {
                    throw new IOException("No entry in the known types table for Stata type value byte "+typeList[i]+".");
                }
            } else {
                // Not a numeric type byte: must be a fixed-length string
                // (strXXX), whose length is encoded in the type byte itself.
                // pre-111 string type
                if (releaseNumber < 111) {
                    // Type bytes are stored unsigned; recover the 0-255 value.
                    int stringType = 256 + typeList[i];
                    if (stringType >= typeOffsetTable.get("STRING")) {
                        int string_var_length = stringType - typeOffsetTable.get("STRING");
                        if (dbgLog.isLoggable(Level.FINE)) dbgLog.fine("string_var_length=" + string_var_length);
                        bytes_per_row += string_var_length;

                        variableTypes[i] = "String";
                        dataVariable.setTypeCharacter();
                        dataVariable.setIntervalDiscrete();
                        StringLengthTable.put(i, string_var_length);

                    } else {
                        throw new IOException(
                                "unknown variable type was detected: reading errors?");
                    }
                } else if (releaseNumber >= 111) {
                    // post-111 string type
                    if (dbgLog.isLoggable(Level.FINE)) dbgLog.fine("DTA reader: typeList[" + i + "]=" + typeList[i]);

                    // if the size of strXXX type is less than 128,
                    // the value of typeList[i] will be equal to that;
                    // if however it is >= 128, typeList[i] = (size - 256)
                    // i.e. it'll be a negative value:
                    int stringType = ((typeList[i] > 0) &&
                            (typeList[i] <= 127)) ? typeList[i] : 256 + typeList[i];

                    if (stringType >= typeOffsetTable.get("STRING")) {
                        int string_var_length = stringType - typeOffsetTable.get("STRING");
                        if (dbgLog.isLoggable(Level.FINE)) dbgLog.fine("DTA reader: string_var_length=" + string_var_length);
                        bytes_per_row += string_var_length;

                        variableTypes[i] = "String";
                        dataVariable.setTypeCharacter();
                        dataVariable.setIntervalDiscrete();
                        StringLengthTable.put(i, string_var_length);

                    } else {
                        throw new IOException(
                                "unknown variable type was detected: reading errors?");
                    }
                } else {
                    throw new IOException("uknown release number ");
                }

            }
            if (dbgLog.isLoggable(Level.FINE)) dbgLog.fine(i + "=th\t sum=" + bytes_per_row);
        }
        if (dbgLog.isLoggable(Level.FINE)) {
            dbgLog.fine("bytes_per_row(final)=" + bytes_per_row);
            dbgLog.fine("variableTypes:\n" + Arrays.deepToString(variableTypes));
            dbgLog.fine("StringLengthTable=" + StringLengthTable);
        }

    }
private void decodeDescriptorVarNameList(BufferedInputStream stream, int nvar) throws IOException {
int length_var_name = constantTable.get("NAME");
int length_var_name_list = length_var_name * nvar;
if (dbgLog.isLoggable(Level.FINE)) dbgLog.fine("length_var_name_list=" + length_var_name_list);
byte[] variableNameBytes = new byte[length_var_name_list];
int nbytes = stream.read(variableNameBytes, 0, length_var_name_list);
if (nbytes == 0) {
throw new IOException("reading the var name list: no var name was read");
}
int offset_start = 0;
int offset_end = 0;
for (DataVariable dataVariable: dataTable.getDataVariables()) {
offset_end += length_var_name;
String vari = new String(Arrays.copyOfRange(variableNameBytes, offset_start,
offset_end), "ISO-8859-1");
String varName = getNullStrippedString(vari);
dataVariable.setName(varName);
dbgLog.fine("next name=[" + varName + "]");
offset_start = offset_end;
}
}
private void decodeDescriptorVarSortList(BufferedInputStream stream, int nvar) throws IOException {
/*
* Whatever this "var sort list" is, we don't seem to be using this
* information for any purposes in particular. However, we need to read
* the bytes, to skip to the next section in the stream, if nothing else.
* -- L.A. 4.0
*/
int length_var_sort_list = VAR_SORT_FIELD_LENGTH * (nvar + 1);
if (dbgLog.isLoggable(Level.FINE)) dbgLog.fine("length_var_sort_list=" + length_var_sort_list);
byte[] varSortList = new byte[length_var_sort_list];
short[] variableSortList = new short[nvar + 1];
int nbytes = stream.read(varSortList, 0, length_var_sort_list);
if (nbytes == 0) {
throw new IOException("reading error: the varSortList");
}
int offset_start = 0;
for (int i = 0; i <= nvar; i++) {
ByteBuffer bb_varSortList = ByteBuffer.wrap(varSortList,
offset_start, VAR_SORT_FIELD_LENGTH);
if (isLittleEndian) {
bb_varSortList.order(ByteOrder.LITTLE_ENDIAN);
}
variableSortList[i] = bb_varSortList.getShort();
offset_start += VAR_SORT_FIELD_LENGTH;
}
if (dbgLog.isLoggable(Level.FINE)) dbgLog.fine("variableSortList=" + Arrays.toString(variableSortList));
}
/* Variable Formats are used exclusively for time and date variables.
* -- L.A. 4.0
*/
private void decodeDescriptorVariableFormat(BufferedInputStream stream, int nvar) throws IOException {
int length_var_format = constantTable.get("FORMAT");
int length_var_format_list = length_var_format * nvar;
if (dbgLog.isLoggable(Level.FINE)) dbgLog.fine("length_var_format_list=" + length_var_format_list);
byte[] variableFormatList = new byte[length_var_format_list];
int nbytes = stream.read(variableFormatList, 0, length_var_format_list);
if (nbytes == 0) {
throw new IOException("reading var formats: no format was read");
}
int offset_start = 0;
int offset_end = 0;
for (int i = 0; i < nvar; i++) {
offset_end += length_var_format;
String vari = new String(Arrays.copyOfRange(variableFormatList, offset_start,
offset_end), "ISO-8859-1");
String variableFormat = getNullStrippedString(vari);
if (dbgLog.isLoggable(Level.FINE)) dbgLog.fine(i + "-th format=[" + variableFormat + "]");
String variableFormatKey = null;
if (variableFormat.startsWith("%t")) {
variableFormatKey = variableFormat.substring(0, 3);
} else {
variableFormatKey = variableFormat.substring(0, 2);
}
if (dbgLog.isLoggable(Level.FINE)) dbgLog.fine(i + " th variableFormatKey=" + variableFormatKey);
/*
* Now, let's check if this format is a known time or date format.
* If so, note that this changes the storage type of the variable!
* i.e., times and dates are stored as binary numeric values, but on
* the DVN side/in the tab files they will become strings.
* TODO: it kinda does look like we can get rid of the variableFormats[]
* list; these formats are only used if this is a recognized
* "date/time datum" (see below); so then it looks like we can
* extract this info from the DataVariable "formatschemaname".
* -- L.A. 4.0
*/
if (DATE_TIME_FORMAT_TABLE.containsKey(variableFormatKey)) {
DataVariable dataVariable = dataTable.getDataVariables().get(i);
// TODO: revisit the whole "formatschemaname" thing; -- L.A.
// Instead of populating this field with the Stata's internal
// format token (??), we should put the actual format of the
// values that we store in the tab file. And the internal
// STATA format we'll keep in this array for now:
dateVariableFormats[i] = variableFormat;
//dataTable.getDataVariables().get(i).setFormatSchemaName(variableFormat);
// TODO: make sure we do save the real format (as .setFormat() somewhere else!)
dataVariable.setFormatCategory(DATE_TIME_FORMAT_TABLE.get(variableFormatKey));
if (dbgLog.isLoggable(Level.FINE)) dbgLog.fine(i + "th var: category=" +
DATE_TIME_FORMAT_TABLE.get(variableFormatKey));
dataVariable.setTypeCharacter();
dataVariable.setIntervalDiscrete();
}
offset_start = offset_end;
}
}
private void decodeDescriptorValueLabel(BufferedInputStream stream, int nvar) throws IOException {
valueLabelsLookupTable = new String[nvar];
int length_label_name = constantTable.get("NAME");
int length_label_name_list = length_label_name * nvar;
dbgLog.fine("length_label_name=" + length_label_name_list);
byte[] labelNameList = new byte[length_label_name_list];
String[] labelNames = new String[nvar];
int nbytes = stream.read(labelNameList, 0, length_label_name_list);
if (nbytes == 0) {
throw new IOException("reading value-label list:: no var name was read");
}
int offset_start = 0;
int offset_end = 0;
for (int i = 0; i < nvar; i++) {
offset_end += length_label_name;
String vari = new String(Arrays.copyOfRange(labelNameList, offset_start,
offset_end), "ISO-8859-1");
labelNames[i] = getNullStrippedString(vari);
dbgLog.fine(i + "-th label=[" + labelNames[i] + "]");
offset_start = offset_end;
}
dbgLog.fine("labelNames=\n" + StringUtils.join(labelNames, ",\n") + "\n");
for (int i = 0; i < nvar; i++) {
if ((labelNames[i] != null) && (!labelNames[i].isEmpty())) {
valueLabelsLookupTable[i] = labelNames[i];
}
}
}
private void decodeVariableLabels(BufferedInputStream stream) throws IOException {
dbgLog.fine("decodeVariableLabels(): start");
if (stream == null) {
throw new IllegalArgumentException("stream == null!");
}
// variable label length (32 or 81 bytes)*nvar, each null-terminated
// int nvar = (Integer)smd.getFileInformation().get("varQnty");
int nvar = dataTable.getVarQuantity().intValue();
int length_var_label = constantTable.get("LABEL");
int length_var_label_list = length_var_label * nvar;
if (dbgLog.isLoggable(Level.FINE)) {
dbgLog.fine("length_label_name=" + length_var_label_list);
}
byte[] variableLabelBytes = new byte[length_var_label_list];
int nbytes = stream.read(variableLabelBytes, 0, length_var_label_list);
if (nbytes == 0) {
throw new IOException("reading variable label list: no label was read");
}
int offset_start = 0;
int offset_end = 0;
for (int i = 0; i < nvar; i++) {
offset_end += length_var_label;
String vari = new String(Arrays.copyOfRange(variableLabelBytes, offset_start,
offset_end), "ISO-8859-1");
String variableLabelParsed = getNullStrippedString(vari);
if (dbgLog.isLoggable(Level.FINE)) {
dbgLog.fine(i + "-th label=[" + variableLabelParsed + "]");
}
offset_start = offset_end;
dataTable.getDataVariables().get(i).setLabel(variableLabelParsed);
}
dbgLog.fine("decodeVariableLabels(): end");
}
/*
* We don't seem to be using any of these "expansion fields" - whatever
* they are; but we need to read the section, to skip to the next one in
* the byte stream, if nothing else.
* -- L.A. 4.0
* TODO: ok, need to figure out what these are. -- AUG 6 2014
*/
private void decodeExpansionFields(BufferedInputStream stream) throws IOException {
dbgLog.fine("***** decodeExpansionFields(): start *****");
if (stream ==null){
throw new IllegalArgumentException("stream == null!");
}
// Added since release 105
// [1-byte byte_field][short(2)/int(4)_field][variable_field whose
// length is specified by the previous short/int field]
int int_type_expansion_field = constantTable.get("EXPANSION");
if (dbgLog.isLoggable(Level.FINE)) dbgLog.fine("int_type_expansion_field="+int_type_expansion_field);
while(true){
byte[] firstByte = new byte[1];
byte[] lengthBytes = new byte[int_type_expansion_field];
int nbyte = stream.read(firstByte, 0, 1);
dbgLog.fine("read 1st byte");
int nbytes = stream.read(lengthBytes, 0, int_type_expansion_field);
dbgLog.fine("read next integer");
ByteBuffer bb_field_length = ByteBuffer.wrap(lengthBytes);
if (isLittleEndian){
bb_field_length.order(ByteOrder.LITTLE_ENDIAN);
dbgLog.fine("byte reversed");
}
int field_length;
if (int_type_expansion_field == 2){
field_length = bb_field_length.getShort();
} else {
field_length = bb_field_length.getInt();
}
if (dbgLog.isLoggable(Level.FINE)) dbgLog.fine("field_length="+field_length);
if (dbgLog.isLoggable(Level.FINE)) dbgLog.fine("firstByte[0]="+firstByte[0]);
if ((field_length + firstByte[0]) == 0){
// reached the end of this field
break;
} else {
byte[] stringField = new byte[field_length];
nbyte = stream.read(stringField, 0, field_length);
}
}
dbgLog.fine("decodeExpansionFields(): end");
}
/**
*
* @param stream
*/
private void decodeValueLabels(BufferedInputStream stream) throws IOException {
dbgLog.fine("decodeValueLabels(): start");
if (stream == null) {
throw new IllegalArgumentException("stream == null!");
}
if (stream.available() != 0) {
if (releaseNumber <= 105) {
parseValueLabelsRelease105(stream);
} else {
parseValueLabelsReleasel108(stream);
}
} else {
dbgLog.fine("no value-label table: end of file");
}
dbgLog.fine("decodeValueLabels(): end");
}
void parseValueLabelsRelease105(BufferedInputStream stream) throws IOException {
dbgLog.fine("parseValueLabelsRelease105(): start");
if (stream == null) {
throw new IllegalArgumentException("stream == null!");
}
int nvar = dataTable.getVarQuantity().intValue();
int length_label_name = constantTable.get("NAME") + 1;
// note: caution +1 as the null character, not 9 byte
int length_value_label_header = value_label_table_length
+ length_label_name;
if (dbgLog.isLoggable(Level.FINE)) {
dbgLog.fine("value_label_table_length=" + value_label_table_length);
}
if (dbgLog.isLoggable(Level.FINE)) {
dbgLog.fine("length_value_label_header=" + length_value_label_header);
}
int length_lable_name_field = 8;
/*
Seg field byte type
1-1. no of pairs 2 int (= m)
1-2. vlt_name 10 includes char+(\0) == name used in Sec2.part 5
-----------------------------------
11
2-1. values 2*n int[]
2-2. labels 8*n char
*/
// This map will hold a temporary lookup table for all the categorical
// value-label groups we are going to find here:
// These groups have unique names, and a group *may be shared* between
// multiple variables. In the method decodeDescriptorValueLabel above
// we have populated a lookup table where variables are linked to the
// corresponding value-label groups by name. Thus we must fully populate
// the full map of all the variable groups, then go through the list
// of variables and create the dataverse variable categories from
// them. -- L.A. 4.0
Map<String, Map<String, String>> tempValueLabelTable = new LinkedHashMap<>();
for (int i = 0; i < nvar; i++) {
if (dbgLog.isLoggable(Level.FINE)) {
dbgLog.fine("\n\n" + i + "th value-label table header");
}
byte[] valueLabelHeader = new byte[length_value_label_header];
// Part 1: reading the header of a value-label table if exists
int nbytes = stream.read(valueLabelHeader, 0,
length_value_label_header);
if (nbytes == 0) {
throw new IOException("reading value label header: no datum");
}
// 1.1 number of value-label pairs in this table (= m)
ByteBuffer bb_value_label_pairs
= ByteBuffer.wrap(valueLabelHeader, 0,
value_label_table_length);
if (isLittleEndian) {
bb_value_label_pairs.order(ByteOrder.LITTLE_ENDIAN);
//if (dbgLog.isLoggable(Level.FINE)) dbgLog.fine("value lable table lenth: byte reversed");
}
int no_value_label_pairs = bb_value_label_pairs.getShort();
if (dbgLog.isLoggable(Level.FINE)) {
dbgLog.fine("no_value_label_pairs=" + no_value_label_pairs);
}
// 1.2 labelName
String rawLabelName = new String(Arrays.copyOfRange(
valueLabelHeader,
value_label_table_length,
(value_label_table_length + length_label_name)),
"ISO-8859-1");
if (dbgLog.isLoggable(Level.FINE)) {
dbgLog.fine("rawLabelName(length)=" + rawLabelName.length());
}
String labelName = rawLabelName.substring(0, rawLabelName.indexOf(0));
if (dbgLog.isLoggable(Level.FINE)) {
dbgLog.fine("label name = " + labelName + "\n");
}
if (dbgLog.isLoggable(Level.FINE)) {
dbgLog.fine(i + "-th value-label table");
}
// Part 2: reading the value-label table
// the length of the value-label table is: 2*m + 8*m = 10*m
int length_value_label_table = (value_label_table_length
+ length_lable_name_field) * no_value_label_pairs;
if (dbgLog.isLoggable(Level.FINE)) {
dbgLog.fine("length_value_label_table=" + length_value_label_table);
}
byte[] valueLabelTable_i = new byte[length_value_label_table];
int noBytes = stream.read(valueLabelTable_i, 0,
length_value_label_table);
if (noBytes == 0) {
throw new IOException("reading value label table: no datum");
}
// 2-1. 2-byte-integer array (2*m): value array (sorted)
short[] valueList = new short[no_value_label_pairs];
int offset_value = 0;
for (int k = 0; k < no_value_label_pairs; k++) {
ByteBuffer bb_value_list
= ByteBuffer.wrap(valueLabelTable_i, offset_value,
value_label_table_length);
if (isLittleEndian) {
bb_value_list.order(ByteOrder.LITTLE_ENDIAN);
}
valueList[k] = bb_value_list.getShort();
offset_value += value_label_table_length;
}
if (dbgLog.isLoggable(Level.FINE)) {
dbgLog.fine("value_list=" + Arrays.toString(valueList) + "\n");
}
// 2-2. 8-byte chars that store label data (m units of labels)
if (dbgLog.isLoggable(Level.FINE)) {
dbgLog.fine("current offset_value=" + offset_value);
}
int offset_start = offset_value;
int offset_end = offset_value + length_lable_name_field;
String[] labelList = new String[no_value_label_pairs];
for (int l = 0; l < no_value_label_pairs; l++) {
String string_l = new String(Arrays.copyOfRange(valueLabelTable_i, offset_start,
offset_end), "ISO-8859-1");
int null_position = string_l.indexOf(0);
if (null_position != -1) {
labelList[l] = string_l.substring(0, null_position);
} else {
labelList[l] = string_l;
}
offset_start = offset_end;
offset_end += length_lable_name_field;
}
// Finally, we've reached the actual value-label pairs. We'll go
// through them and put them on the temporary lookup map:
tempValueLabelTable.put(labelName, new LinkedHashMap<>());
for (int j = 0; j < no_value_label_pairs; j++) {
if (dbgLog.isLoggable(Level.FINE)) {
dbgLog.fine(j + "-th pair:" + valueList[j] + "[" + labelList[j] + "]");
}
// TODO: do we need any null/empty string checks here? -- L.A. 4.0
tempValueLabelTable.get(labelName).put(Integer.toString(valueList[j]), labelList[j]);
}
if (stream.available() == 0) {
// reached the end of the file
if (dbgLog.isLoggable(Level.FINE)) {
dbgLog.fine("reached the end of file at " + i + "th value-label Table.");
}
break;
}
} // for nvar loop
// And now we can go through the list of variables, see if any have
// value-label groups linked, then build dataverse VariableCategory
// objects for them, using the values stored in the temporary map
// we've just built:
for (int i = 0; i < nvar; i++) {
if (valueLabelsLookupTable[i] != null) {
if (tempValueLabelTable.get(valueLabelsLookupTable[i]) != null) {
// What if it is null? -- is it a legit condition, that
// a variable was advertised as having categorical values,
// but no such cat value group exists under this name?
// -- L.A.
for (String value : tempValueLabelTable.get(valueLabelsLookupTable[i]).keySet()) {
VariableCategory cat = new VariableCategory();
cat.setValue(value);
cat.setLabel(tempValueLabelTable.get(valueLabelsLookupTable[i]).get(value));
/* cross-link the variable and category to each other: */
cat.setDataVariable(dataTable.getDataVariables().get(i));
dataTable.getDataVariables().get(i).getCategories().add(cat);
}
}
}
}
dbgLog.fine("parseValueLabelsRelease105(): end");
}
    /**
     * Parses the value-label section of a release-108-or-newer DTA file and
     * attaches the resulting categories to the corresponding DataVariables.
     *
     * NOTE(review): the method name contains a typo ("Releasel108", stray
     * 'l'); it is kept as-is since it is called by name elsewhere in this
     * class.
     *
     * Value-label groups have unique names, and a group may be shared between
     * multiple variables; decodeDescriptorValueLabel() has already linked
     * each variable to its group by name (valueLabelsLookupTable). Here we
     * read all the groups into a temporary name-keyed map, then walk the
     * variables and build VariableCategory objects from the matching groups.
     *
     * @param stream the Stata file stream, positioned at the value-label section
     * @throws IOException on reading errors
     */
    private void parseValueLabelsReleasel108(BufferedInputStream stream) throws IOException {
        dbgLog.fine("parseValueLabelsRelease108(): start");
        if (stream == null) {
            throw new IllegalArgumentException("stream == null!");
        }
        int nvar = dataTable.getVarQuantity().intValue();
        int length_label_name = constantTable.get("NAME");
        // header = 4-byte table length + fixed-width group name + padding bytes:
        int length_value_label_header = value_label_table_length
                + length_label_name
                + VALUE_LABEL_HEADER_PADDING_LENGTH;
        if (dbgLog.isLoggable(Level.FINE)) {
            dbgLog.fine("value_label_table_length=" + value_label_table_length);
        }
        if (dbgLog.isLoggable(Level.FINE)) {
            dbgLog.fine("length_value_label_header=" + length_value_label_header);
        }
        /*
         Seg field byte type
         1-1. len_vlt(Seg.2) 4 int
         1-2. vlt_name 9/33 char+(\0) == name used in Sec2.part 5
         1-3. padding 3 byte
         -----------------------------------
         16/40
         2-1. n(# of vls) 4 int
         2-2. m(len_labels) 4 int
         2-3. label_offsets 4*n int[]
         2-4. values 4*n int[]
         2-5. labels m char
         */
        // This map will hold a temporary lookup table for all the categorical
        // value-label groups:
        // These groups have unique names, and a group *may be shared* between
        // multiple variables. In the method decodeDescriptorValueLabel above
        // we have populated a lookup table where variables are linked to the
        // corresponding value-label groups by name. Thus we must fully populate
        // the full map of all the variable group, then go through the list
        // of variables and create the dataverse variable categories from
        // them. -- L.A. 4.0
        Map<String, Map<String, String>> tempValueLabelTable = new LinkedHashMap<>();
        for (int i = 0; i < nvar; i++) {
            if (dbgLog.isLoggable(Level.FINE)) {
                dbgLog.fine("\n\n" + i + "th value-label table header");
            }
            byte[] valueLabelHeader = new byte[length_value_label_header];
            // Part 1: reading the header of a value-label table if exists
            int nbytes = stream.read(valueLabelHeader, 0,
                    length_value_label_header);
            if (nbytes == 0) {
                throw new IOException("reading value label header: no datum");
            }
            // 1.1 length_value_label_table (4-byte int at the start of the header)
            ByteBuffer bb_value_label_header
                    = ByteBuffer.wrap(valueLabelHeader, 0,
                            value_label_table_length);
            if (isLittleEndian) {
                bb_value_label_header.order(ByteOrder.LITTLE_ENDIAN);
            }
            int length_value_label_table = bb_value_label_header.getInt();
            if (dbgLog.isLoggable(Level.FINE)) {
                dbgLog.fine("length of this value-label table="
                        + length_value_label_table);
            }
            // 1.2 labelName (fixed-width, null-padded group name)
            String rawLabelName = new String(Arrays.copyOfRange(
                    valueLabelHeader,
                    value_label_table_length,
                    (value_label_table_length + length_label_name)),
                    "ISO-8859-1");
            String labelName = getNullStrippedString(rawLabelName);
            if (dbgLog.isLoggable(Level.FINE)) {
                dbgLog.fine("label name = " + labelName + "\n");
            }
            if (dbgLog.isLoggable(Level.FINE)) {
                dbgLog.fine(i + "-th value-label table");
            }
            // Part 2: reading the value-label table
            byte[] valueLabelTable_i = new byte[length_value_label_table];
            int noBytes = stream.read(valueLabelTable_i, 0,
                    length_value_label_table);
            if (noBytes == 0) {
                throw new IOException("reading value label table: no datum");
            }
            // 2-1. 4-byte-integer: number of units in this table (n)
            int valueLabelTable_offset = 0;
            ByteBuffer bb_value_label_pairs
                    = ByteBuffer.wrap(valueLabelTable_i, valueLabelTable_offset,
                            value_label_table_length);
            if (isLittleEndian) {
                bb_value_label_pairs.order(ByteOrder.LITTLE_ENDIAN);
            }
            int no_value_label_pairs = bb_value_label_pairs.getInt();
            valueLabelTable_offset += value_label_table_length;
            if (dbgLog.isLoggable(Level.FINE)) {
                dbgLog.fine("no_value_label_pairs=" + no_value_label_pairs);
            }
            // 2-2. 4-byte-integer: length of the label section (m bytes)
            ByteBuffer bb_length_label_segment
                    = ByteBuffer.wrap(valueLabelTable_i, valueLabelTable_offset,
                            value_label_table_length);
            if (isLittleEndian) {
                bb_length_label_segment.order(ByteOrder.LITTLE_ENDIAN);
            }
            int length_label_segment = bb_length_label_segment.getInt();
            valueLabelTable_offset += value_label_table_length;
            // 2-3. 4-byte-integer array (4xm): offset values for the label sec.
            // these "label offsets" actually appear to represent the byte
            // offsets of the label strings, as stored in the next section.
            // (They are used below, in section 2-5, to select the substring
            // for each label.)
            int[] label_offsets = new int[no_value_label_pairs];
            int byte_offset = valueLabelTable_offset;
            for (int j = 0; j < no_value_label_pairs; j++) {
                // note: 4-byte signed, not java's long
                ByteBuffer bb_label_offset
                        = ByteBuffer.wrap(valueLabelTable_i, byte_offset,
                                value_label_table_length);
                if (isLittleEndian) {
                    bb_label_offset.order(ByteOrder.LITTLE_ENDIAN);
                    dbgLog.fine("label offset: byte reversed");
                }
                label_offsets[j] = bb_label_offset.getInt();
                dbgLog.fine("label offset [" + j + "]: " + label_offsets[j]);
                byte_offset += value_label_table_length;
            }
            // 2-4. 4-byte-integer array (4xm): value array (sorted)
            dbgLog.fine("value array");
            int[] valueList = new int[no_value_label_pairs];
            int offset_value = byte_offset;
            for (int k = 0; k < no_value_label_pairs; k++) {
                ByteBuffer bb_value_list
                        = ByteBuffer.wrap(valueLabelTable_i, offset_value,
                                value_label_table_length);
                if (isLittleEndian) {
                    bb_value_list.order(ByteOrder.LITTLE_ENDIAN);
                }
                valueList[k] = bb_value_list.getInt();
                offset_value += value_label_table_length;
            }
            // 2-5. m-byte chars that store label data (m units of labels)
            String label_segment = new String(
                    Arrays.copyOfRange(valueLabelTable_i,
                            offset_value,
                            (length_label_segment + offset_value)), "ISO-8859-1");
            // L.A. -- 2011.2.25:
            // This assumes that the labels are already stored in the right
            // order: (see my comment for the section 2.3 above)
            //String[] labelList = label_segment.split("\0");
            // Instead, we should be using the offset values obtained in
            // the section 2.3 above, and select the corresponding
            // substrings:
            String[] labelList = new String[no_value_label_pairs];
            for (int l = 0; l < no_value_label_pairs; l++) {
                String lblString = null;
                int lblOffset = label_offsets[l];
                // each label runs from its offset to the next null byte:
                lblString = label_segment.substring(lblOffset);
                int nullIndx = lblString.indexOf('\000');
                if (nullIndx > -1) {
                    lblString = lblString.substring(0, nullIndx);
                }
                labelList[l] = lblString;
            }
            // this should work! -- L.A.
            // (TODO: check the v105 value label parsing method, to see if
            // something similar applies there)
            // Finally, we've reached the actual value-label pairs. We'll go
            // through them and put them on the temporary lookup map:
            tempValueLabelTable.put(labelName, new LinkedHashMap<>());
            for (int l = 0; l < no_value_label_pairs; l++) {
                if (dbgLog.isLoggable(Level.FINE)) {
                    dbgLog.fine(l + "-th pair:" + valueList[l] + "[" + labelList[l] + "]");
                }
                // TODO: do we need any null/empty string checks here? -- L.A. 4.0
                tempValueLabelTable.get(labelName).put(Integer.toString(valueList[l]), labelList[l]);
            }
            if (stream.available() == 0) {
                // reached the end of the file
                dbgLog.fine("reached the end of the file at " + i + "th value-label Table");
                break;
            }
        } // for nvar loop
        // And now we can go through the list of variables, see if any have
        // value-label groups linked, then build dataverse VariableCategory
        // objects for them, using the values stored in the temporary map
        // we've just built:
        // TODO: this code is duplicated between this, and the "105 version" of
        // this method, above. Maybe it should be isolated in its own method.
        // -- L.A. 4.0
        for (int i = 0; i < nvar; i++) {
            if (valueLabelsLookupTable[i] != null) {
                if (tempValueLabelTable.get(valueLabelsLookupTable[i]) != null) {
                    // What if it is null? -- is it a legit condition, that
                    // a variable was advertised as having categorical values,
                    // but no such cat value group exists under this name?
                    // -- L.A.
                    for (String value : tempValueLabelTable.get(valueLabelsLookupTable[i]).keySet()) {
                        VariableCategory cat = new VariableCategory();
                        cat.setValue(value);
                        cat.setLabel(tempValueLabelTable.get(valueLabelsLookupTable[i]).get(value));
                        /* cross-link the variable and category to each other: */
                        cat.setDataVariable(dataTable.getDataVariables().get(i));
                        dataTable.getDataVariables().get(i).getCategories().add(cat);
                    }
                }
            }
        }
        dbgLog.fine("parseValueLabelsRelease108(): end");
    }
private void decodeData(BufferedInputStream stream) throws IOException {
dbgLog.fine("\n***** decodeData(): start *****");
if (stream == null) {
throw new IllegalArgumentException("stream == null!");
}
//int nvar = (Integer)smd.getFileInformation().get("varQnty");
int nvar = dataTable.getVarQuantity().intValue();
//int nobs = (Integer)smd.getFileInformation().get("caseQnty");
int nobs = dataTable.getCaseQuantity().intValue();
if (dbgLog.isLoggable(Level.FINE)) {
dbgLog.fine("data dimensions[observations x variables] = (" + nobs + "x" + nvar + ")");
}
if (dbgLog.isLoggable(Level.FINE)) {
dbgLog.fine("bytes per row=" + bytes_per_row + " bytes");
}
if (dbgLog.isLoggable(Level.FINE)) {
dbgLog.fine("variableTypes=" + Arrays.deepToString(variableTypes));
}
if (dbgLog.isLoggable(Level.FINE)) {
dbgLog.fine("StringLengthTable=" + StringLengthTable);
}
// create a File object to save the tab-delimited data file
FileOutputStream fileOutTab = null;
PrintWriter pwout = null;
File tabDelimitedDataFile = File.createTempFile("tempTabfile.", ".tab");
// save the temp tab-delimited file in the return ingest object:
ingesteddata.setTabDelimitedFile(tabDelimitedDataFile);
fileOutTab = new FileOutputStream(tabDelimitedDataFile);
pwout = new PrintWriter(new OutputStreamWriter(fileOutTab, "utf8"), true);
/* Should we lose this dateFormat thing in 4.0?
* the UNF should be calculatable on the app side solely from the data
* stored in the tab file and the type information stored the dataVariable
* object.
* furthermore, the very idea of storing a format entry not just for
* every variable, but for every value/observation is a bit strange.
* TODO: review and confirm that, in the 3.* implementation, every
* entry in dateFormat[nvar][*] is indeed the same - except for the
* missing value entries. -- L.A. 4.0
(OK, I got rid of the dateFormat; instead I kinda sorta assume
that the format is the same for every value in a column, save for
the missing values... like this:
dataTable.getDataVariables().get(columnCounter).setFormatSchemaName(ddt.format);
BUT, this needs to be reviewed/confirmed etc!
*/
//String[][] dateFormat = new String[nvar][nobs];
for (int i = 0; i < nobs; i++) {
byte[] dataRowBytes = new byte[bytes_per_row];
Object[] dataRow = new Object[nvar];
int nbytes = stream.read(dataRowBytes, 0, bytes_per_row);
if (nbytes == 0) {
String errorMessage = "reading data: no data were read at("
+ i + "th row)";
throw new IOException(errorMessage);
}
// decoding each row
int byte_offset = 0;
for (int columnCounter = 0;
columnCounter < variableTypes.length; columnCounter++) {
Integer varType
= variableTypeMap.get(variableTypes[columnCounter]);
// 4.0 Check if this is a time/date variable:
boolean isDateTimeDatum = false;
String formatCategory = dataTable.getDataVariables().get(columnCounter).getFormatCategory();
if (formatCategory != null && (formatCategory.equals("time") || formatCategory.equals("date"))) {
isDateTimeDatum = true;
}
String variableFormat = dateVariableFormats[columnCounter];
switch (varType != null ? varType : 256) {
case -5:
// Byte case
// note: 1 byte signed
byte byte_datum = dataRowBytes[byte_offset];
if (dbgLog.isLoggable(Level.FINER)) {
dbgLog.finer(i + "-th row " + columnCounter
+ "=th column byte =" + byte_datum);
}
if (byte_datum >= BYTE_MISSING_VALUE) {
if (dbgLog.isLoggable(Level.FINER)) {
dbgLog.finer(i + "-th row " + columnCounter
+ "=th column byte MV=" + byte_datum);
}
dataRow[columnCounter] = MissingValueForTabDelimitedFile;
} else {
dataRow[columnCounter] = byte_datum;
}
byte_offset++;
break;
case -4:
// Stata-int (=java's short: 2byte) case
// note: 2-byte signed int, not java's int
ByteBuffer int_buffer
= ByteBuffer.wrap(dataRowBytes, byte_offset, 2);
if (isLittleEndian) {
int_buffer.order(ByteOrder.LITTLE_ENDIAN);
}
short short_datum = int_buffer.getShort();
if (dbgLog.isLoggable(Level.FINER)) {
dbgLog.finer(i + "-th row " + columnCounter
+ "=th column stata int =" + short_datum);
}
if (short_datum >= INT_MISSIG_VALUE) {
if (dbgLog.isLoggable(Level.FINER)) {
dbgLog.finer(i + "-th row " + columnCounter
+ "=th column stata long missing value=" + short_datum);
}
dataRow[columnCounter] = MissingValueForTabDelimitedFile;
} else {
if (isDateTimeDatum) {
DecodedDateTime ddt = decodeDateTimeData("short", variableFormat, Short.toString(short_datum));
if (dbgLog.isLoggable(Level.FINER)) {
dbgLog.finer(i + "-th row , decodedDateTime " + ddt.decodedDateTime + ", format=" + ddt.format);
}
dataRow[columnCounter] = ddt.decodedDateTime;
//dateFormat[columnCounter][i] = ddt.format;
dataTable.getDataVariables().get(columnCounter).setFormat(ddt.format);
} else {
dataRow[columnCounter] = short_datum;
}
}
byte_offset += 2;
break;
case -3:
// stata-Long (= java's int: 4 byte) case
// note: 4-byte signed, not java's long
//dbgLog.fine("DATreader: stata long");
ByteBuffer long_buffer
= ByteBuffer.wrap(dataRowBytes, byte_offset, 4);
if (isLittleEndian) {
long_buffer.order(ByteOrder.LITTLE_ENDIAN);
}
int int_datum = long_buffer.getInt();
if (dbgLog.isLoggable(Level.FINE)) {
//dbgLog.fine(i + "-th row " + columnCounter
// + "=th column stata long =" + int_datum);
}
if (int_datum >= LONG_MISSING_VALUE) {
if (dbgLog.isLoggable(Level.FINE)) {
//dbgLog.fine(i + "-th row " + columnCounter
// + "=th column stata long missing value=" + int_datum);
}
dataRow[columnCounter] = MissingValueForTabDelimitedFile;
} else {
if (isDateTimeDatum) {
DecodedDateTime ddt = decodeDateTimeData("int", variableFormat, Integer.toString(int_datum));
if (dbgLog.isLoggable(Level.FINER)) {
dbgLog.finer(i + "-th row , decodedDateTime " + ddt.decodedDateTime + ", format=" + ddt.format);
}
dataRow[columnCounter] = ddt.decodedDateTime;
dataTable.getDataVariables().get(columnCounter).setFormat(ddt.format);
} else {
dataRow[columnCounter] = int_datum;
}
}
byte_offset += 4;
break;
case -2:
// float case
// note: 4-byte
ByteBuffer float_buffer
= ByteBuffer.wrap(dataRowBytes, byte_offset, 4);
if (isLittleEndian) {
float_buffer.order(ByteOrder.LITTLE_ENDIAN);
}
float float_datum = float_buffer.getFloat();
if (dbgLog.isLoggable(Level.FINER)) {
dbgLog.finer(i + "-th row " + columnCounter
+ "=th column float =" + float_datum);
}
if (FLOAT_MISSING_VALUE_SET.contains(float_datum)) {
if (dbgLog.isLoggable(Level.FINER)) {
dbgLog.finer(i + "-th row " + columnCounter
+ "=th column float missing value=" + float_datum);
}
dataRow[columnCounter] = MissingValueForTabDelimitedFile;
} else {
if (isDateTimeDatum) {
DecodedDateTime ddt = decodeDateTimeData("float", variableFormat, doubleNumberFormatter.format(float_datum));
if (dbgLog.isLoggable(Level.FINER)) {
dbgLog.finer(i + "-th row , decodedDateTime " + ddt.decodedDateTime + ", format=" + ddt.format);
}
dataRow[columnCounter] = ddt.decodedDateTime;
dataTable.getDataVariables().get(columnCounter).setFormat(ddt.format);
} else {
dataRow[columnCounter] = float_datum;
// This may be temporary - but for now (as in, while I'm testing
// 4.0 ingest against 3.* ingest, I need to be able to tell if a
// floating point value was a single, or double float in the
// original STATA file: -- L.A. Jul. 2014
dataTable.getDataVariables().get(columnCounter).setFormat("float");
}
}
byte_offset += 4;
break;
case -1:
// double case
// note: 8-byte
ByteBuffer double_buffer
= ByteBuffer.wrap(dataRowBytes, byte_offset, 8);
if (isLittleEndian) {
double_buffer.order(ByteOrder.LITTLE_ENDIAN);
}
double double_datum = double_buffer.getDouble();
if (DOUBLE_MISSING_VALUE_SET.contains(double_datum)) {
if (dbgLog.isLoggable(Level.FINER)) {
dbgLog.finer(i + "-th row " + columnCounter
+ "=th column double missing value=" + double_datum);
}
dataRow[columnCounter] = MissingValueForTabDelimitedFile;
} else {
if (isDateTimeDatum) {
DecodedDateTime ddt = decodeDateTimeData("double", variableFormat, doubleNumberFormatter.format(double_datum));
if (dbgLog.isLoggable(Level.FINER)) {
dbgLog.finer(i + "-th row , decodedDateTime " + ddt.decodedDateTime + ", format=" + ddt.format);
}
dataRow[columnCounter] = ddt.decodedDateTime;
dataTable.getDataVariables().get(columnCounter).setFormat(ddt.format);
} else {
dataRow[columnCounter] = doubleNumberFormatter.format(double_datum);
}
}
byte_offset += 8;
break;
case 0:
// String case
int strVarLength = StringLengthTable.get(columnCounter);
String raw_datum = new String(Arrays.copyOfRange(dataRowBytes, byte_offset,
(byte_offset + strVarLength)), "ISO-8859-1");
// TODO:
// is it the right thing to do, to default to "ISO-8859-1"?
// (it may be; since there's no mechanism for specifying
// alternative encodings in Stata, this may be their default;
// it just needs to be verified. -- L.A. Jul. 2014)
String string_datum = getNullStrippedString(raw_datum);
if (dbgLog.isLoggable(Level.FINER)) {
dbgLog.finer(i + "-th row " + columnCounter
+ "=th column string =" + string_datum);
}
if (string_datum.isEmpty()) {
if (dbgLog.isLoggable(Level.FINER)) {
dbgLog.finer(i + "-th row " + columnCounter
+ "=th column string missing value=" + string_datum);
}
// TODO:
/* Is this really a missing value case?
* Or is it an honest empty string?
* Is there such a thing as a missing value for a String in Stata?
* -- L.A. 4.0
*/
dataRow[columnCounter] = MissingValueForTabDelimitedFile;
} else {
/*
* Some special characters, like new lines and tabs need to
* be escaped - otherwise they will break our TAB file
* structure!
* But before we escape anything, all the back slashes
* already in the string need to be escaped themselves.
*/
String escapedString = string_datum.replace("\\", "\\\\");
// escape quotes:
escapedString = escapedString.replaceAll("\"", Matcher.quoteReplacement("\\\""));
// escape tabs and new lines:
escapedString = escapedString.replaceAll("\t", Matcher.quoteReplacement("\\t"));
escapedString = escapedString.replaceAll("\n", Matcher.quoteReplacement("\\n"));
escapedString = escapedString.replaceAll("\r", Matcher.quoteReplacement("\\r"));
// the escaped version of the string is stored in the tab file
// enclosed in double-quotes; this is in order to be able
// to differentiate between an empty string (tab-delimited empty string in
// double quotes) and a missing value (tab-delimited empty string).
// Although the question still remains - is it even possible
// to store an empty string, that's not a missing value, in Stata?
// - see the comment in the missing value case above. -- L.A. 4.0
dataRow[columnCounter] = "\"" + escapedString + "\"";
}
byte_offset += strVarLength;
break;
default:
dbgLog.fine("unknown variable type found");
String errorMessage
= "unknow variable Type found at data section";
throw new InvalidObjectException(errorMessage);
} // switch
} // for-columnCounter
// Dump the row of data to the tab-delimited file we are producing:
pwout.println(StringUtils.join(dataRow, "\t"));
if (dbgLog.isLoggable(Level.FINE)) {
//dbgLog.fine(i + "-th row's data={" + StringUtils.join(dataRow, ",") + "};");
}
} // for- i (row)
pwout.close();
if (dbgLog.isLoggable(Level.FINE)) {
dbgLog.fine("variableTypes:\n" + Arrays.deepToString(variableTypes));
}
dbgLog.fine("DTA Ingest: decodeData(): end.");
}
/**
 * Simple value holder pairing a decoded date/time string with the
 * date format pattern it was rendered in. Produced by decodeDateTimeData().
 */
private class DecodedDateTime {
    // format pattern (e.g. "yyyy-MM-dd") used to render decodedDateTime;
    // null when the raw value was passed through undecoded
    String format;
    // human-readable date/time string decoded from the raw Stata value
    String decodedDateTime;
}
/**
 * Decodes a raw Stata date/time value into a human-readable date string.
 * <p>
 * Stata stores dates as offsets from its epoch (1960-01-01); the display
 * format (%tc, %td, %tw, %tm, %tq, %th, %ty) determines the unit of the
 * offset (milliseconds, days, weeks, months, quarters, half-years, years).
 *
 * @param storageType the Stata storage type of the raw value (e.g. "short",
 *                    "int", "float", "double"); currently used for logging only
 * @param FormatType  the Stata display format string (e.g. "%td", "%tq")
 * @param rawDatum    the raw numeric value, as a string
 * @return a DecodedDateTime holding the decoded string and the format pattern
 *         it was rendered in (format is null if the value was passed through)
 * @throws IOException if a week-based date cannot be parsed
 */
private DecodedDateTime decodeDateTimeData(String storageType, String FormatType, String rawDatum) throws IOException {
    if (dbgLog.isLoggable(Level.FINER)) dbgLog.finer("(storageType, FormatType, rawDatum)=("+
    storageType +", " +FormatType +", " +rawDatum+")");
    /*
     * Historical note:
       pseudofunctions, td(), tw(), tm(), tq(), and th()
       used to be called d(), w(), m(), q(), and h().
       Those names still work but are considered anachronisms.
     */
    long milliSeconds;
    String decodedDateTime=null;
    String format = null;

    if (FormatType.matches("^%tc.*")){
        // tc is a relatively new format
        // datum is millisecond-wise offset from the Stata epoch
        milliSeconds = Math.round(Double.parseDouble(rawDatum)) + STATA_BIAS_TO_EPOCH;
        decodedDateTime = sdf_ymdhmsS.format(new Date(milliSeconds));
        format = sdf_ymdhmsS.toPattern();
        if (dbgLog.isLoggable(Level.FINER)) dbgLog.finer("tc: result="+decodedDateTime+", format = "+format);

    } else if (FormatType.matches("^%t?d.*")){
        // daily date: datum is a day offset from the Stata epoch
        milliSeconds = Long.parseLong(rawDatum) * MILLISECONDS_PER_DAY + STATA_BIAS_TO_EPOCH;
        if (dbgLog.isLoggable(Level.FINER)) dbgLog.finer("milliSeconds="+milliSeconds);

        decodedDateTime = sdf_ymd.format(new Date(milliSeconds));
        format = sdf_ymd.toPattern();
        if (dbgLog.isLoggable(Level.FINER)) dbgLog.finer("td:"+decodedDateTime+", format = "+format);

    } else if (FormatType.matches("^%t?w.*")){
        // weekly date: datum counts 52-week "years" from 1960;
        // decoded as the first day of that week within the year
        long weekYears = Math.round(Double.parseDouble(rawDatum));
        long left = Math.abs(weekYears)%52L;
        long years;
        if (weekYears < 0L){
            left = 52L - left;
            if (left == 52L){
                left = 0L;
            }
            //out.println("left="+left);
            years = (Math.abs(weekYears) -1)/52L +1L;
            years *= -1L;
        } else {
            years = weekYears/52L;
        }

        String yearString = Long.toString(1960L + years);
        // day-of-year of the week's first day, zero-padded for yyyy-DDD parsing
        String dayInYearString = new DecimalFormat("000").format((left*7) + 1);
        String yearDayInYearString = yearString + "-" + dayInYearString;

        Date tempDate = null;
        try {
            tempDate = new SimpleDateFormat("yyyy-DDD").parse(yearDayInYearString);
        } catch (ParseException ex) {
            throw new IOException(ex);
        }
        decodedDateTime = sdf_ymd.format(tempDate.getTime());
        format = sdf_ymd.toPattern();

    } else if (FormatType.matches("^%t?m.*")){
        // month: datum counts months from 1960-01;
        // decoded as the first day of the month
        long monthYears = Math.round(Double.parseDouble(rawDatum));
        long left = Math.abs(monthYears)%12L;
        long years;
        if (monthYears < 0L){
            left = 12L - left;
            //out.println("left="+left);
            years = (Math.abs(monthYears) -1)/12L +1L;
            years *= -1L;
        } else {
            years = monthYears/12L;
        }

        String month = null;
        if (left == 12L){
            left = 0L;
        }
        Long monthdata = (left+1);
        month = "-"+twoDigitFormatter.format(monthdata)+"-01";
        long year = 1960L + years;
        String monthYear = Long.toString(year) + month;
        if (dbgLog.isLoggable(Level.FINER)) dbgLog.finer("rawDatum="+rawDatum+": monthYear="+monthYear);

        decodedDateTime = monthYear;
        format = "yyyy-MM-dd";
        if (dbgLog.isLoggable(Level.FINER)) dbgLog.finer("tm:"+decodedDateTime+", format:"+format);

    } else if (FormatType.matches("^%t?q.*")){
        // quarter: datum counts quarters from 1960-Q1;
        // decoded as the first day of the quarter
        long quaterYears = Math.round(Double.parseDouble(rawDatum));
        long left = Math.abs(quaterYears)%4L;
        long years;
        if (quaterYears < 0L){
            left = 4L - left;
            //out.println("left="+left);
            years = (Math.abs(quaterYears) -1)/4L +1L;
            years *= -1L;
        } else {
            years = quaterYears/4L;
        }

        String quater = null;
        if ((left == 0L) || (left == 4L)){
            //quater ="q1"; //
            quater = "-01-01";
        } else if (left ==1L) {
            //quater = "q2"; //
            quater = "-04-01";
        } else if (left ==2L) {
            //quater = "q3"; //
            quater = "-07-01";
        } else if (left ==3L) {
            //quater = "q4"; //
            // BUGFIX: Q4 begins on October 1st, not November 1st
            quater = "-10-01";
        }
        long year = 1960L + years;
        String quaterYear = Long.toString(year) + quater;
        if (dbgLog.isLoggable(Level.FINER)) dbgLog.finer("rawDatum="+rawDatum+": quaterYear="+quaterYear);

        decodedDateTime = quaterYear;
        format = "yyyy-MM-dd";
        if (dbgLog.isLoggable(Level.FINER)) dbgLog.finer("tq:"+decodedDateTime+", format:"+format);

    } else if (FormatType.matches("^%t?h.*")){
        // half year: datum counts half-years from 1960;
        // odd number: 2nd half, even number: 1st half
        long halvesYears = Long.parseLong(rawDatum);
        long left = Math.abs(halvesYears)%2L;
        long years;
        if (halvesYears < 0L){
            years = (Math.abs(halvesYears) -1)/2L +1L;
            years *= -1L;
        } else {
            years = halvesYears/2L;
        }

        String half = null;
        if (left != 0L){
            // odd number => 2nd half: "h2"
            //half ="h2"; //
            half = "-07-01";
        } else {
            // even number => 1st half: "h1"
            //half = "h1"; //
            half = "-01-01";
        }
        long year = 1960L + years;
        String halfYear = Long.toString(year) + half;
        if (dbgLog.isLoggable(Level.FINER)) dbgLog.finer("rawDatum="+rawDatum+": halfYear="+halfYear);

        decodedDateTime = halfYear;
        format = "yyyy-MM-dd";
        if (dbgLog.isLoggable(Level.FINER)) dbgLog.finer("th:"+decodedDateTime+", format:"+format);

    } else if (FormatType.matches("^%t?y.*")){
        // year type's origin is 0 AD; the raw value is the year itself
        decodedDateTime = rawDatum;
        format = "yyyy";
        if (dbgLog.isLoggable(Level.FINER)) dbgLog.finer("th:"+decodedDateTime);
    } else {
        // unrecognized format: pass the raw value through undecoded
        decodedDateTime = rawDatum;
        format=null;
    }
    DecodedDateTime retValue = new DecodedDateTime();
    retValue.decodedDateTime = decodedDateTime;
    retValue.format = format;
    return retValue;
}
}
| apache-2.0 |
rockmkd/datacollector | container/src/main/java/com/streamsets/datacollector/execution/alerts/MetricRuleEvaluatorHelper.java | 12848 | /*
* Copyright 2017 StreamSets Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.streamsets.datacollector.execution.alerts;
import com.codahale.metrics.Counter;
import com.codahale.metrics.Gauge;
import com.codahale.metrics.Histogram;
import com.codahale.metrics.Metric;
import com.codahale.metrics.MetricRegistry;
import com.codahale.metrics.Timer;
import com.streamsets.datacollector.alerts.AlertsUtil;
import com.streamsets.datacollector.config.MetricElement;
import com.streamsets.datacollector.config.MetricType;
import com.streamsets.datacollector.definition.ConcreteELDefinitionExtractor;
import com.streamsets.datacollector.el.ELEvaluator;
import com.streamsets.datacollector.el.ELVariables;
import com.streamsets.datacollector.el.RuleELRegistry;
import com.streamsets.datacollector.metrics.ExtendedMeter;
import com.streamsets.datacollector.metrics.MetricsConfigurator;
import com.streamsets.datacollector.runner.PipeRunner;
import com.streamsets.datacollector.runner.RuntimeStats;
import com.streamsets.datacollector.util.ObserverException;
import com.streamsets.pipeline.api.impl.Utils;
import java.util.Map;
public class MetricRuleEvaluatorHelper {
private static final String VAL = "value()";
private static final String TIME_NOW = "time:now()";
private static final String START_TIME = "pipeline:startTime()";
private static final ELEvaluator EL_EVALUATOR = new ELEvaluator(
"condition", false, ConcreteELDefinitionExtractor.get(),
RuleELRegistry.getRuleELs(RuleELRegistry.GENERAL)
);
public static Metric getMetric(MetricRegistry metrics, String metricId, MetricType metricType) {
Metric metric;
switch (metricType) {
case HISTOGRAM:
metric = MetricsConfigurator.getHistogram(metrics, metricId);
break;
case METER:
metric = MetricsConfigurator.getMeter(metrics, metricId);
break;
case COUNTER:
metric = MetricsConfigurator.getCounter(metrics, metricId);
break;
case TIMER:
metric = MetricsConfigurator.getTimer(metrics, metricId);
break;
case GAUGE:
metric = MetricsConfigurator.getGauge(metrics, metricId);
break;
default :
throw new IllegalArgumentException(Utils.format("Unknown metric type '{}'", metricType));
}
return metric;
}
public static Object getTimerValue(Timer t, MetricElement metricElement) {
Object value = null;
if (t != null) {
switch (metricElement) {
case TIMER_COUNT:
value = t.getCount();
break;
case TIMER_M15_RATE:
value = t.getFifteenMinuteRate();
break;
case TIMER_M1_RATE:
value = t.getOneMinuteRate();
break;
case TIMER_M5_RATE:
value = t.getFiveMinuteRate();
break;
case TIMER_MAX:
value = t.getSnapshot().getMax();
break;
case TIMER_MEAN:
value = t.getSnapshot().getMean();
break;
case TIMER_MEAN_RATE:
value = t.getMeanRate();
break;
case TIMER_MIN:
value = t.getSnapshot().getMin();
break;
case TIMER_P50:
value = t.getSnapshot().getMedian();
break;
case TIMER_P75:
value = t.getSnapshot().get75thPercentile();
break;
case TIMER_P95:
value = t.getSnapshot().get95thPercentile();
break;
case TIMER_P98:
value = t.getSnapshot().get98thPercentile();
break;
case TIMER_P99:
value = t.getSnapshot().get99thPercentile();
break;
case TIMER_P999:
value = t.getSnapshot().get999thPercentile();
break;
case TIMER_STD_DEV:
value = t.getSnapshot().getStdDev();
break;
default:
throw new IllegalArgumentException("Unexpected metric element " + metricElement);
}
}
return value;
}
public static Object getCounterValue(Counter counter, MetricElement metricElement) {
Object value;
switch (metricElement) {
case COUNTER_COUNT:
value = counter.getCount();
break;
default:
throw new IllegalStateException("Unexpected metric element type " + metricElement);
}
return value;
}
public static Object getMeterValue(ExtendedMeter meter, MetricElement metricElement) {
Object value;
switch (metricElement) {
case METER_COUNT:
value = meter.getCount();
break;
case METER_H12_RATE:
value = meter.getTwelveHourRate();
break;
case METER_H1_RATE:
value = meter.getOneHourRate();
break;
case METER_H24_RATE:
value = meter.getTwentyFourHourRate();
break;
case METER_H6_RATE:
value = meter.getSixHourRate();
break;
case METER_M15_RATE:
value = meter.getFifteenMinuteRate();
break;
case METER_M1_RATE:
value = meter.getOneMinuteRate();
break;
case METER_M30_RATE:
value = meter.getThirtyMinuteRate();
break;
case METER_M5_RATE:
value = meter.getFiveMinuteRate();
break;
case METER_MEAN_RATE:
value = meter.getMeanRate();
break;
default:
throw new IllegalStateException("Unexpected metric element type " + metricElement);
}
return value;
}
public static Object getHistogramValue(Histogram histogram, MetricElement metricElement) {
Object value;
switch (metricElement) {
case HISTOGRAM_COUNT:
value = histogram.getCount();
break;
case HISTOGRAM_MAX:
value = histogram.getSnapshot().getMax();
break;
case HISTOGRAM_MEAN:
value = histogram.getSnapshot().getMean();
break;
case HISTOGRAM_MIN:
value = histogram.getSnapshot().getMin();
break;
case HISTOGRAM_MEDIAN:
value = histogram.getSnapshot().getMedian();
break;
case HISTOGRAM_P75:
value = histogram.getSnapshot().get75thPercentile();
break;
case HISTOGRAM_P95:
value = histogram.getSnapshot().get95thPercentile();
break;
case HISTOGRAM_P98:
value = histogram.getSnapshot().get98thPercentile();
break;
case HISTOGRAM_P99:
value = histogram.getSnapshot().get99thPercentile();
break;
case HISTOGRAM_P999:
value = histogram.getSnapshot().get999thPercentile();
break;
case HISTOGRAM_STD_DEV:
value = histogram.getSnapshot().getStdDev();
break;
default:
throw new IllegalStateException("Unexpected metric element type " + metricElement);
}
return value;
}
public static Object getGaugeValue(Gauge gauge, MetricElement metricElement) {
Object value;
RuntimeStats runtimeStats = (RuntimeStats)gauge.getValue();
switch (metricElement) {
case TIME_OF_LAST_RECEIVED_RECORD:
value = runtimeStats.getTimeOfLastReceivedRecord();
break;
case LAST_BATCH_INPUT_RECORDS_COUNT:
value = runtimeStats.getLastBatchInputRecordsCount();
break;
case LAST_BATCH_OUTPUT_RECORDS_COUNT:
value = runtimeStats.getLastBatchOutputRecordsCount();
break;
case LAST_BATCH_ERROR_RECORDS_COUNT:
value = runtimeStats.getLastBatchErrorRecordsCount();
break;
case LAST_BATCH_ERROR_MESSAGES_COUNT:
value = runtimeStats.getLastBatchErrorMessagesCount();
break;
default:
throw new IllegalStateException("Unexpected metric element type " + metricElement);
}
return value;
}
public static boolean evaluate(long pipelineStartTime, Object value, String condition) throws ObserverException {
//predicate String is of the form "val()<200" or "val() < 200 && val() > 100" etc
//replace val() with the actual value, append dollar and curly braces and evaluate the resulting EL expression
// string
String predicateWithValue = condition
.replace(VAL, String.valueOf(value))
.replace(TIME_NOW, System.currentTimeMillis() + "")
.replace(START_TIME, pipelineStartTime + "");
return AlertsUtil.evaluateExpression(predicateWithValue, new ELVariables(), EL_EVALUATOR);
}
public static Object getMetricValue(
MetricElement metricElement,
MetricType metricType,
Metric metric
) throws ObserverException {
Object value;
switch (metricType) {
case HISTOGRAM:
value = MetricRuleEvaluatorHelper.getHistogramValue((Histogram) metric, metricElement);
break;
case METER:
value = MetricRuleEvaluatorHelper.getMeterValue((ExtendedMeter) metric, metricElement);
break;
case COUNTER:
value = MetricRuleEvaluatorHelper.getCounterValue((Counter) metric, metricElement);
break;
case TIMER:
value = MetricRuleEvaluatorHelper.getTimerValue((Timer) metric, metricElement);
break;
case GAUGE:
value = MetricRuleEvaluatorHelper.getGaugeValue((Gauge) metric, metricElement);
break;
default :
throw new IllegalArgumentException(Utils.format("Unknown metric type '{}'", metricType));
}
return value;
}
/**
* Get metric value for given rule evaluation.
*
* @param metrics Metric Registry for the pipeline.
* @param metricId Name of the metric where the value is "usually" stored. This method cover mappings of metrics
* that got historically moved.
* @param metricType Type of the metric
* @param metricElement Value that caller needs in order to assert the right condition.
* @return Requested metric value or null if it doesn't exists
* @throws ObserverException
*/
public static Object getMetricValue(
MetricRegistry metrics,
String metricId,
MetricType metricType,
MetricElement metricElement
) throws ObserverException {
// We moved the logic of CURRENT_BATCH_AGE and TIME_IN_CURRENT_STAGE due to multi-threaded framework
if(metricElement.isOneOf(MetricElement.CURRENT_BATCH_AGE, MetricElement.TIME_IN_CURRENT_STAGE)) {
switch (metricElement) {
case CURRENT_BATCH_AGE:
return getTimeFromRunner(metrics, PipeRunner.METRIC_BATCH_START_TIME);
case TIME_IN_CURRENT_STAGE:
return getTimeFromRunner(metrics, PipeRunner.METRIC_STAGE_START_TIME);
default:
throw new IllegalStateException(Utils.format("Unknown metric type '{}'", metricType));
}
}
// Default path
Metric metric = getMetric(
metrics,
metricId,
metricType
);
if(metric != null) {
return getMetricValue(metricElement, metricType, metric);
}
return null;
}
/**
* Return calculated metric - from all the runners that are available for given pipeline, return the biggest difference
* between given metric and System.currentTimeMillis(). The semantic is that the runner metric stores start time of
* certain events (batch start time, stage start time) and we need to find out what is the longer running time for one
* of those metrics.
*
* @param metrics Metric registry for given pipeline.
* @param runnerMetricName Name of the PipeRunner metric that contains start time of given action.
* @return
*/
private static long getTimeFromRunner(
MetricRegistry metrics,
String runnerMetricName
) {
// First get number of total runners from the runtime gauge
RuntimeStats runtimeStats = (RuntimeStats) ((Gauge)getMetric(metrics, "RuntimeStatsGauge.gauge", MetricType.GAUGE)).getValue();
long totalRunners = runtimeStats.getTotalRunners();
long currentTime = System.currentTimeMillis();
long maxTime = 0;
// Then iterate over all runners and find the biggest time difference
for(int runnerId = 0; runnerId < totalRunners; runnerId++) {
Map<String, Object> runnerMetrics = (Map<String, Object>) ((Gauge)getMetric(metrics, "runner." + runnerId, MetricType.GAUGE)).getValue();
// Get current value
long value = (long) runnerMetrics.getOrDefault(runnerMetricName, 0L);
long runTime = currentTime - value;
if(maxTime < runTime) {
maxTime = runTime;
}
}
return maxTime;
}
}
| apache-2.0 |
annabaker/jbpm-wb | jbpm-wb-integration/jbpm-wb-integration-backend/src/main/java/org/jbpm/workbench/wi/backend/server/dd/DDEditorServiceImpl.java | 22109 | /*
* Copyright 2017 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jbpm.workbench.wi.backend.server.dd;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;
import javax.enterprise.context.ApplicationScoped;
import javax.inject.Inject;
import javax.inject.Named;
import javax.lang.model.SourceVersion;
import org.guvnor.common.services.backend.exceptions.ExceptionUtilities;
import org.guvnor.common.services.backend.util.CommentedOptionFactory;
import org.guvnor.common.services.shared.message.Level;
import org.guvnor.common.services.shared.metadata.model.Metadata;
import org.guvnor.common.services.shared.metadata.model.Overview;
import org.guvnor.common.services.shared.validation.model.ValidationMessage;
import org.jboss.errai.bus.server.annotations.Service;
import org.jbpm.workbench.wi.dd.model.DeploymentDescriptorModel;
import org.jbpm.workbench.wi.dd.model.ItemObjectModel;
import org.jbpm.workbench.wi.dd.model.Parameter;
import org.jbpm.workbench.wi.dd.service.DDEditorService;
import org.jbpm.workbench.wi.dd.validation.DeploymentDescriptorValidationMessage;
import org.kie.internal.runtime.conf.AuditMode;
import org.kie.internal.runtime.conf.DeploymentDescriptor;
import org.kie.internal.runtime.conf.NamedObjectModel;
import org.kie.internal.runtime.conf.ObjectModel;
import org.kie.internal.runtime.conf.PersistenceMode;
import org.kie.internal.runtime.conf.RuntimeStrategy;
import org.kie.internal.runtime.manager.deploy.DeploymentDescriptorIO;
import org.kie.internal.runtime.manager.deploy.DeploymentDescriptorImpl;
import org.kie.internal.runtime.manager.deploy.DeploymentDescriptorManager;
import org.kie.workbench.common.services.backend.service.KieService;
import org.kie.workbench.common.services.shared.kmodule.SingleValueItemObjectModel;
import org.mvel2.CompileException;
import org.mvel2.MVEL;
import org.mvel2.ParserContext;
import org.uberfire.backend.server.util.Paths;
import org.uberfire.backend.vfs.Path;
import org.uberfire.io.IOService;
import org.uberfire.java.nio.base.options.CommentedOption;
@Service
@ApplicationScoped
public class DDEditorServiceImpl
extends KieService<DeploymentDescriptorModel>
implements DDEditorService {
public static final String I18N_KEY_MISSING_IDENTIFIER = "DDValidationMissingIdentifier";
public static final String I18N_KEY_MISSING_RESOLVER = "DDValidationMissingResolver";
public static final String I18N_KEY_NOT_VALID_RESOLVER = "DDValidationNotValidResolver";
public static final String I18N_KEY_NOT_VALID_REFLECTION_IDENTIFIER = "DDValidationNotValidReflectionIdentifier";
public static final String I18N_KEY_NOT_VALID_MVEL_IDENTIFIER = "DDValidationNotValidMvelIdentifier";
public static final String I18N_KEY_MISSING_NAME = "DDValidationMissingName";
public static final String I18N_KEY_UNEXPECTED_ERROR = "DDValidationUnexpectedError";
@Inject
@Named("ioStrategy")
private IOService ioService;
@Inject
private DDConfigUpdaterHelper configUpdaterHelper;
@Inject
private CommentedOptionFactory commentedOptionFactory;
/**
 * Loads the deployment descriptor at the given path, delegating to
 * KieService.loadContent() which also populates the metadata overview.
 */
@Override
public DeploymentDescriptorModel load(Path path) {
    return super.loadContent(path);
}
/**
 * Builds the editor model for the descriptor file: reads the XML from the
 * VFS, parses it into a DeploymentDescriptor, converts it to the UI model
 * and attaches the overview.
 */
@Override
protected DeploymentDescriptorModel constructContent(Path path,
                                                     Overview overview) {
    // NOTE(review): the input stream is not closed here — presumably the
    // parser consumes it fully; confirm whether it should be closed explicitly.
    InputStream input = ioService.newInputStream(Paths.convert(path));
    DeploymentDescriptor originDD = DeploymentDescriptorIO.fromXml(input);
    DeploymentDescriptorModel ddModel = marshal(originDD);
    ddModel.setOverview(overview);
    return ddModel;
}
/**
 * Saves the descriptor model with a user-supplied commit comment.
 * Delegates to the CommentedOption overload; any failure is converted by
 * ExceptionUtilities into a (possibly portable) runtime exception.
 */
@Override
public Path save(Path path,
                 DeploymentDescriptorModel content,
                 Metadata metadata,
                 String comment) {
    try {
        save(path,
             content,
             metadata,
             commentedOptionFactory.makeCommentedOption(comment));
        return path;
    } catch (Exception e) {
        throw ExceptionUtilities.handleException(e);
    }
}
// Deliberately not exposed in the service API, in case the automatic
// updates for the descriptor are removed later.
/**
 * Serializes the model to descriptor XML and writes it to the VFS with the
 * given commented option, updating metadata attributes and firing the
 * metadata social events afterwards.
 */
public Path save(Path path,
                 DeploymentDescriptorModel content,
                 Metadata metadata,
                 CommentedOption commentedOption) {
    try {
        // convert the UI model back to descriptor XML before writing
        String deploymentContent = unmarshal(path,
                                             content).toXml();
        // capture the pre-save metadata so the social events can diff against it
        Metadata currentMetadata = metadataService.getMetadata(path);
        ioService.write(Paths.convert(path),
                        deploymentContent,
                        metadataService.setUpAttributes(path,
                                                        metadata),
                        commentedOption);
        fireMetadataSocialEvents(path,
                                 currentMetadata,
                                 metadata);
        return path;
    } catch (Exception e) {
        throw ExceptionUtilities.handleException(e);
    }
}
/**
 * Validates the descriptor model: checks every object-model section
 * (configuration, environment entries, listeners, globals, marshalling
 * strategies, work item handlers) and then verifies the descriptor can be
 * serialized to XML. Any exception is reported as a single ERROR message.
 *
 * @return the accumulated validation messages; empty if the model is valid
 */
@Override
public List<ValidationMessage> validate(Path path,
                                        DeploymentDescriptorModel content) {
    final List<ValidationMessage> validationMessages = new ArrayList<ValidationMessage>();
    try {
        DeploymentDescriptor dd = unmarshal(path, content);
        // validate the content of the descriptor
        validationMessages.addAll(validateObjectModels(path, dd.getConfiguration()));
        validationMessages.addAll(validateObjectModels(path, dd.getEnvironmentEntries()));
        validationMessages.addAll(validateObjectModels(path, dd.getEventListeners()));
        validationMessages.addAll(validateObjectModels(path, dd.getGlobals()));
        validationMessages.addAll(validateObjectModels(path, dd.getMarshallingStrategies()));
        validationMessages.addAll(validateObjectModels(path, dd.getTaskEventListeners()));
        validationMessages.addAll(validateObjectModels(path, dd.getWorkItemHandlers()));
        // validate its structure by attempting XML serialization
        dd.toXml();
    } catch (Exception e) {
        final ValidationMessage msg = new ValidationMessage();
        msg.setPath(path);
        msg.setLevel(Level.ERROR);
        msg.setText(e.getMessage());
        validationMessages.add(msg);
    }
    return validationMessages;
}
/**
 * Renders the given descriptor model as its XML source text.
 * Failures are translated by ExceptionUtilities into a runtime exception.
 */
@Override
public String toSource(Path path,
                       DeploymentDescriptorModel model) {
    try {
        final DeploymentDescriptor descriptor = unmarshal(path, model);
        return descriptor.toXml();
    } catch (Exception e) {
        throw ExceptionUtilities.handleException(e);
    }
}
// helper methods
protected List<ValidationMessage> validateObjectModels(Path path, List<? extends ObjectModel> objectModels) {
final List<ValidationMessage> validationMessages = new ArrayList<ValidationMessage>();
objectModels.forEach(model -> {
String identifier = model.getIdentifier();
if (identifier == null || identifier.isEmpty()) {
validationMessages.add(
newMessage(
path,
"Identifier cannot be empty for " + model.getIdentifier(),
Level.ERROR,
I18N_KEY_MISSING_IDENTIFIER,
model.getIdentifier()));
}
String resolver = model.getResolver();
if (resolver == null) {
validationMessages.add(
newMessage(
path,
"No resolver selected for " + model.getIdentifier(),
Level.ERROR,
I18N_KEY_MISSING_RESOLVER,
model.getIdentifier()));
}
else if (resolver.equalsIgnoreCase(ItemObjectModel.MVEL_RESOLVER)) {
try {
ParserContext parserContext = new ParserContext();
MVEL.compileExpression(identifier, parserContext);
} catch (CompileException e) {
StringBuilder text = new StringBuilder();
text.append("Could not compile mvel expression '" + model.getIdentifier() +"'.")
.append(" this can be due to invalid syntax of missing classes")
.append("-")
.append(e.getMessage());
validationMessages.add(
newMessage(
path,
text.toString(),
Level.WARNING,
I18N_KEY_NOT_VALID_MVEL_IDENTIFIER,
model.getIdentifier(),
e.getMessage()));
}
} else if (resolver.equalsIgnoreCase(ItemObjectModel.REFLECTION_RESOLVER)) {
if (!SourceVersion.isName(identifier)) {
validationMessages.add(
newMessage(
path,
"Identifier is not valid Java class which is required by reflection resolver " + model.getIdentifier(),
Level.ERROR,
I18N_KEY_NOT_VALID_REFLECTION_IDENTIFIER,
model.getIdentifier()));
}
} else {
validationMessages.add(
newMessage(
path,
"Not valid resolver selected for " + model.getIdentifier(),
Level.ERROR,
I18N_KEY_NOT_VALID_RESOLVER,
model.getIdentifier()));
}
if (model instanceof NamedObjectModel) {
String name = ((NamedObjectModel) model).getName();
if (name == null || name.isEmpty()) {
validationMessages.add(
newMessage(path,
"Name cannot be empty for " + model.getIdentifier(),
Level.ERROR,
I18N_KEY_MISSING_NAME,
model.getIdentifier()));
}
}
});
return validationMessages;
}
/**
 * Builds a DeploymentDescriptorValidationMessage carrying both the display
 * text and the i18n key/arguments used for localized rendering.
 *
 * @param path  path the message is attached to
 * @param text  human-readable (non-localized) message text
 * @param level message severity
 * @param key   i18n bundle key for the localized message
 * @param args  arguments substituted into the localized message
 */
protected ValidationMessage newMessage(Path path, String text, Level level, String key, Object... args) {
    final DeploymentDescriptorValidationMessage msg = new DeploymentDescriptorValidationMessage();
    msg.setPath(path);
    msg.setLevel(level);
    msg.setText(text);
    msg.setKey(key);
    msg.setArgs(args);
    return msg;
}
/**
 * Converts a runtime DeploymentDescriptor into the editor's
 * DeploymentDescriptorModel, copying the scalar settings and translating
 * every object-model section into ItemObjectModel lists.
 */
protected DeploymentDescriptorModel marshal(DeploymentDescriptor originDD) {
    DeploymentDescriptorModel ddModel = new DeploymentDescriptorModel();
    ddModel.setPersistenceUnitName(originDD.getPersistenceUnit());
    ddModel.setAuditPersistenceUnitName(originDD.getAuditPersistenceUnit());
    // enum settings are carried over as their string representations
    ddModel.setAuditMode(originDD.getAuditMode().toString());
    ddModel.setPersistenceMode(originDD.getPersistenceMode().toString());
    ddModel.setRuntimeStrategy(originDD.getRuntimeStrategy().toString());
    // marshalling strategies
    List<ObjectModel> marshallingStrategies = originDD.getMarshallingStrategies();
    ddModel.setMarshallingStrategies(processObjectModel(marshallingStrategies));
    // event listeners
    List<ObjectModel> eventListeners = originDD.getEventListeners();
    ddModel.setEventListeners(processObjectModel(eventListeners));
    // globals
    List<NamedObjectModel> globals = originDD.getGlobals();
    ddModel.setGlobals(processNamedObjectModel(globals));
    // work item handlers
    List<NamedObjectModel> workItemHandlers = originDD.getWorkItemHandlers();
    ddModel.setWorkItemHandlers(processNamedObjectModel(workItemHandlers));
    // task event listeners
    List<ObjectModel> taskEventListeners = originDD.getTaskEventListeners();
    ddModel.setTaskEventListeners(processObjectModel(taskEventListeners));
    // environment entries
    List<NamedObjectModel> environmentEntries = originDD.getEnvironmentEntries();
    ddModel.setEnvironmentEntries(processNamedObjectModel(environmentEntries));
    // configuration
    List<NamedObjectModel> configuration = originDD.getConfiguration();
    ddModel.setConfiguration(processNamedObjectModel(configuration));
    // required roles (wrapped as single-value items for the editor)
    ddModel.setRequiredRoles(originDD.getRequiredRoles().stream().map( r -> new SingleValueItemObjectModel(r)).collect(Collectors.toList()));
    // remotable classes (wrapped as single-value items for the editor)
    ddModel.setRemotableClasses(originDD.getClasses().stream().map( c -> new SingleValueItemObjectModel(c)).collect(Collectors.toList()));
    ddModel.setLimitSerializationClasses(originDD.getLimitSerializationClasses());
    return ddModel;
}
/**
 * Converts the editor-facing {@link DeploymentDescriptorModel} back into a
 * runtime {@link DeploymentDescriptor}, i.e. the inverse of
 * {@code marshal(DeploymentDescriptor)}.
 *
 * @param path  file the descriptor belongs to; currently unused here but kept
 *              for interface symmetry with other save/convert operations
 * @param model UI model to convert; when null, the platform default
 *              descriptor is returned instead
 * @return a fully populated descriptor built via the descriptor builder API
 */
protected DeploymentDescriptor unmarshal(Path path,
                                         DeploymentDescriptorModel model) {
    if (model == null) {
        // No model saved yet — fall back to the platform default descriptor.
        return new DeploymentDescriptorManager().getDefaultDescriptor();
    }
    DeploymentDescriptor updated = new DeploymentDescriptorImpl();
    // Scalar settings: enum values round-trip through their string names,
    // so valueOf() here mirrors the toString() calls in marshal().
    updated.getBuilder()
            .persistenceUnit(model.getPersistenceUnitName())
            .auditPersistenceUnit(model.getAuditPersistenceUnitName())
            .auditMode(AuditMode.valueOf(model.getAuditMode()))
            .persistenceMode(PersistenceMode.valueOf(model.getPersistenceMode()))
            .runtimeStrategy(RuntimeStrategy.valueOf(model.getRuntimeStrategy()))
            .setLimitSerializationClasses(model.getLimitSerializationClasses());
    // marshalling strategies
    List<ItemObjectModel> marshallingStrategies = model.getMarshallingStrategies();
    updated.getBuilder().setMarshalingStrategies(processToObjectModel(marshallingStrategies));
    // event listeners
    List<ItemObjectModel> eventListeners = model.getEventListeners();
    updated.getBuilder().setEventListeners(processToObjectModel(eventListeners));
    // globals
    List<ItemObjectModel> globals = model.getGlobals();
    updated.getBuilder().setGlobals(processToNamedObjectModel(globals));
    // work item handlers
    List<ItemObjectModel> workItemHandlers = model.getWorkItemHandlers();
    updated.getBuilder().setWorkItemHandlers(processToNamedObjectModel(workItemHandlers));
    // task event listeners
    List<ItemObjectModel> taskEventListeners = model.getTaskEventListeners();
    updated.getBuilder().setTaskEventListeners(processToObjectModel(taskEventListeners));
    // environment entries
    List<ItemObjectModel> environmentEntries = model.getEnvironmentEntries();
    updated.getBuilder().setEnvironmentEntries(processToNamedObjectModel(environmentEntries));
    // configuration
    List<ItemObjectModel> configuration = model.getConfiguration();
    updated.getBuilder().setConfiguration(processToNamedObjectModel(configuration));
    // required roles
    updated.getBuilder().setRequiredRoles(model.getRequiredRoles().stream().map( r -> r.getValue()).collect(Collectors.toList()));
    // remoteable classes
    updated.getBuilder().setClasses(model.getRemotableClasses().stream().map( c -> c.getValue()).collect(Collectors.toList()));
    return updated;
}
/**
 * Maps runtime {@link NamedObjectModel} entries to editor
 * {@link ItemObjectModel} entries, carrying name, identifier, resolver
 * and converted parameters across.
 *
 * @param data source list; may be null
 * @return converted list, or null when {@code data} is null
 */
private List<ItemObjectModel> processNamedObjectModel(List<NamedObjectModel> data) {
    if (data == null) {
        return null;
    }
    List<ItemObjectModel> converted = new ArrayList<ItemObjectModel>();
    for (NamedObjectModel source : data) {
        converted.add(new ItemObjectModel(source.getName(),
                                          source.getIdentifier(),
                                          source.getResolver(),
                                          collectParameters(source.getParameters())));
    }
    return converted;
}
/**
 * Maps runtime {@link ObjectModel} entries to editor {@link ItemObjectModel}
 * entries. Unlike the named variant, these items carry no name (null).
 *
 * @param data source list; may be null
 * @return converted list, or null when {@code data} is null
 */
private List<ItemObjectModel> processObjectModel(List<ObjectModel> data) {
    if (data == null) {
        return null;
    }
    List<ItemObjectModel> converted = new ArrayList<ItemObjectModel>();
    for (ObjectModel source : data) {
        converted.add(new ItemObjectModel(null,
                                          source.getIdentifier(),
                                          source.getResolver(),
                                          collectParameters(source.getParameters())));
    }
    return converted;
}
/**
 * Converts the raw parameter objects of an {@link ObjectModel} into editor
 * {@link Parameter} entries. Only nested {@link ObjectModel} parameters are
 * considered; other element types are ignored.
 *
 * Fix: the original unconditionally read {@code model.getParameters().get(0)},
 * which throws NPE/IndexOutOfBoundsException for a nested model whose own
 * parameter list is null or empty. Such malformed entries are now skipped.
 *
 * @param parameters raw parameter list; may be null or empty
 * @return converted parameters, or null when {@code parameters} is null/empty
 */
private List<Parameter> collectParameters(List<Object> parameters) {
    List<Parameter> result = null;
    if (parameters != null && !parameters.isEmpty()) {
        result = new ArrayList<Parameter>();
        for (Object param : parameters) {
            if (param instanceof ObjectModel) {
                ObjectModel model = (ObjectModel) param;
                List<Object> nested = model.getParameters();
                // Guard against malformed entries with no value payload.
                if (nested != null && !nested.isEmpty()) {
                    result.add(new Parameter(model.getIdentifier(),
                                             nested.get(0).toString()));
                }
            }
        }
    }
    return result;
}
/**
 * Inverse of {@code processObjectModel}: rebuilds runtime
 * {@link ObjectModel} entries from editor {@link ItemObjectModel} entries,
 * re-attaching each parameter as a nested ObjectModel with a trimmed value.
 *
 * @param data editor items; may be null
 * @return rebuilt models, or null when {@code data} is null
 */
private List<ObjectModel> processToObjectModel(List<ItemObjectModel> data) {
    if (data == null) {
        return null;
    }
    List<ObjectModel> models = new ArrayList<ObjectModel>();
    for (ItemObjectModel source : data) {
        ObjectModel model = new ObjectModel(source.getResolver(),
                                            source.getValue());
        List<Parameter> params = source.getParameters();
        if (params != null) {
            for (Parameter parameter : params) {
                // Values are trimmed to drop whitespace introduced by the editor.
                model.addParameter(new ObjectModel(source.getResolver(),
                                                   parameter.getType(),
                                                   parameter.getValue().trim()));
            }
        }
        models.add(model);
    }
    return models;
}
/**
 * Inverse of {@code processNamedObjectModel}: rebuilds runtime
 * {@link NamedObjectModel} entries (resolver + name + value) from editor
 * {@link ItemObjectModel} entries, re-attaching parameters with trimmed
 * values.
 *
 * @param data editor items; may be null
 * @return rebuilt named models, or null when {@code data} is null
 */
private List<NamedObjectModel> processToNamedObjectModel(List<ItemObjectModel> data) {
    if (data == null) {
        return null;
    }
    List<NamedObjectModel> models = new ArrayList<NamedObjectModel>();
    for (ItemObjectModel source : data) {
        NamedObjectModel model = new NamedObjectModel(source.getResolver(),
                                                      source.getName(),
                                                      source.getValue());
        List<Parameter> params = source.getParameters();
        if (params != null) {
            for (Parameter parameter : params) {
                // Values are trimmed to drop whitespace introduced by the editor.
                model.addParameter(new ObjectModel(source.getResolver(),
                                                   parameter.getType(),
                                                   parameter.getValue().trim()));
            }
        }
        models.add(model);
    }
    return models;
}
/**
 * Writes a default kie-deployment-descriptor.xml at {@code path} unless one
 * already exists. When the project has a persistence.xml, the JPA
 * marshalling strategy is added to the default descriptor before writing.
 *
 * @param path target descriptor location
 */
@Override
public void createIfNotExists(Path path) {
    org.uberfire.java.nio.file.Path nioPath = Paths.convert(path);
    if (ioService.exists(nioPath)) {
        // Never overwrite an existing descriptor.
        return;
    }
    DeploymentDescriptor descriptor =
            new DeploymentDescriptorManager("org.jbpm.domain").getDefaultDescriptor();
    if (configUpdaterHelper.hasPersistenceFile(path)) {
        // Project has a persistence.xml configured — add the JPA marshalling strategy.
        configUpdaterHelper.addJPAMarshallingStrategy(descriptor,
                                                      path);
    }
    ioService.write(nioPath,
                    descriptor.toXml());
}
/**
 * Reports whether this editor handles the given file, i.e. whether it is a
 * kie-deployment-descriptor.xml.
 *
 * Fix: compare constant-first so a null file name yields {@code false}
 * instead of a NullPointerException.
 *
 * @param path candidate file
 * @return true when the file name is exactly "kie-deployment-descriptor.xml"
 */
@Override
public boolean accepts(Path path) {
    return "kie-deployment-descriptor.xml".equals(path.getFileName());
}
/**
 * Validates the deployment descriptor at {@code path} by parsing its XML and
 * converting it to the editor model; any failure is reported as a single
 * ERROR-level validation message rather than propagated.
 *
 * Fix: the original never closed the {@link InputStream} returned by
 * {@code ioService.newInputStream(...)}, leaking the underlying resource on
 * every call. try-with-resources guarantees it is closed on all paths.
 *
 * @param path descriptor file to validate
 * @return validation messages; empty on success, a single unexpected-error
 *         message when parsing/conversion throws
 */
@Override
public List<ValidationMessage> validate(Path path) {
    try (InputStream input = ioService.newInputStream(Paths.convert(path))) {
        DeploymentDescriptorModel ddModel = marshal(DeploymentDescriptorIO.fromXml(input));
        return validate(path, ddModel);
    } catch (Exception e) {
        return Arrays.asList(
                newMessage(
                        path,
                        e.getMessage(),
                        Level.ERROR,
                        I18N_KEY_UNEXPECTED_ERROR,
                        e.getMessage()));
    }
}
}
| apache-2.0 |
sensui/guava-libraries | guava-tests/test/com/google/common/collect/LinkedHashMultimapTest.java | 15535 | /*
* Copyright (C) 2007 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.collect;
import static com.google.common.collect.Lists.newArrayList;
import static com.google.common.collect.Sets.newHashSet;
import static com.google.common.collect.Sets.newLinkedHashSet;
import static com.google.common.collect.testing.Helpers.mapEntry;
import static com.google.common.collect.testing.IteratorFeature.MODIFIABLE;
import static java.util.Arrays.asList;
import static org.truth0.Truth.ASSERT;
import com.google.common.annotations.GwtCompatible;
import com.google.common.annotations.GwtIncompatible;
import com.google.common.collect.testing.IteratorTester;
import com.google.common.collect.testing.features.CollectionFeature;
import com.google.common.collect.testing.features.CollectionSize;
import com.google.common.collect.testing.features.MapFeature;
import com.google.common.collect.testing.google.SetMultimapTestSuiteBuilder;
import com.google.common.collect.testing.google.TestStringSetMultimapGenerator;
import com.google.common.testing.SerializableTester;
import junit.framework.Test;
import junit.framework.TestSuite;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
/**
* Unit tests for {@code LinkedHashMultimap}.
*
* @author Jared Levy
*/
@GwtCompatible(emulated = true)
public class LinkedHashMultimapTest extends AbstractSetMultimapTest {

  // Builds the generated conformance suite (via SetMultimapTestSuiteBuilder)
  // plus the hand-written tests in this class.
  @GwtIncompatible("suite")
  public static Test suite() {
    TestSuite suite = new TestSuite();
    suite.addTest(SetMultimapTestSuiteBuilder.using(new TestStringSetMultimapGenerator() {
        @Override
        protected SetMultimap<String, String> create(Entry<String, String>[] entries) {
          SetMultimap<String, String> multimap = LinkedHashMultimap.create();
          for (Entry<String, String> entry : entries) {
            multimap.put(entry.getKey(), entry.getValue());
          }
          return multimap;
        }
      })
      .named("LinkedHashMultimap")
      .withFeatures(
          MapFeature.ALLOWS_NULL_KEYS,
          MapFeature.ALLOWS_NULL_VALUES,
          MapFeature.GENERAL_PURPOSE,
          MapFeature.FAILS_FAST_ON_CONCURRENT_MODIFICATION,
          CollectionFeature.SUPPORTS_ITERATOR_REMOVE,
          CollectionFeature.KNOWN_ORDER,
          CollectionFeature.SERIALIZABLE,
          CollectionSize.ANY)
      .createTestSuite());
    suite.addTestSuite(LinkedHashMultimapTest.class);
    return suite;
  }

  // Multimap under test for the inherited AbstractSetMultimapTest cases.
  @Override protected Multimap<String, Integer> create() {
    return LinkedHashMultimap.create();
  }

  // Verifies the per-key value-set hash table keeps its load factor in range
  // as values accumulate (exercises internal resizing behavior).
  public void testValueSetHashTableExpansion() {
    LinkedHashMultimap<String, Integer> multimap = LinkedHashMultimap.create();
    for (int z = 1; z <= 100; z++) {
      multimap.put("a", z);
      // The Eclipse compiler (and hence GWT) rejects a parameterized cast.
      @SuppressWarnings("unchecked")
      LinkedHashMultimap<String, Integer>.ValueSet valueSet =
          (LinkedHashMultimap.ValueSet) multimap.backingMap().get("a");
      assertEquals(z, valueSet.size());
      assertFalse(Hashing.needsResizing(valueSet.size(), valueSet.hashTable.length,
          LinkedHashMultimap.VALUE_SET_LOAD_FACTOR));
    }
  }

  // Fixture with 5 entries across 3 keys in a known insertion order; the
  // ordering tests below assert iteration follows exactly this order.
  private Multimap<String, Integer> initializeMultimap5() {
    Multimap<String, Integer> multimap = getMultimap();
    multimap.put("foo", 5);
    multimap.put("bar", 4);
    multimap.put("foo", 3);
    multimap.put("cow", 2);
    multimap.put("bar", 1);
    return multimap;
  }

  public void testToString() {
    assertEquals("{foo=[3, -1, 2, 4, 1], bar=[1, 2, 3]}",
        createSample().toString());
  }

  public void testOrderingReadOnly() {
    Multimap<String, Integer> multimap = initializeMultimap5();
    assertOrderingReadOnly(multimap);
  }

  // Insertion order must survive the unmodifiable wrapper.
  public void testOrderingUnmodifiable() {
    Multimap<String, Integer> multimap = initializeMultimap5();
    assertOrderingReadOnly(Multimaps.unmodifiableMultimap(multimap));
  }

  // Insertion order must survive the synchronized wrapper.
  public void testOrderingSynchronized() {
    Multimap<String, Integer> multimap = initializeMultimap5();
    assertOrderingReadOnly(Multimaps.synchronizedMultimap(multimap));
  }

  // Insertion order must survive a serialize/deserialize round trip.
  @GwtIncompatible("SeriazableTester")
  public void testSerializationOrdering() {
    Multimap<String, Integer> multimap = initializeMultimap5();
    Multimap<String, Integer> copy
        = SerializableTester.reserializeAndAssert(multimap);
    assertOrderingReadOnly(copy);
  }

  @GwtIncompatible("SeriazableTester")
  public void testSerializationOrderingKeysAndEntries() {
    Multimap<String, Integer> multimap = LinkedHashMultimap.create();
    multimap.put("a", 1);
    multimap.put("b", 2);
    multimap.put("a", 3);
    multimap.put("c", 4);
    multimap.remove("a", 1);
    multimap = SerializableTester.reserializeAndAssert(multimap);
    ASSERT.that(multimap.keySet()).has().allOf("a", "b", "c").inOrder();
    ASSERT.that(multimap.entries()).has().allOf(
        mapEntry("b", 2),
        mapEntry("a", 3),
        mapEntry("c", 4)).inOrder();
    // note that the keys and entries are in different orders
  }

  // Shared assertions: checks get(), keySet(), values(), entries() and
  // asMap() all iterate the initializeMultimap5() fixture in insertion order.
  private void assertOrderingReadOnly(Multimap<String, Integer> multimap) {
    ASSERT.that(multimap.get("foo")).has().allOf(5, 3).inOrder();
    ASSERT.that(multimap.get("bar")).has().allOf(4, 1).inOrder();
    ASSERT.that(multimap.get("cow")).has().item(2);

    ASSERT.that(multimap.keySet()).has().allOf("foo", "bar", "cow").inOrder();
    ASSERT.that(multimap.values()).has().allOf(5, 4, 3, 2, 1).inOrder();

    Iterator<Map.Entry<String, Integer>> entryIterator =
        multimap.entries().iterator();
    assertEquals(Maps.immutableEntry("foo", 5), entryIterator.next());
    assertEquals(Maps.immutableEntry("bar", 4), entryIterator.next());
    assertEquals(Maps.immutableEntry("foo", 3), entryIterator.next());
    assertEquals(Maps.immutableEntry("cow", 2), entryIterator.next());
    assertEquals(Maps.immutableEntry("bar", 1), entryIterator.next());

    Iterator<Map.Entry<String, Collection<Integer>>> collectionIterator =
        multimap.asMap().entrySet().iterator();
    Map.Entry<String, Collection<Integer>> entry = collectionIterator.next();
    assertEquals("foo", entry.getKey());
    ASSERT.that(entry.getValue()).has().allOf(5, 3).inOrder();
    entry = collectionIterator.next();
    assertEquals("bar", entry.getKey());
    ASSERT.that(entry.getValue()).has().allOf(4, 1).inOrder();
    entry = collectionIterator.next();
    assertEquals("cow", entry.getKey());
    ASSERT.that(entry.getValue()).has().item(2);
  }

  // Mutations (replaceValues, removeAll, remove, put) must keep key order
  // consistent: a fully-removed key re-added moves to the end.
  public void testOrderingUpdates() {
    Multimap<String, Integer> multimap = initializeMultimap5();

    ASSERT.that(multimap.replaceValues("foo", asList(6, 7))).has().allOf(5, 3).inOrder();
    ASSERT.that(multimap.keySet()).has().allOf("foo", "bar", "cow").inOrder();
    ASSERT.that(multimap.removeAll("foo")).has().allOf(6, 7).inOrder();
    ASSERT.that(multimap.keySet()).has().allOf("bar", "cow").inOrder();
    assertTrue(multimap.remove("bar", 4));
    ASSERT.that(multimap.keySet()).has().allOf("bar", "cow").inOrder();
    assertTrue(multimap.remove("bar", 1));
    ASSERT.that(multimap.keySet()).has().item("cow");
    multimap.put("bar", 9);
    ASSERT.that(multimap.keySet()).has().allOf("cow", "bar").inOrder();
  }

  // toString() with null keys/values: exact output pins both null handling
  // and insertion order.
  public void testToStringNullExact() {
    Multimap<String, Integer> multimap = getMultimap();

    multimap.put("foo", 3);
    multimap.put("foo", -1);
    multimap.put(null, null);
    multimap.put("bar", 1);
    multimap.put("foo", 2);
    multimap.put(null, 0);
    multimap.put("bar", 2);
    multimap.put("bar", null);
    multimap.put("foo", null);
    multimap.put("foo", 4);
    multimap.put(null, -1);
    multimap.put("bar", 3);
    multimap.put("bar", 1);
    multimap.put("foo", 1);

    assertEquals(
        "{foo=[3, -1, 2, null, 4, 1], null=[null, 0, -1], bar=[1, 2, null, 3]}",
        multimap.toString());
  }

  // putAll(Multimap) must preserve the source's insertion order.
  public void testPutMultimapOrdered() {
    Multimap<String, Integer> multimap = LinkedHashMultimap.create();
    multimap.putAll(initializeMultimap5());
    assertOrderingReadOnly(multimap);
  }

  public void testKeysToString_ordering() {
    Multimap<String, Integer> multimap = initializeMultimap5();
    assertEquals("[foo x 2, bar x 2, cow]", multimap.keys().toString());
  }

  public void testCreate() {
    LinkedHashMultimap<String, Integer> multimap = LinkedHashMultimap.create();
    multimap.put("foo", 1);
    multimap.put("bar", 2);
    multimap.put("foo", 3);
    assertEquals(ImmutableSet.of(1, 3), multimap.get("foo"));
  }

  public void testCreateFromMultimap() {
    Multimap<String, Integer> multimap = createSample();
    LinkedHashMultimap<String, Integer> copy =
        LinkedHashMultimap.create(multimap);
    assertEquals(multimap, copy);
  }

  public void testCreateFromSizes() {
    LinkedHashMultimap<String, Integer> multimap
        = LinkedHashMultimap.create(20, 15);
    multimap.put("foo", 1);
    multimap.put("bar", 2);
    multimap.put("foo", 3);
    assertEquals(ImmutableSet.of(1, 3), multimap.get("foo"));
  }

  // Negative expected-key / expected-values-per-key sizes must be rejected.
  public void testCreateFromIllegalSizes() {
    try {
      LinkedHashMultimap.create(-20, 15);
      fail();
    } catch (IllegalArgumentException expected) {}

    try {
      LinkedHashMultimap.create(20, -15);
      fail();
    } catch (IllegalArgumentException expected) {}
  }

  // The IteratorTester cases below exhaustively exercise every sequence of
  // iterator operations (next/remove) of the given length against a
  // reference implementation, hence "unreasonably slow" for GWT.
  @GwtIncompatible("unreasonably slow")
  public void testGetIteration() {
    new IteratorTester<Integer>(6, MODIFIABLE,
        newLinkedHashSet(asList(2, 3, 4, 7, 8)),
        IteratorTester.KnownOrder.KNOWN_ORDER) {
      private Multimap<String, Integer> multimap;

      @Override protected Iterator<Integer> newTargetIterator() {
        multimap = create();
        multimap.putAll("foo", asList(2, 3, 4));
        multimap.putAll("bar", asList(5, 6));
        multimap.putAll("foo", asList(7, 8));
        return multimap.get("foo").iterator();
      }

      @Override protected void verify(List<Integer> elements) {
        assertEquals(newHashSet(elements), multimap.get("foo"));
      }
    }.test();
  }

  @GwtIncompatible("unreasonably slow")
  public void testEntriesIteration() {
    @SuppressWarnings("unchecked")
    Set<Entry<String, Integer>> set = Sets.newLinkedHashSet(asList(
        Maps.immutableEntry("foo", 2),
        Maps.immutableEntry("foo", 3),
        Maps.immutableEntry("bar", 4),
        Maps.immutableEntry("bar", 5),
        Maps.immutableEntry("foo", 6)));

    new IteratorTester<Entry<String, Integer>>(6, MODIFIABLE, set,
        IteratorTester.KnownOrder.KNOWN_ORDER) {
      private Multimap<String, Integer> multimap;

      @Override protected Iterator<Entry<String, Integer>> newTargetIterator() {
        multimap = create();
        multimap.putAll("foo", asList(2, 3));
        multimap.putAll("bar", asList(4, 5));
        multimap.putAll("foo", asList(6));
        return multimap.entries().iterator();
      }

      @Override protected void verify(List<Entry<String, Integer>> elements) {
        assertEquals(newHashSet(elements), multimap.entries());
      }
    }.test();
  }

  @GwtIncompatible("unreasonably slow")
  public void testKeysIteration() {
    new IteratorTester<String>(6, MODIFIABLE, newArrayList("foo", "foo", "bar",
        "bar", "foo"), IteratorTester.KnownOrder.KNOWN_ORDER) {
      private Multimap<String, Integer> multimap;

      @Override protected Iterator<String> newTargetIterator() {
        multimap = create();
        multimap.putAll("foo", asList(2, 3));
        multimap.putAll("bar", asList(4, 5));
        multimap.putAll("foo", asList(6));
        return multimap.keys().iterator();
      }

      @Override protected void verify(List<String> elements) {
        assertEquals(elements, Lists.newArrayList(multimap.keys()));
      }
    }.test();
  }

  @GwtIncompatible("unreasonably slow")
  public void testValuesIteration() {
    new IteratorTester<Integer>(6, MODIFIABLE, newArrayList(2, 3, 4, 5, 6),
        IteratorTester.KnownOrder.KNOWN_ORDER) {
      private Multimap<String, Integer> multimap;

      @Override protected Iterator<Integer> newTargetIterator() {
        multimap = create();
        multimap.putAll("foo", asList(2, 3));
        multimap.putAll("bar", asList(4, 5));
        multimap.putAll("foo", asList(6));
        return multimap.values().iterator();
      }

      @Override protected void verify(List<Integer> elements) {
        assertEquals(elements, Lists.newArrayList(multimap.values()));
      }
    }.test();
  }

  @GwtIncompatible("unreasonably slow")
  public void testKeySetIteration() {
    new IteratorTester<String>(6, MODIFIABLE,
        newLinkedHashSet(asList("foo", "bar", "baz", "dog", "cat")),
        IteratorTester.KnownOrder.KNOWN_ORDER) {
      private Multimap<String, Integer> multimap;

      @Override protected Iterator<String> newTargetIterator() {
        multimap = create();
        multimap.putAll("foo", asList(2, 3));
        multimap.putAll("bar", asList(4, 5));
        multimap.putAll("foo", asList(6));
        multimap.putAll("baz", asList(7, 8));
        multimap.putAll("dog", asList(9));
        multimap.putAll("bar", asList(10, 11));
        multimap.putAll("cat", asList(12, 13, 14));
        return multimap.keySet().iterator();
      }

      @Override protected void verify(List<String> elements) {
        assertEquals(newHashSet(elements), multimap.keySet());
      }
    }.test();
  }

  @GwtIncompatible("unreasonably slow")
  public void testAsSetIteration() {
    @SuppressWarnings("unchecked")
    Set<Entry<String, Collection<Integer>>> set = newLinkedHashSet(asList(
        Maps.immutableEntry("foo",
            (Collection<Integer>) Sets.newHashSet(2, 3, 6)),
        Maps.immutableEntry("bar",
            (Collection<Integer>) Sets.newHashSet(4, 5, 10, 11)),
        Maps.immutableEntry("baz",
            (Collection<Integer>) Sets.newHashSet(7, 8)),
        Maps.immutableEntry("dog",
            (Collection<Integer>) Sets.newHashSet(9)),
        Maps.immutableEntry("cat",
            (Collection<Integer>) Sets.newHashSet(12, 13, 14))
        ));

    new IteratorTester<Entry<String, Collection<Integer>>>(6, MODIFIABLE, set,
        IteratorTester.KnownOrder.KNOWN_ORDER) {
      private Multimap<String, Integer> multimap;

      @Override protected Iterator<Entry<String, Collection<Integer>>>
          newTargetIterator() {
        multimap = create();
        multimap.putAll("foo", asList(2, 3));
        multimap.putAll("bar", asList(4, 5));
        multimap.putAll("foo", asList(6));
        multimap.putAll("baz", asList(7, 8));
        multimap.putAll("dog", asList(9));
        multimap.putAll("bar", asList(10, 11));
        multimap.putAll("cat", asList(12, 13, 14));
        return multimap.asMap().entrySet().iterator();
      }

      @Override protected void verify(
          List<Entry<String, Collection<Integer>>> elements) {
        assertEquals(newHashSet(elements), multimap.asMap().entrySet());
      }
    }.test();
  }
}
| apache-2.0 |
apache/commons-fileupload | src/test/java/org/apache/commons/fileupload2/Util.java | 2585 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.fileupload2;
import java.io.UnsupportedEncodingException;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.List;
import javax.servlet.http.HttpServletRequest;
import org.apache.commons.fileupload2.disk.DiskFileItemFactory;
import org.apache.commons.fileupload2.portlet.PortletFileUpload;
import org.apache.commons.fileupload2.servlet.ServletFileUpload;
import org.apache.commons.fileupload2.servlet.ServletRequestContext;
/**
* Test utility methods.
*
* @since 1.4
*/
/**
 * Test utility methods.
 *
 * @since 1.4
 */
public class Util {

    /**
     * Parses an upload with the default multipart content type.
     *
     * @param upload the upload implementation under test
     * @param bytes  raw multipart request body
     * @return the parsed file items
     * @throws FileUploadException if parsing fails
     */
    public static List<FileItem> parseUpload(final FileUpload upload, final byte[] bytes) throws FileUploadException {
        return parseUpload(upload, bytes, Constants.CONTENT_TYPE);
    }

    /**
     * Parses an upload with an explicit content type.
     *
     * @param upload      the upload implementation under test
     * @param bytes       raw multipart request body
     * @param contentType value for the request's Content-Type header
     * @return the parsed file items
     * @throws FileUploadException if parsing fails
     */
    public static List<FileItem> parseUpload(final FileUpload upload, final byte[] bytes, final String contentType)
            throws FileUploadException {
        final ServletRequestContext context =
                new ServletRequestContext(new MockHttpServletRequest(bytes, contentType));
        return upload.parseRequest(context);
    }

    /**
     * Parses an upload given as an ASCII string with the default content type.
     *
     * @param upload  the upload implementation under test
     * @param content multipart request body as US-ASCII text
     * @return the parsed file items
     * @throws UnsupportedEncodingException declared for API compatibility;
     *         not thrown by the charset-based conversion used here
     * @throws FileUploadException if parsing fails
     */
    public static List<FileItem> parseUpload(final FileUpload upload, final String content)
            throws UnsupportedEncodingException, FileUploadException {
        return parseUpload(upload, content.getBytes(StandardCharsets.US_ASCII), Constants.CONTENT_TYPE);
    }

    /**
     * Return a list of {@link FileUpload} implementations for parameterized tests.
     * @return a list of {@link FileUpload} implementations
     */
    public static List<FileUpload> fileUploadImplementations() {
        final ServletFileUpload servletUpload = new ServletFileUpload(new DiskFileItemFactory());
        final PortletFileUpload portletUpload = new PortletFileUpload(new DiskFileItemFactory());
        return Arrays.asList(servletUpload, portletUpload);
    }
}
| apache-2.0 |