repo_name stringlengths 5 108 | path stringlengths 6 333 | size stringlengths 1 6 | content stringlengths 4 977k | license stringclasses 15
values |
|---|---|---|---|---|
charithag/iot-server-samples | AndroidTVGateway/agent/app/src/main/java/org/wso2/androidtv/agent/util/dto/AccessTokenInfo.java | 1603 | /*
* Copyright (c) 2016, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and limitations under the License.
*
*/
package org.wso2.androidtv.agent.util.dto;
/**
 * Holds the OAuth access-token information returned from the token API call.
 * Property names intentionally mirror the JSON keys of the token endpoint
 * response (token_type, expires_in, refresh_token, access_token), so the
 * snake_case accessors must not be renamed.
 */
public class AccessTokenInfo {

    private String access_token;
    private String refresh_token;
    private String token_type;
    private String expires_in;

    /** @return the access token value used to authorise API calls */
    public String getAccess_token() {
        return access_token;
    }

    public void setAccess_token(String access_token) {
        this.access_token = access_token;
    }

    /** @return the refresh token used to obtain a new access token */
    public String getRefresh_token() {
        return refresh_token;
    }

    public void setRefresh_token(String refresh_token) {
        this.refresh_token = refresh_token;
    }

    /** @return the token type, e.g. "bearer" */
    public String getToken_type() {
        return token_type;
    }

    public void setToken_type(String token_type) {
        this.token_type = token_type;
    }

    /** @return the token lifetime (seconds), as returned by the server */
    public String getExpires_in() {
        return expires_in;
    }

    public void setExpires_in(String expires_in) {
        this.expires_in = expires_in;
    }
}
| apache-2.0 |
jbosschina/cluster | jgroups/demo/src/main/java/org/jgroups/demo/test/ViewTest.java | 1223 | package org.jgroups.demo.test;
import org.jgroups.JChannel;
import org.jgroups.View;
/**
 * Manual demo/test of {@link org.jgroups.View}: connects several channels to the
 * same cluster and prints view details while the members leave at staggered
 * intervals, so the changing cluster view can be observed on the console.
 */
public class ViewTest extends TestBase {

    /**
     * Opens three channels that close themselves after 15, 10 and 5 seconds
     * respectively, then sleeps 15 seconds so all the scheduled closes (and the
     * resulting view changes) happen before the method returns.
     */
    public void test() throws Exception {
        System.out.println("'org.jgroups.View' test.");
        viewchecker(15);
        viewchecker(10);
        viewchecker(5);
        // Wait long enough for the longest-lived channel (15 s) to close.
        Thread.sleep(1000 * 15);
    }

    /**
     * Connects a new channel to the "TestCluster" cluster, prints the current
     * view's id, creator and coordinator (the first member of the view), and
     * starts a background thread that closes the channel after {@code time}
     * seconds.
     *
     * @param time seconds to keep the channel connected before closing it
     */
    private void viewchecker(int time) throws Exception {
        JChannel channel = new JChannel();
        channel.connect("TestCluster");
        View view = channel.getView();
        System.out.println("View ID: " + view.getViewId());
        System.out.println("View creater: " + view.getCreator());
        System.out.println("View coordinator: " + view.getMembers().get(0));
        // Delay is converted from seconds to milliseconds here.
        new Thread(new ChannelCloseThread(channel, 1000 * time)).start();
    }

    /** Runnable that sleeps for a fixed delay and then closes the given channel. */
    private class ChannelCloseThread implements Runnable {

        private JChannel channel;
        private int time; // delay in milliseconds before closing the channel

        public ChannelCloseThread(JChannel channel, int time) {
            super();
            this.channel = channel;
            this.time = time;
        }

        public void run() {
            try {
                Thread.sleep(time);
                channel.close();
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
        }
    }

    /** Entry point for running the demo stand-alone. */
    public static void main(String[] args) throws Exception {
        new ViewTest().test();
    }
}
| apache-2.0 |
masonmei/java-agent | rpc/src/main/java/com/baidu/oped/apm/rpc/StateChangeEventListener.java | 993 | /*
*
* * Copyright 2014 NAVER Corp.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
*
*
*/
package com.baidu.oped.apm.rpc;
import com.baidu.oped.apm.rpc.common.SocketStateCode;
/**
 * Listener notified of socket state transitions for a {@code ApmSocket}.
 *
 * @author Taejin Koo
 */
public interface StateChangeEventListener<S extends ApmSocket> {

    /**
     * Called when the given socket has moved to {@code stateCode}.
     * Implementations may throw; see {@link #exceptionCaught}.
     */
    void eventPerformed(S apmSocket, SocketStateCode stateCode) throws Exception;

    /**
     * Called when handling a state change raised {@code e}; {@code stateCode}
     * is the state that was being processed at the time.
     */
    void exceptionCaught(S apmSocket, SocketStateCode stateCode, Throwable e);
}
| apache-2.0 |
RedHelixOrg/RedHelix-1 | redhx-build-all/redhx-core-api/src/main/java/org/redhelix/core/computer/system/processor/RedHxProcessorModel.java | 1003 | /*
* Copyright 2015 JBlade LLC
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License
*/
package org.redhelix.core.computer.system.processor;
import org.redhelix.core.util.RedHxStringProperty;
/**
 * String property identifying the model of a computer system processor.
 *
 * @since RedHelix Version 0.1
 * @author Hank Bruning
 *
 */
public interface RedHxProcessorModel extends RedHxStringProperty {

    /**
     * The maximum number of characters allowed in a Processor Model Name.
     */
    public static final byte MAX_CHAR_COUNT_RED_HELIX_DEFINED = 30; // arbitrary.
}
| apache-2.0 |
theBoyMo/SpotifyStreamer | app/src/main/java/com/example/spotifystreamer/utils/Utils.java | 2403 | package com.example.spotifystreamer.utils;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.net.ConnectivityManager;
import android.net.NetworkInfo;
import android.os.IBinder;
import android.view.inputmethod.InputMethodManager;
import android.widget.Toast;
import com.example.spotifystreamer.model.MyTrack;
/**
 * Static helper methods shared across the app: short toasts, soft-keyboard
 * hiding, network connectivity checks, and building a "share track" intent.
 *
 * Network connectivity method based on:
 * http://developer.android.com/training/monitoring-device-state/connectivity-monitoring.html#DetermineConnection
 */
public class Utils {

    private static final String LOG_TAG = Utils.class.getSimpleName();
    private static final boolean L = true;

    /** Utility holder only — never instantiated. */
    private Utils() {
        throw new AssertionError();
    }

    /** Shows {@code string} to the user as a short-duration toast. */
    public static void showToast(Context context, String string) {
        Toast.makeText(context, string, Toast.LENGTH_SHORT).show();
    }

    /** Hides the soft keyboard attached to {@code windowToken}, e.g. when a search is executed. */
    public static void hideKeyboard(Activity activity, IBinder windowToken) {
        final InputMethodManager inputManager =
                (InputMethodManager) activity.getSystemService(Context.INPUT_METHOD_SERVICE);
        inputManager.hideSoftInputFromWindow(windowToken, 0);
    }

    /**
     * Reports whether an active network is currently connected or in the
     * process of connecting — useful to detect connection drop-offs.
     */
    public static boolean isConnected(Context context) {
        final ConnectivityManager connectivityManager =
                (ConnectivityManager) context.getSystemService(Context.CONNECTIVITY_SERVICE);
        final NetworkInfo network = connectivityManager.getActiveNetworkInfo();
        if (network == null) {
            return false;
        }
        return network.isConnectedOrConnecting();
    }

    /**
     * Builds an ACTION_SEND intent that lets the user share the preview URL of
     * {@code track} as plain text, with a pre-filled subject line.
     */
    public static Intent getShareIntent(MyTrack track) {
        final Intent intent = new Intent(Intent.ACTION_SEND);
        intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TASK); // don't place sharing activity in backstack
        intent.setType("text/plain");
        intent.putExtra(Intent.EXTRA_SUBJECT, "Thought you might like to hear a snippet of "
                + track.getTrackTitle() + ", by " + track.getArtistName());
        intent.putExtra(Intent.EXTRA_TEXT, track.getPreviewUrl());
        return intent;
    }
}
| apache-2.0 |
PetrGasparik/cas | cas-server-support-radius/src/main/java/org/jasig/cas/config/RadiusConfiguration.java | 3922 | package org.jasig.cas.config;
import org.jasig.cas.adaptors.radius.JRadiusServerImpl;
import org.jasig.cas.adaptors.radius.RadiusClientFactory;
import org.jasig.cas.adaptors.radius.RadiusProtocol;
import org.jasig.cas.adaptors.radius.RadiusServer;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import java.util.ArrayList;
import java.util.List;
/**
 * This this {@link RadiusConfiguration}.
 *
 * Spring configuration that assembles the RADIUS client/server beans from
 * {@code cas.radius.*} properties.
 *
 * @author Misagh Moayyed
 * @since 5.0.0
 */
@Configuration("radiusConfiguration")
public class RadiusConfiguration {

    /**
     * The Protocol.
     */
    @Value("${cas.radius.server.protocol:EAP_MSCHAPv2}")
    private RadiusProtocol protocol;

    /**
     * The Retries.
     */
    @Value("${cas.radius.server.retries:3}")
    private int retries;

    /**
     * The Nas identifier.
     */
    @Value("${cas.radius.server.nasIdentifier:-1}")
    private long nasIdentifier;

    /**
     * The Nas port.
     */
    @Value("${cas.radius.server.nasPort:-1}")
    private long nasPort;

    /**
     * The Nas port id.
     */
    @Value("${cas.radius.server.nasPortId:-1}")
    private long nasPortId;

    /**
     * The Nas real port.
     */
    @Value("${cas.radius.server.nasRealPort:-1}")
    private long nasRealPort;

    /**
     * The Nas port type.
     */
    @Value("${cas.radius.server.nasPortType:-1}")
    private int nasPortType;

    /**
     * The Nas ip.
     */
    @Value("${cas.radius.server.nasIpAddress:}")
    private String nasIp;

    /**
     * The Nas ipv 6.
     */
    @Value("${cas.radius.server.nasIpv6Address:}")
    private String nasIpv6;

    /**
     * The Inet address.
     */
    @Value("${cas.radius.client.inetaddr:localhost}")
    private String inetAddress;

    /**
     * The Accounting port.
     */
    @Value("${cas.radius.client.port.acct:" + RadiusServer.DEFAULT_PORT_ACCOUNTING + "}")
    private int accountingPort;

    /**
     * The Authentication port.
     */
    @Value("${cas.radius.client.port.authn:" + RadiusServer.DEFAULT_PORT_AUTHENTICATION + "}")
    private int authenticationPort;

    /**
     * The Socket timeout.
     */
    @Value("${cas.radius.client.socket.timeout:60}")
    private int socketTimeout;

    /**
     * The Shared secret.
     */
    @Value("${cas.radius.client.sharedsecret:N0Sh@ar3d$ecReT}")
    private String sharedSecret;

    /**
     * Radius server j radius server.
     *
     * @return the j radius server, configured with the NAS properties above
     */
    @Bean(name="radiusServer")
    public JRadiusServerImpl radiusServer() {
        final JRadiusServerImpl impl = new JRadiusServerImpl(this.protocol, radiusClientFactory());
        impl.setRetries(this.retries);
        impl.setNasIdentifier(this.nasIdentifier);
        impl.setNasPort(this.nasPort);
        impl.setNasPortId(this.nasPortId);
        impl.setNasRealPort(this.nasRealPort);
        impl.setNasIpAddress(this.nasIp);
        impl.setNasIpv6Address(this.nasIpv6);
        return impl;
    }

    /**
     * Radius servers list.
     *
     * @return a list containing the single configured radius server
     */
    @Bean(name="radiusServers")
    public List<JRadiusServerImpl> radiusServers() {
        // Parameterized return type (was a raw List) so callers and the Spring
        // context get a typed bean; erasure-identical, so binary compatible.
        final List<JRadiusServerImpl> list = new ArrayList<>();
        list.add(radiusServer());
        return list;
    }

    /**
     * Radius client factory radius client factory.
     *
     * @return the radius client factory built from the client properties above
     */
    @Bean(name="radiusClientFactory")
    public RadiusClientFactory radiusClientFactory() {
        final RadiusClientFactory factory = new RadiusClientFactory();
        factory.setAccountingPort(this.accountingPort);
        factory.setAuthenticationPort(this.authenticationPort);
        factory.setInetAddress(this.inetAddress);
        factory.setSharedSecret(this.sharedSecret);
        factory.setSocketTimeout(this.socketTimeout);
        return factory;
    }
}
| apache-2.0 |
equella/Equella | Source/Plugins/Core/com.equella.core/src/com/tle/web/connectors/brightspace/editor/BrightspaceConnectorEditor.java | 12821 | /*
* Licensed to The Apereo Foundation under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional
* information regarding copyright ownership.
*
* The Apereo Foundation licenses this file to you under the Apache License,
* Version 2.0, (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.tle.web.connectors.brightspace.editor;
import com.google.common.base.Strings;
import com.tle.common.connectors.entity.Connector;
import com.tle.core.connectors.brightspace.BrightspaceConnectorConstants;
import com.tle.core.connectors.brightspace.service.BrightspaceConnectorService;
import com.tle.core.connectors.service.ConnectorEditingBean;
import com.tle.core.connectors.service.ConnectorEditingSession;
import com.tle.core.entity.EntityEditingSession;
import com.tle.core.guice.Bind;
import com.tle.core.institution.InstitutionService;
import com.tle.core.services.user.UserSessionService;
import com.tle.web.connectors.dialog.LMSAuthDialog;
import com.tle.web.connectors.dialog.LMSAuthDialog.LMSAuthUrlCallable;
import com.tle.web.connectors.editor.AbstractConnectorEditorSection;
import com.tle.web.freemarker.FreemarkerFactory;
import com.tle.web.freemarker.annotations.ViewFactory;
import com.tle.web.sections.SectionInfo;
import com.tle.web.sections.SectionTree;
import com.tle.web.sections.annotations.EventFactory;
import com.tle.web.sections.annotations.EventHandlerMethod;
import com.tle.web.sections.equella.annotation.PlugKey;
import com.tle.web.sections.events.RenderEventContext;
import com.tle.web.sections.events.js.EventGenerator;
import com.tle.web.sections.render.Label;
import com.tle.web.sections.render.SectionRenderable;
import com.tle.web.sections.result.util.KeyLabel;
import com.tle.web.sections.standard.Button;
import com.tle.web.sections.standard.Div;
import com.tle.web.sections.standard.TextField;
import com.tle.web.sections.standard.annotations.Component;
import java.util.Map;
import javax.annotation.PostConstruct;
import javax.inject.Inject;
@SuppressWarnings("nls")
@Bind
public class BrightspaceConnectorEditor
    extends AbstractConnectorEditorSection<
        BrightspaceConnectorEditor.BrightspaceConnectorEditorModel> {

  // Appended to user-session attribute keys so values stored by this editor
  // don't clash with other components using the same base keys.
  private static final String POSTFIX_KEY = ".BrightspaceConnectorEditor";

  // Localised labels/messages for app-test and admin sign-in status display.
  @PlugKey("editor.testapp.ok")
  private static Label TEST_APP_OK;

  @PlugKey("editor.testapp.fail")
  private static Label TEST_APP_FAIL;

  @PlugKey("editor.admin.ok")
  private static Label ADMIN_OK;

  @PlugKey("editor.admin.fail")
  private static Label ADMIN_FAIL;

  // Validation error messages keyed into the errors map in customValidate().
  @PlugKey("editor.validation.appid")
  private static Label BAD_APP_ID_ERROR;

  @PlugKey("editor.validation.appkey")
  private static Label BAD_APP_KEY_ERROR;

  @PlugKey("editor.validation.testapp")
  private static Label NOT_TESTED_ERROR;

  @PlugKey("editor.admin.signedinas")
  private static String KEY_SIGNED_IN_AS;

  // Form fields for the Brightspace application credentials.
  @Component(stateful = false)
  private TextField appId;

  @Component(stateful = false)
  private TextField appKey;

  @Component
  @PlugKey("editor.button.testapp")
  private Button testAppButton;

  @Inject @Component private LMSAuthDialog authDialog;

  @ViewFactory private FreemarkerFactory view;
  @EventFactory private EventGenerator events;

  @Inject private BrightspaceConnectorService brightspaceConnectorService;
  @Inject private InstitutionService institutionService;
  @Inject private UserSessionService userSessionService;

  /**
   * Wires the auth dialog with a callback that builds the Brightspace
   * authorisation URL from the app id/key stored in the current editing
   * session's connector bean.
   */
  @PostConstruct
  public void init() {
    authDialog.setAuthUrlCallable(
        new LMSAuthUrlCallable() {
          @Override
          public String getAuthorisationUrl(SectionInfo info, String forwardUrl) {
            final BrightspaceConnectorEditorModel model = getModel(info);
            final ConnectorEditingSession session =
                getEntityService().loadSession(model.getSessionId());
            final ConnectorEditingBean connector = session.getBean();
            return brightspaceConnectorService.getAuthorisationUrl(
                connector.getAttribute(BrightspaceConnectorConstants.FIELD_APP_ID),
                connector.getAttribute(BrightspaceConnectorConstants.FIELD_APP_KEY),
                connector.getServerUrl(),
                forwardUrl,
                POSTFIX_KEY);
          }
        });
  }

  /**
   * Populates the model with app-test and admin sign-in status (read from the
   * connector bean's attributes) and renders the editor template. Status
   * sections are only shown once the server URL has been tested
   * (model.getTestedUrl() != null).
   */
  @Override
  protected SectionRenderable renderFields(
      RenderEventContext context, EntityEditingSession<ConnectorEditingBean, Connector> session) {
    final BrightspaceConnectorEditorModel model = getModel(context);
    final ConnectorEditingBean connector = session.getBean();

    if (model.getTestedUrl() != null) {
      model.setTrustedUrl(
          institutionService.institutionalise(BrightspaceConnectorConstants.AUTH_URL));

      // App test status: attribute is absent until testApp() has run.
      final String appOkStr = connector.getAttribute(BrightspaceConnectorConstants.FIELD_APP_OK);
      if (appOkStr != null) {
        final boolean appOk = Boolean.parseBoolean(appOkStr);
        if (appOk) {
          model.setLtiConsumersUrl(institutionService.institutionalise("access/lticonsumers.do"));
        }
        model.setTestAppStatusClass(appOk ? "ok" : "fail");
        model.setTestAppStatus(appOk ? TEST_APP_OK.getText() : TEST_APP_FAIL.getText());
        model.setAppOk(appOk);
      }

      // Admin sign-in status: shows "signed in as <user>" when a username was stored.
      final String adminOkStr =
          connector.getAttribute(BrightspaceConnectorConstants.FIELD_ADMIN_OK);
      if (adminOkStr != null) {
        final boolean adminOk = Boolean.parseBoolean(adminOkStr);
        model.setAdminStatusClass(adminOk ? "ok" : "fail");
        final String adminStatusText;
        if (adminOk) {
          final String adminUsername =
              connector.getAttribute(BrightspaceConnectorConstants.FIELD_ADMIN_USERNAME);
          adminStatusText =
              (adminUsername != null
                  ? new KeyLabel(KEY_SIGNED_IN_AS, adminUsername).getText()
                  : ADMIN_OK.getText());
        } else {
          adminStatusText = ADMIN_FAIL.getText();
        }
        model.setAdminStatus(adminStatusText);
        model.setAdminOk(adminOk);
      }
    }

    return view.createResult("brightspaceconnector.ftl", context);
  }

  /**
   * Registers event handlers: the test-app button fires an AJAX update of this
   * editor's div, and the auth dialog's OK triggers adminSignIn().
   */
  @Override
  public void registered(String id, SectionTree tree) {
    super.registered(id, tree);
    testAppButton.setClickHandler(
        ajax.getAjaxUpdateDomFunction(
            tree, this, events.getEventHandler("testApp"), getAjaxDivId()));
    authDialog.setOkCallback(events.getSubmitValuesFunction("adminSignIn"));
  }

  /**
   * Event handler: tests the entered app id/key against the connector's server
   * URL and stores the boolean result in the FIELD_APP_OK attribute.
   */
  @EventHandlerMethod
  public void testApp(SectionInfo info) {
    final EntityEditingSession<ConnectorEditingBean, Connector> session = saveToSession(info);
    final ConnectorEditingBean connector = session.getBean();
    final String testResponse =
        brightspaceConnectorService.testApplication(
            appId.getValue(info), appKey.getValue(info), connector.getServerUrl());
    // Service signals success with the literal response "ok".
    final boolean appOk = "ok".equals(testResponse);
    connector.setAttribute(BrightspaceConnectorConstants.FIELD_APP_OK, appOk);
    getEntityService().saveSession(session);
  }

  /**
   * Event handler invoked after the admin has authorised via the auth dialog:
   * moves the user id/key from the HTTP session into the connector bean (key is
   * encrypted), resolves the admin's username via whoAmI, and records whether
   * sign-in succeeded in FIELD_ADMIN_OK.
   */
  @EventHandlerMethod
  public void adminSignIn(SectionInfo info) {
    final EntityEditingSession<ConnectorEditingBean, Connector> session = saveToSession(info);
    final ConnectorEditingBean connector = session.getBean();

    // Credentials were parked in the user session by the auth flow (POSTFIX_KEY
    // matches the key suffix used in init()).
    final String userId =
        userSessionService.getAttribute(
            BrightspaceConnectorConstants.SESSION_KEY_USER_ID + POSTFIX_KEY);
    final String userKey =
        userSessionService.getAttribute(
            BrightspaceConnectorConstants.SESSION_KEY_USER_KEY + POSTFIX_KEY);

    connector.setAttribute(BrightspaceConnectorConstants.FIELD_ADMIN_USER_ID, userId);
    connector.setAttribute(
        BrightspaceConnectorConstants.FIELD_ADMIN_USER_KEY,
        brightspaceConnectorService.encrypt(userKey));

    // One-shot credentials: clear them from the session once copied.
    userSessionService.removeAttribute(
        BrightspaceConnectorConstants.SESSION_KEY_USER_ID + POSTFIX_KEY);
    userSessionService.removeAttribute(
        BrightspaceConnectorConstants.SESSION_KEY_USER_KEY + POSTFIX_KEY);

    final String username =
        brightspaceConnectorService.whoAmI(
            connector.getAttribute(BrightspaceConnectorConstants.FIELD_APP_ID),
            connector.getAttribute(BrightspaceConnectorConstants.FIELD_APP_KEY),
            connector.getServerUrl(),
            userId,
            userKey);
    connector.setAttribute(BrightspaceConnectorConstants.FIELD_ADMIN_USERNAME, username);

    // TODO: test the admin account for privs?
    connector.setAttribute(
        BrightspaceConnectorConstants.FIELD_ADMIN_OK,
        userId != null && userKey != null && username != null);

    getEntityService().saveSession(session);
  }

  /**
   * Validates the form: app id and key must be non-empty and the app must have
   * been successfully tested. Note the early returns: only the first failing
   * check is reported per submission.
   */
  @Override
  protected void customValidate(
      SectionInfo info, ConnectorEditingBean connector, Map<String, Object> errors) {
    final BrightspaceConnectorEditorModel model = getModel(info);
    if (Strings.isNullOrEmpty(appId.getValue(info))) {
      errors.put("appid", BAD_APP_ID_ERROR.getText());
      return;
    }
    if (Strings.isNullOrEmpty(appKey.getValue(info))) {
      errors.put("appkey", BAD_APP_KEY_ERROR.getText());
      return;
    }
    if (!connector.getAttribute(BrightspaceConnectorConstants.FIELD_APP_OK, false)) {
      errors.put("testapp", NOT_TESTED_ERROR.getText());
    }
    model.setErrors(errors);
  }

  /** Loads the stored app id/key from the connector bean into the form fields. */
  @Override
  protected void customLoad(SectionInfo info, ConnectorEditingBean connector) {
    appId.setValue(info, connector.getAttribute(BrightspaceConnectorConstants.FIELD_APP_ID));
    appKey.setValue(info, connector.getAttribute(BrightspaceConnectorConstants.FIELD_APP_KEY));
  }

  /** Saves the form fields back into the connector bean's attributes. */
  @Override
  protected void customSave(SectionInfo info, ConnectorEditingBean connector) {
    connector.setAttribute(BrightspaceConnectorConstants.FIELD_APP_ID, appId.getValue(info));
    connector.setAttribute(BrightspaceConnectorConstants.FIELD_APP_KEY, appKey.getValue(info));
  }

  /** Creates a fresh connector entity of the Brightspace type. */
  @Override
  protected Connector createNewConnector() {
    return new Connector(BrightspaceConnectorConstants.CONNECTOR_TYPE);
  }

  /** Id of the div that AJAX updates (see registered()) target. */
  @Override
  protected String getAjaxDivId() {
    return "brightspacesetup";
  }

  @Override
  public Object instantiateModel(SectionInfo info) {
    return new BrightspaceConnectorEditorModel();
  }

  /** No username div for this connector type. */
  @Override
  public Div getUsernameDiv() {
    return null;
  }

  public TextField getAppId() {
    return appId;
  }

  public TextField getAppKey() {
    return appKey;
  }

  public Button getTestAppButton() {
    return testAppButton;
  }

  public LMSAuthDialog getAuthDialog() {
    return authDialog;
  }

  /**
   * View model carrying app-test and admin sign-in status, plus the URLs
   * rendered by brightspaceconnector.ftl.
   */
  public class BrightspaceConnectorEditorModel
      extends AbstractConnectorEditorSection<BrightspaceConnectorEditorModel>
          .AbstractConnectorEditorModel {
    private boolean appOk;
    private String testAppStatus;
    private String testAppStatusClass;
    private String trustedUrl;
    private String ltiConsumersUrl;
    private boolean adminOk;
    private String adminStatus;
    private String adminStatusClass;

    public boolean isAppOk() {
      return appOk;
    }

    public void setAppOk(boolean appOk) {
      this.appOk = appOk;
    }

    public String getTestAppStatus() {
      return testAppStatus;
    }

    public void setTestAppStatus(String testAppStatus) {
      this.testAppStatus = testAppStatus;
    }

    public String getTestAppStatusClass() {
      return testAppStatusClass;
    }

    public void setTestAppStatusClass(String testAppStatusClass) {
      this.testAppStatusClass = testAppStatusClass;
    }

    public String getTrustedUrl() {
      return trustedUrl;
    }

    public void setTrustedUrl(String trustedUrl) {
      this.trustedUrl = trustedUrl;
    }

    public String getLtiConsumersUrl() {
      return ltiConsumersUrl;
    }

    public void setLtiConsumersUrl(String ltiConsumersUrl) {
      this.ltiConsumersUrl = ltiConsumersUrl;
    }

    public boolean isAdminOk() {
      return adminOk;
    }

    public void setAdminOk(boolean adminOk) {
      this.adminOk = adminOk;
    }

    public String getAdminStatus() {
      return adminStatus;
    }

    public void setAdminStatus(String adminStatus) {
      this.adminStatus = adminStatus;
    }

    public String getAdminStatusClass() {
      return adminStatusClass;
    }

    public void setAdminStatusClass(String adminStatusClass) {
      this.adminStatusClass = adminStatusClass;
    }
  }
}
| apache-2.0 |
ifgeny87/Inno-Classroom-Work | src/main/java/ru/innolearn/day25/patterns/decorator_test/Кофейко.java | 304 | package ru.innolearn.day25.patterns.decorator_test;
/**
 * Decorator-pattern demo: the concrete coffee component ("Кофейко" = "Coffee").
 * Created in project Inno-Classroom-Work on 17.01.17.
 */
public class Кофейко implements КофейныйКомпонент
{
    /** Prints this component's ingredient list — just the coffee itself. */
    @Override
    public void показатьИнгридиенты()
    {
        System.out.println("Кофейко");
    }
}
| apache-2.0 |
cvazquezlos/LOGANALYZER | testloganalyzer/src/main/java/elastest/loganalyzer/es/client/model/Project.java | 1304 | package elastest.loganalyzer.es.client.model;
import java.util.List;
import org.springframework.data.annotation.Id;
import org.springframework.data.elasticsearch.annotations.Document;
@Document(indexName = "projects", type = "projects")
public class Project {

    /**
     * Returns the project with the greatest id in {@code projects}. Ties keep
     * the earliest such project. When the list is empty, a placeholder
     * {@code Project} whose id is -1 is returned instead.
     */
    public static Project findGreater(List<Project> projects) {
        if (projects.isEmpty()) {
            Project placeholder = new Project();
            placeholder.setId(-1);
            return placeholder;
        }
        Project greatest = projects.get(0);
        for (Project candidate : projects) {
            if (candidate.getId() > greatest.getId()) {
                greatest = candidate;
            }
        }
        return greatest;
    }

    // Elasticsearch document id.
    @Id
    private int id;
    private String name;
    private int num_execs; // number of executions recorded for this project

    public Project() {
    }

    public Project(int id, String name, int num_execs) {
        this.id = id;
        this.name = name;
        this.num_execs = num_execs;
    }

    public int getId() {
        return id;
    }

    public void setId(int id) {
        this.id = id;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public int getNum_execs() {
        return num_execs;
    }

    public void setNum_execs(int num_execs) {
        this.num_execs = num_execs;
    }

    @Override
    public String toString() {
        return "Project [id=" + id + ", name=" + name + ", num_execs=" + num_execs + "]";
    }
}
| apache-2.0 |
tripodsan/jackrabbit-oak | oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/IndexNode.java | 5607 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.oak.plugins.index.lucene;
import static com.google.common.base.Preconditions.checkState;
import static org.apache.jackrabbit.oak.plugins.index.lucene.LuceneIndexConstants.INDEX_DATA_CHILD_NAME;
import static org.apache.jackrabbit.oak.plugins.index.lucene.LuceneIndexConstants.PERSISTENCE_FILE;
import static org.apache.jackrabbit.oak.plugins.index.lucene.LuceneIndexConstants.PERSISTENCE_NAME;
import static org.apache.jackrabbit.oak.plugins.index.lucene.LuceneIndexConstants.PERSISTENCE_PATH;
import java.io.File;
import java.io.IOException;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import javax.annotation.Nullable;
import org.apache.jackrabbit.oak.commons.PathUtils;
import org.apache.jackrabbit.oak.plugins.index.lucene.util.SuggestHelper;
import org.apache.jackrabbit.oak.spi.state.NodeState;
import org.apache.jackrabbit.oak.spi.state.ReadOnlyBuilder;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.suggest.analyzing.AnalyzingInfixSuggester;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
/**
 * Holds an opened Lucene index for a single index definition: the Directory,
 * an IndexReader/IndexSearcher pair, and (optionally) a suggester. Instances
 * are reference-guarded via {@link #acquire()}/{@link #release()} so the index
 * can be closed safely while readers may still be in flight.
 */
class IndexNode {

    /**
     * Opens the index stored under the given definition node. The index data is
     * read either from the {@code :data} child node (an OakDirectory, optionally
     * wrapped by the IndexCopier for local copies) or, when file persistence is
     * configured, from the filesystem path stored on the definition.
     *
     * @return the opened IndexNode, or null when no index data exists yet
     */
    static IndexNode open(String indexPath, NodeState root, NodeState defnNodeState, @Nullable IndexCopier cloner)
            throws IOException {
        Directory directory = null;
        IndexDefinition definition = new IndexDefinition(root, defnNodeState);
        NodeState data = defnNodeState.getChildNode(INDEX_DATA_CHILD_NAME);
        if (data.exists()) {
            directory = new OakDirectory(new ReadOnlyBuilder(defnNodeState), definition, true);
            if (cloner != null) {
                directory = cloner.wrapForRead(indexPath, definition, directory);
            }
        } else if (PERSISTENCE_FILE.equalsIgnoreCase(defnNodeState.getString(PERSISTENCE_NAME))) {
            String path = defnNodeState.getString(PERSISTENCE_PATH);
            if (path != null && new File(path).exists()) {
                directory = FSDirectory.open(new File(path));
            }
        }
        if (directory != null) {
            try {
                OakDirectory suggestDirectory = null;
                if (definition.isSuggestEnabled()) {
                    suggestDirectory = new OakDirectory(defnNodeState.builder(), ":suggest-data", definition, false);
                }
                IndexNode index = new IndexNode(PathUtils.getName(indexPath), definition, directory, suggestDirectory);
                // Ownership transferred to the IndexNode; null-out so the
                // finally block below doesn't close it on the success path.
                directory = null; // closed in Index.close()
                return index;
            } finally {
                // Only reached with a non-null directory when the IndexNode
                // constructor threw — close to avoid leaking the Directory.
                if (directory != null) {
                    directory.close();
                }
            }
        }
        return null;
    }

    private final String name;

    private final IndexDefinition definition;

    private final Directory directory;

    private final Directory suggestDirectory;

    private final IndexReader reader;

    private final IndexSearcher searcher;

    // Read lock guards active users (acquire/release); write lock is taken
    // exclusively by close() so it waits for all readers to drain.
    private final ReadWriteLock lock = new ReentrantReadWriteLock();

    private final AnalyzingInfixSuggester lookup;

    private boolean closed = false;

    IndexNode(String name, IndexDefinition definition, Directory directory, final OakDirectory suggestDirectory)
            throws IOException {
        this.name = name;
        this.definition = definition;
        this.directory = directory;
        this.reader = DirectoryReader.open(directory);
        this.searcher = new IndexSearcher(reader);
        this.suggestDirectory = suggestDirectory;
        if (suggestDirectory != null) {
            this.lookup = SuggestHelper.getLookup(suggestDirectory, definition.getAnalyzer());
        } else {
            this.lookup = null;
        }
    }

    String getName() {
        return name;
    }

    IndexDefinition getDefinition() {
        return definition;
    }

    IndexSearcher getSearcher() {
        return searcher;
    }

    Directory getSuggestDirectory() {
        return suggestDirectory;
    }

    AnalyzingInfixSuggester getLookup() {
        return lookup;
    }

    /**
     * Takes a read lock on this node. Returns false (and releases the lock)
     * when the node has already been closed; callers must not use it then.
     * Every successful acquire() must be paired with a release().
     */
    boolean acquire() {
        lock.readLock().lock();
        if (closed) {
            lock.readLock().unlock();
            return false;
        } else {
            return true;
        }
    }

    /** Releases the read lock taken by a successful {@link #acquire()}. */
    void release() {
        lock.readLock().unlock();
    }

    /**
     * Marks the node closed (under the write lock, so it waits for in-flight
     * readers) and then closes the reader and the underlying directory.
     * Must only be called once; a second call fails the checkState.
     */
    void close() throws IOException {
        lock.writeLock().lock();
        try {
            checkState(!closed);
            closed = true;
        } finally {
            lock.writeLock().unlock();
        }

        try {
            reader.close();
        } finally {
            directory.close();
        }
    }
}
| apache-2.0 |
GregoryBevan/security | src/main/java/com/elgregos/security/authentication/PersonalLoginModule.java | 3970 | package com.elgregos.security.authentication;
import java.io.IOException;
import java.security.Principal;
import java.security.acl.Group;
import java.util.Map;
import java.util.Set;
import javax.naming.InitialContext;
import javax.naming.NamingException;
import javax.security.auth.Subject;
import javax.security.auth.callback.Callback;
import javax.security.auth.callback.CallbackHandler;
import javax.security.auth.callback.NameCallback;
import javax.security.auth.callback.PasswordCallback;
import javax.security.auth.callback.UnsupportedCallbackException;
import javax.security.auth.login.LoginException;
import javax.security.auth.spi.LoginModule;
import lombok.extern.slf4j.Slf4j;
import com.elgregos.security.data.entities.Role;
import com.elgregos.security.service.LoginVerifier;
@Slf4j
public class PersonalLoginModule implements LoginModule {
private Subject subject;
private CallbackHandler callbackHandler;
private Map<String, ?> sharedState;
private Map<String, ?> options;
private UserPrincipal userPrincipal;
private LoginVerifier loginVerifier;
private boolean succeeded = false;
@Override
public void initialize(final Subject subject, final CallbackHandler callbackHandler, final Map<String, ?> sharedState,
final Map<String, ?> options) {
log.info("init login module");
this.subject = subject;
this.callbackHandler = callbackHandler;
this.sharedState = sharedState;
this.options = options;
try {
this.loginVerifier = (LoginVerifier) new InitialContext().lookup("java:global/security/service/LoginVerifier");
} catch (final NamingException e) {
throw new IllegalStateException("Can't lookup LoginVerifier service");
}
}
@Override
public boolean login() throws LoginException {
System.out.println("login");
if (this.callbackHandler == null) {
throw new LoginException("CallbackHandler can't be null");
}
final Callback[] callbacks = new Callback[2];
callbacks[0] = new NameCallback("name:");
callbacks[1] = new PasswordCallback("password:", false);
try {
this.callbackHandler.handle(callbacks);
} catch (IOException | UnsupportedCallbackException e) {
throw new LoginException("CallbackHandler error");
}
final String email = ((NameCallback) callbacks[0]).getName();
final PasswordCallback passwordCallback = (PasswordCallback) callbacks[1];
final String password = String.valueOf(passwordCallback.getPassword());
passwordCallback.clearPassword();
this.succeeded = this.loginVerifier.checkLogin(email, password);
if (this.succeeded) {
createIdentity(email);
}
return this.succeeded;
}
private void createIdentity(final String username) {
if (this.userPrincipal == null) {
this.userPrincipal = new UserPrincipal(username);
}
}
/**
 * JAAS commit phase: attaches the user principal and a "Roles" group
 * (one nested group per role returned by the verifier) to the Subject.
 *
 * @return always true
 */
// NOTE(review): ignores the 'succeeded' flag — per the JAAS contract commit()
// should return false (and add nothing) when login() failed; confirm intent.
@Override
public boolean commit() throws LoginException {
System.out.println("commit");
final Set<Principal> principals = this.subject.getPrincipals();
if (!principals.contains(this.userPrincipal)) {
principals.add(this.userPrincipal);
}
final Role[] roleList = getRoles(this.userPrincipal);
// principals.add(new RolePrincipal(Role.USER));
// Build the container-recognized "Roles" group with one member group per role.
final Group roles = new PersonalGroup("Roles");
for (final Role role : roleList) {
final Group group = new PersonalGroup(role.toString());
group.addMember(this.userPrincipal);
roles.addMember(group);
}
principals.add(roles);
// roles.add(Role.USER);
// for (final Role role : roles) {
// principals.add(new RolePrincipal(role));
// }
return true;
}
/**
 * JAAS abort phase.
 *
 * @return always false
 */
// NOTE(review): does not reset 'succeeded'/'userPrincipal' as the JAAS
// contract suggests for an aborted login — confirm whether cleanup is needed.
@Override
public boolean abort() throws LoginException {
System.out.println("abort");
return false;
}
/**
 * JAAS logout phase.
 *
 * @return always false
 */
// NOTE(review): returning false tells the LoginContext to ignore this module on
// logout, and no principals are removed from the Subject — confirm this is intended.
@Override
public boolean logout() throws LoginException {
    // Bug fix: this method previously printed "abort" (copy-paste from abort()).
    System.out.println("logout");
    return false;
}
// Fetches the roles for the given principal from the LoginVerifier service.
private Role[] getRoles(final UserPrincipal principal) {
return this.loginVerifier.getUserRoles(principal.getName());
}
}
| apache-2.0 |
ernestp/consulo | platform/platform-api/src/com/intellij/openapi/actionSystem/ex/ActionManagerEx.java | 3530 | /*
* Copyright 2000-2012 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.openapi.actionSystem.ex;
import com.intellij.ide.DataManager;
import com.intellij.openapi.actionSystem.*;
import com.intellij.openapi.extensions.PluginId;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.awt.event.InputEvent;
import java.util.Comparator;
/**
 * Extended action-manager SPI adding toolbar creation, action-event firing and
 * keymap helpers on top of {@link ActionManager}.
 */
public abstract class ActionManagerEx extends ActionManager {
// Convenience accessor that narrows the singleton to this extended type.
public static ActionManagerEx getInstanceEx() {
return (ActionManagerEx)getInstance();
}
public abstract ActionToolbar createActionToolbar(String place, ActionGroup group, boolean horizontal, boolean decorateButtons);
// Notifies listeners immediately before/after an action is performed, and before editor typing.
public abstract void fireBeforeActionPerformed(AnAction action, DataContext dataContext, AnActionEvent event);
public abstract void fireAfterActionPerformed(AnAction action, DataContext dataContext, AnActionEvent event);
public abstract void fireBeforeEditorTyping(char c, DataContext dataContext);
/**
 * For logging purposes
 */
// NOTE(review): "Preformed" is a typo for "Performed", but the method names are
// public API and must stay as-is for binary compatibility.
public abstract String getLastPreformedActionId();
public abstract String getPrevPreformedActionId();
/**
 * Comparator compares action ids (String) on order of action registration.
 *
 * @return a negative integer if action that corresponds to the first id was registered earler than the action that corresponds
 * <p/>
 * to the second id; zero if both ids are equal; a positive number otherwise.
 */
public abstract Comparator<String> getRegistrationOrderComparator();
/**
 * Similar to {@link KeyStroke#getKeyStroke(String)} but allows keys in lower case.
 * <p/>
 * I.e. "control x" is accepted and interpreted as "control X".
 *
 * @return null if string cannot be parsed.
 */
@Nullable
public static KeyStroke getKeyStroke(String s) {
KeyStroke result = null;
try {
result = KeyStroke.getKeyStroke(s);
}
catch (Exception ex) {
//ok
}
// Retry with the last character upper-cased, e.g. "control x" -> "control X".
if (result == null && s != null && s.length() >= 2 && s.charAt(s.length() - 2) == ' ') {
try {
String s1 = s.substring(0, s.length() - 1) + Character.toUpperCase(s.charAt(s.length() - 1));
result = KeyStroke.getKeyStroke(s1);
}
catch (Exception ex) {
// ok
}
}
return result;
}
// Returns the ids of all actions registered by the given plugin.
public abstract String[] getPluginActions(PluginId pluginId);
public abstract void queueActionPerformedEvent(final AnAction action, DataContext context, AnActionEvent event);
public abstract boolean isActionPopupStackEmpty();
public abstract boolean isTransparentOnlyActionsUpdateNow();
// Convenience overload: resolves the action by id, builds an event from the
// current data context, then delegates to the abstract variant above.
public void fireBeforeActionPerformed(String actionId, InputEvent event) {
final AnAction action = getAction(actionId);
if (action != null) {
final DataContext context = DataManager.getInstance().getDataContext();
final AnActionEvent e = new AnActionEvent(event, context, ActionPlaces.UNKNOWN, action.getTemplatePresentation(), this, 0);
fireBeforeActionPerformed(action, context, e);
}
}
}
| apache-2.0 |
talsma-ict/umldoclet | src/plantuml-asl/src/net/sourceforge/plantuml/eggs/PSystemEgg.java | 1683 | /* ========================================================================
* PlantUML : a free UML diagram generator
* ========================================================================
*
* (C) Copyright 2009-2020, Arnaud Roques
*
* Project Info: https://plantuml.com
*
* If you like this project or if you find it useful, you can support us at:
*
* https://plantuml.com/patreon (only 1$ per month!)
* https://plantuml.com/paypal
*
* This file is part of PlantUML.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*
* Original Author: Arnaud Roques
*/
package net.sourceforge.plantuml.eggs;
import net.sourceforge.plantuml.PlainStringsDiagram;
import net.sourceforge.plantuml.core.DiagramDescription;
import net.sourceforge.plantuml.core.UmlSource;
import java.util.StringTokenizer;
/**
 * Easter-egg diagram: renders the '|'-separated segments of a sentence as
 * plain text lines.
 */
public class PSystemEgg extends PlainStringsDiagram {

    PSystemEgg(UmlSource source, String sentence) {
        super(source);
        // Split on '|'; StringTokenizer silently skips empty segments.
        for (final StringTokenizer tokenizer = new StringTokenizer(sentence, "|"); tokenizer.hasMoreTokens(); ) {
            strings.add(tokenizer.nextToken());
        }
    }

    public DiagramDescription getDescription() {
        return new DiagramDescription("(Easter Eggs)");
    }
}
| apache-2.0 |
piraso/piraso-sql | nbm-client/src/main/java/org/piraso/ui/sql/provider/SQLEntryRowRenderingProviderImpl.java | 2467 | /*
* Copyright (c) 2012 Alvin R. de Leon. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.piraso.ui.sql.provider;
import org.openide.util.lookup.ServiceProvider;
import org.piraso.api.entry.Entry;
import org.piraso.api.sql.SQLDataViewEntry;
import org.piraso.api.sql.SQLPreferenceEnum;
import org.piraso.ui.api.EntryRowColumn;
import org.piraso.ui.api.EntryRowRenderingProvider;
import javax.swing.*;
import java.awt.*;
/**
* Provides rendering to general entry types.
*/
/**
 * Row-rendering provider for SQL log entries: colors connection, SQL-view and
 * result-set rows so they stand out in the entry table.
 */
@ServiceProvider(service = EntryRowRenderingProvider.class)
public class SQLEntryRowRenderingProviderImpl implements EntryRowRenderingProvider {

    @Override
    public boolean isSupported(Entry entry) {
        final String level = entry.getLevel();
        return SQLPreferenceEnum.CONNECTION_ENABLED.getPropertyName().equals(level)
                || SQLPreferenceEnum.VIEW_SQL_ENABLED.getPropertyName().equals(level)
                || SQLPreferenceEnum.RESULTSET_ENABLED.getPropertyName().equals(level);
    }

    @Override
    public void render(JLabel cell, Entry entry, EntryRowColumn column) {
        final String level = entry.getLevel();
        if (SQLPreferenceEnum.CONNECTION_ENABLED.getPropertyName().equals(level)) {
            // Connection events: light blue background, bold blue text.
            cell.setBackground(new Color(0xDAE9F6));
            cell.setForeground(new Color(98, 143, 181));
            cell.setFont(cell.getFont().deriveFont(Font.BOLD));
        } else if (SQLPreferenceEnum.VIEW_SQL_ENABLED.getPropertyName().equals(level)) {
            // SQL statements: light green background, bold green text.
            cell.setBackground(new Color(0xBAEEBA));
            cell.setForeground(new Color(0x008000));
            cell.setFont(cell.getFont().deriveFont(Font.BOLD));
        } else if (SQLPreferenceEnum.RESULTSET_ENABLED.getPropertyName().equals(level)) {
            // Result-set rows: blue text; data-view entries additionally bold.
            cell.setForeground(new Color(98, 143, 181));
            if (SQLDataViewEntry.class.isInstance(entry)) {
                cell.setFont(cell.getFont().deriveFont(Font.BOLD));
            }
        }
    }
}
| apache-2.0 |
jmartisk/hibernate-validator | engine/src/main/java/org/hibernate/validator/internal/xml/XmlParserHelper.java | 5663 | /*
* JBoss, Home of Professional Open Source
* Copyright 2012, Red Hat, Inc. and/or its affiliates, and individual contributors
* by the @authors tag. See the copyright.txt in the distribution for a
* full listing of individual contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.hibernate.validator.internal.xml;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import javax.xml.namespace.QName;
import javax.xml.stream.XMLEventReader;
import javax.xml.stream.XMLInputFactory;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.events.Attribute;
import javax.xml.stream.events.StartElement;
import javax.xml.stream.events.XMLEvent;
import javax.xml.validation.Schema;
import javax.xml.validation.SchemaFactory;
import org.xml.sax.SAXException;
import org.hibernate.validator.internal.util.Contracts;
import org.hibernate.validator.internal.util.ReflectionHelper;
import org.hibernate.validator.internal.util.logging.Log;
import org.hibernate.validator.internal.util.logging.LoggerFactory;
import static org.hibernate.validator.internal.util.logging.Messages.MESSAGES;
/**
* Provides common functionality used within the different XML descriptor
* parsers.
*
* @author Gunnar Morling
*/
/**
 * Provides common functionality used within the different XML descriptor
 * parsers.
 *
 * @author Gunnar Morling
 */
public class XmlParserHelper {

    private static final Log log = LoggerFactory.make();

    /**
     * The expected number of XML schemas managed by this class. Used to set the
     * initial cache size.
     */
    private static final int NUMBER_OF_SCHEMAS = 4;

    // Version reported for descriptors without a "version" attribute (BV 1.0 compatibility).
    private static final String DEFAULT_VERSION = "1.0";

    // xmlInputFactory used to be static in order to cache the factory, but that introduced a leakage of
    // class loader in Wildfly. See HV-842
    private final XMLInputFactory xmlInputFactory = XMLInputFactory.newInstance();

    /**
     * Read limit for the buffered input stream. Resetting the stream after
     * reading the version attribute will fail, if this has required reading
     * more bytes than this limit (1MB) from the stream. Practically, this
     * should never happen.
     */
    private static final int READ_LIMIT = 1024 * 1024;

    private static final ConcurrentMap<String, Schema> schemaCache = new ConcurrentHashMap<String, Schema>(
            NUMBER_OF_SCHEMAS
    );

    /**
     * Retrieves the schema version applying for the given XML input stream as
     * represented by the "version" attribute of the root element of the stream.
     *
     * @param resourceName The name of the represented XML resource.
     * @param xmlInputStream An input stream representing an XML resource. Must support the
     * {@link InputStream#mark(int)} and {@link InputStream#reset()}
     * methods.
     *
     * @return The value of the "version" attribute. For compatibility with BV
     * 1.0, "1.0" will be returned if the given stream doesn't have a
     * "version" attribute.
     */
    public String getSchemaVersion(String resourceName, InputStream xmlInputStream) {
        Contracts.assertNotNull( xmlInputStream, MESSAGES.parameterMustNotBeNull( "xmlInputStream" ) );

        // Mark the stream so the caller can re-read it from the start after the peek.
        xmlInputStream.mark( READ_LIMIT );

        try {
            XMLEventReader xmlEventReader = createXmlEventReader( xmlInputStream );
            StartElement rootElement = getRootElement( xmlEventReader );

            return getVersionValue( rootElement );
        }
        catch ( XMLStreamException e ) {
            throw log.getUnableToDetermineSchemaVersionException( resourceName, e );
        }
        finally {
            try {
                xmlInputStream.reset();
            }
            catch ( IOException e ) {
                throw log.getUnableToResetXmlInputStreamException( resourceName, e );
            }
        }
    }

    // Returns the "version" attribute of the given element, DEFAULT_VERSION if
    // absent, or null if the element itself is null.
    private String getVersionValue(StartElement startElement) {
        if ( startElement == null ) {
            return null;
        }
        Attribute versionAttribute = startElement.getAttributeByName( new QName( "version" ) );
        return versionAttribute != null ? versionAttribute.getValue() : DEFAULT_VERSION;
    }

    // Advances the reader to the first start element (the document root), or null if none.
    private StartElement getRootElement(XMLEventReader xmlEventReader) throws XMLStreamException {
        while ( xmlEventReader.hasNext() ) {
            XMLEvent nextEvent = xmlEventReader.nextEvent();
            if ( nextEvent.isStartElement() ) {
                return nextEvent.asStartElement();
            }
        }
        return null;
    }

    // Synchronized because XMLInputFactory implementations are not guaranteed thread-safe.
    private synchronized XMLEventReader createXmlEventReader(InputStream xmlStream) throws XMLStreamException {
        return xmlInputFactory.createXMLEventReader( xmlStream );
    }

    /**
     * Returns the schema for the given resource, loading and caching it on
     * first access.
     *
     * @return the schema, or {@code null} if it could not be loaded
     */
    public Schema getSchema(String schemaResource) {
        Schema schema = schemaCache.get( schemaResource );
        if ( schema != null ) {
            return schema;
        }

        schema = loadSchema( schemaResource );
        if ( schema == null ) {
            // Bug fix: loadSchema() returns null on SAXException, and
            // ConcurrentHashMap.putIfAbsent() throws NullPointerException for null
            // values. Don't cache the failure; a later call may succeed.
            return null;
        }

        Schema previous = schemaCache.putIfAbsent( schemaResource, schema );
        return previous != null ? previous : schema;
    }

    // Loads the schema from the classpath; logs and returns null on parse failure.
    private Schema loadSchema(String schemaResource) {
        ClassLoader loader = ReflectionHelper.getClassLoaderFromClass( XmlParserHelper.class );

        URL schemaUrl = loader.getResource( schemaResource );
        SchemaFactory sf = SchemaFactory.newInstance( javax.xml.XMLConstants.W3C_XML_SCHEMA_NS_URI );
        Schema schema = null;
        try {
            schema = sf.newSchema( schemaUrl );
        }
        catch ( SAXException e ) {
            log.unableToCreateSchema( schemaResource, e.getMessage() );
        }
        return schema;
    }
}
| apache-2.0 |
jamesnetherton/camel | components/camel-google-calendar/src/main/java/org/apache/camel/component/google/calendar/stream/GoogleCalendarStreamEndpoint.java | 3616 | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.google.calendar.stream;
import java.io.UnsupportedEncodingException;
import com.google.api.services.calendar.Calendar;
import com.google.api.services.calendar.model.Event;
import org.apache.camel.Consumer;
import org.apache.camel.Exchange;
import org.apache.camel.ExchangePattern;
import org.apache.camel.Message;
import org.apache.camel.Processor;
import org.apache.camel.Producer;
import org.apache.camel.component.google.calendar.GoogleCalendarClientFactory;
import org.apache.camel.impl.ScheduledPollEndpoint;
import org.apache.camel.spi.UriEndpoint;
import org.apache.camel.spi.UriParam;
/**
* The google-calendar component provides access to Google Calendar in a streaming mod.
*/
@UriEndpoint(firstVersion = "2.23.0",
scheme = "google-calendar-stream",
title = "Google Calendar Stream",
syntax = "google-calendar-stream:index",
consumerClass = GoogleCalendarStreamConsumer.class,
consumerOnly = true,
label = "api,cloud")
public class GoogleCalendarStreamEndpoint extends ScheduledPollEndpoint {
// Endpoint configuration bound from URI parameters by Camel.
@UriParam
private GoogleCalendarStreamConfiguration configuration;
public GoogleCalendarStreamEndpoint(String uri, GoogleCalendarStreamComponent component, GoogleCalendarStreamConfiguration endpointConfiguration) {
super(uri, component);
this.configuration = endpointConfiguration;
}
// Consumer-only endpoint (see consumerOnly = true above): producers are rejected.
@Override
public Producer createProducer() throws Exception {
throw new UnsupportedOperationException("The camel google calendar stream component doesn't support producer");
}
@Override
public Consumer createConsumer(Processor processor) throws Exception {
final GoogleCalendarStreamConsumer consumer = new GoogleCalendarStreamConsumer(this, processor);
configureConsumer(consumer);
return consumer;
}
// Delegates client / client-factory access to the owning component.
public Calendar getClient() {
return ((GoogleCalendarStreamComponent)getComponent()).getClient(configuration);
}
public GoogleCalendarClientFactory getClientFactory() {
return ((GoogleCalendarStreamComponent)getComponent()).getClientFactory();
}
public void setClientFactory(GoogleCalendarClientFactory clientFactory) {
((GoogleCalendarStreamComponent)getComponent()).setClientFactory(clientFactory);
}
public GoogleCalendarStreamConfiguration getConfiguration() {
return configuration;
}
@Override
public boolean isSingleton() {
return true;
}
// Wraps a calendar Event in a new exchange with the event as the IN body.
// NOTE(review): the 'pattern' parameter is ignored — confirm whether it should
// be passed to createExchange(pattern).
public Exchange createExchange(ExchangePattern pattern, Event event) throws UnsupportedEncodingException {
Exchange exchange = super.createExchange();
Message message = exchange.getIn();
message.setBody(event);
return exchange;
}
}
| apache-2.0 |
hitakaken/novbank-store | src/main/java/com/novbank/store/domain/base/profile/AbstractProfile.java | 12292 | package com.novbank.store.domain.base.profile;
import com.google.common.base.Predicate;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
/**
* Created by Cao Ke on 2015/4/18.
*/
public abstract class AbstractProfile implements ProfileSupport {
@Override
public abstract void putValue(String fieldName, Object value, Map<String, Object> options, boolean overwrite);
@Override
public void putValue(String fieldName, Object value, Map<String, Object> options){
putValue(fieldName, value, options, true);
}
@Override
public void putValue(String fieldName, Object value) {
putValue(fieldName, value, null);
}
@Override
public void putValue(String fieldName, Object value, final String k1, final Object v1) {
putValue(fieldName, value, new HashMap<String, Object>() {{
put(k1, v1);
}});
}
@Override
public void putValue(String fieldName, Object value, final String k1, final Object v1, final String k2, final Object v2) {
putValue(fieldName, value, new HashMap<String, Object>() {{
put(k1, v1);
put(k2, v2);
}});
}
@Override
public void putValue(String fieldName, Object value, final String k1, final Object v1, final String k2, final Object v2, final String k3, final Object v3) {
putValue(fieldName, value, new HashMap<String, Object>() {{
put(k1, v1);
put(k2, v2);
put(k3, v3);
}});
}
@Override
public void putValues(Map<String, Object> values, Map<String, Object> options, boolean overwrite) {
if(values == null || values.isEmpty()) return;
for(String fieldName : values.keySet()){
putValue(fieldName, values.get(fieldName), options, overwrite);
}
}
@Override
public void putValues(Map<String, Object> values) {
putValues(values, true);
}
@Override
public void putValues(Map<String, Object> values, boolean overwrite) {
putValues(values, null, overwrite);
}
@Override
public void putValues(Map<String, Object> values, Map<String, Object> options) {
putValues(values, options, true);
}
@Override
public void putValues(Map<String, Object> values, final String k1, final Object v1) {
putValues(values, new HashMap<String, Object>() {{
put(k1, v1);
}});
}
@Override
public void putValues(Map<String, Object> values, final String k1, final Object v1, final String k2, final Object v2) {
putValues(values, new HashMap<String, Object>() {{
put(k1, v1);
put(k2, v2);
}});
}
@Override
public void putValues(Map<String, Object> values, final String k1, final Object v1, final String k2, final Object v2, final String k3, final Object v3) {
putValues(values, new HashMap<String, Object>() {{
put(k1, v1);
put(k2, v2);
put(k3, v3);
}});
}
@Override
public void putValues(Map<String, Object> values, final String k1, final Object v1, boolean overwrite) {
putValues(values, new HashMap<String, Object>() {{
put(k1, v1);
}}, overwrite);
}
@Override
public void putValues(Map<String, Object> values, final String k1, final Object v1, final String k2, final Object v2, boolean overwrite) {
putValues(values, new HashMap<String, Object>() {{
put(k1, v1);
put(k2, v2);
}}, overwrite);
}
@Override
public void putValues(Map<String, Object> values, final String k1, final Object v1, final String k2, final Object v2, final String k3, final Object v3, boolean overwrite) {
putValues(values, new HashMap<String, Object>() {{
put(k1, v1);
put(k2, v2);
put(k3, v3);
}}, overwrite);
}
public abstract Map<Map<String, Object>, Object> valuesWithOptions(String fieldName, Map<String, Object> options, boolean strictly);
@Override
public Map<Map<String, Object>, Object> valuesWithOptions(String fieldName, Map<String, Object> options) {
return valuesWithOptions(fieldName, options, false);
}
@Override
public Map<Map<String, Object>, Object> valuesWithOptionsStrictly(String fieldName, Map<String, Object> options) {
return valuesWithOptions(fieldName, options, true);
}
@Override
public Map<Map<String, Object>, Object> valuesWithOptions(String fieldName) {
return valuesWithOptions(fieldName, Maps.<String, Object>newHashMap());
}
@Override
public Set values(String fieldName) {
Map<Map<String, Object>, Object> result = valuesWithOptions(fieldName);
return result!=null && !result.isEmpty() ? Sets.newHashSet(result.values()) : Sets.newHashSet();
}
@Override
public Set values(String fieldName, Map<String, Object> options) {
Map<Map<String, Object>, Object> result = valuesWithOptions(fieldName, options);
return result!=null && !result.isEmpty() ? Sets.newHashSet(result.values()) :Sets.newHashSet();
}
@Override
public Set valuesStrictly(String fieldName, Map<String, Object> options) {
Map<Map<String, Object>, Object> result = valuesWithOptionsStrictly(fieldName, options);
return result!=null && !result.isEmpty() ? Sets.newHashSet(result.values()) :Sets.newHashSet();
}
@Override
public Map<String, Set> values(Iterable<String> fieldNames) {
return values(fieldNames, Maps.<String, Object>newHashMap());
}
@Override
public Map<String, Set> values(Iterable<String> fieldNames, Map<String, Object> options) {
Map<String,Set> results = Maps.newHashMap();
if(fieldNames!=null){
for(String fieldName : fieldNames){
Set result = values(fieldName, options);
if(result!=null) results.put(fieldName,result);
}
}
return results;
}
@Override
public Map<String, Set> valuesStrictly(Iterable<String> fieldNames, Map<String, Object> options) {
Map<String,Set> results = Maps.newHashMap();
if(fieldNames!=null){
for(String fieldName : fieldNames){
Set result = valuesStrictly(fieldName, options);
if(result!=null) results.put(fieldName,result);
}
}
return results;
}
@Override
public Map<String, Map<Map<String, Object>, Object>> valuesWithOptions(Iterable<String> fieldNames) {
return valuesWithOptions(fieldNames, Maps.<String, Object>newHashMap());
}
@Override
public Map<String, Map<Map<String, Object>, Object>> valuesWithOptions(Iterable<String> fieldNames, Map<String, Object> options) {
Map<String, Map<Map<String, Object>, Object>> results = Maps.newHashMap();
if(fieldNames!=null){
for(String fieldName : fieldNames){
Map<Map<String, Object>, Object> result = valuesWithOptions(fieldName, options);
if(result!=null) results.put(fieldName,result);
}
}
return results;
}
@Override
public Map<String, Map<Map<String, Object>, Object>> valuesWithOptionsStrictly(Iterable<String> fieldNames, Map<String, Object> options) {
Map<String, Map<Map<String, Object>, Object>> results = Maps.newHashMap();
if(fieldNames!=null){
for(String fieldName : fieldNames){
Map<Map<String, Object>, Object> result = valuesWithOptionsStrictly(fieldName, options);
if(result!=null) results.put(fieldName,result);
}
}
return results;
}
public abstract Object value(String fieldName, Map<String,Object> options, boolean strict);
@Override
public Object value(String fieldName, Map<String, Object> options) {
return value(fieldName, options, false);
}
@Override
public Object valueStrictly(String fieldName, Map<String, Object> options) {
return value(fieldName, options, true);
}
@Override
public Object value(String fieldName) {
return value(fieldName, null);
}
@Override
public Object value(String fieldName, final String k1, final Object v1) {
return value(fieldName, new HashMap<String, Object>() {{
put(k1, v1);
}});
}
@Override
public Object value(String fieldName, final String k1, final Object v1, final String k2, final Object v2) {
return value(fieldName, new HashMap<String, Object>() {{
put(k1, v1);
put(k2, v2);
}});
}
@Override
public Object value(String fieldName, final String k1, final Object v1, final String k2, final Object v2, final String k3, final Object v3) {
return value(fieldName, new HashMap<String, Object>() {{
put(k1, v1);
put(k2, v2);
put(k3, v3);
}});
}
@Override
public Object valueStrictly(String fieldName, final String k1, final Object v1) {
return valueStrictly(fieldName, new HashMap<String, Object>() {{
put(k1, v1);
}});
}
@Override
public Object valueStrictly(String fieldName, final String k1, final Object v1, final String k2, final Object v2) {
return valueStrictly(fieldName, new HashMap<String, Object>() {{
put(k1, v1);
put(k2, v2);
}});
}
@Override
public Object valueStrictly(String fieldName, final String k1, final Object v1, final String k2, final Object v2, final String k3, final Object v3) {
return valueStrictly(fieldName, new HashMap<String, Object>() {{
put(k1, v1);
put(k2, v2);
put(k3, v3);
}});
}
@Override
public void putValues(ProfileSupport other, Map<String, Object> options, boolean overwrite) {
if(other == null || other.fieldNames() ==null || other.fieldNames().isEmpty())
return;
for(String fieldName : other.fieldNames()){
for(Map.Entry<Map<String,Object>,Object> entry:other.valuesWithOptions(fieldName).entrySet()){
Map<String,Object> key = entry.getKey();
Object value = entry.getValue();
if(options!=null) key.putAll(options);
putValue(fieldName, value, options, overwrite);
}
}
}
@Override
public void putValues(ProfileSupport other, Map<String, Object> options) {
putValues(other, options, false);
}
@Override
public void putValues(ProfileSupport other, boolean overwrite) {
putValues(other, null, overwrite);
}
@Override
public void putValues(ProfileSupport other) {
putValues(other, false);
}
@Override
public abstract Map<Map<String, Object>, Object> valuesWithOptions(String fieldName, Predicate predicate);
@Override
public Set values(String fieldName, Predicate predicate) {
Map<Map<String, Object>, Object> results = valuesWithOptions(fieldName,predicate);
return results!=null && !results.isEmpty() ? Sets.newHashSet(results.values()) :Sets.newHashSet();
}
@Override
public Map<String, Set> values(Iterable<String> fieldNames, Predicate predicate) {
Map<String, Set> results = Maps.newHashMap();
if(fieldNames!=null){
for(String fieldName : fieldNames){
Set result = values(fieldName, predicate);
if(result!=null) results.put(fieldName,result);
}
}
return results;
}
@Override
public Map<String, Map<Map<String, Object>, Object>> valuesWithOptions(Iterable<String> fieldNames, Predicate predicate) {
Map<String, Map<Map<String, Object>, Object>> results = Maps.newHashMap();
if(fieldNames!=null){
for(String fieldName : fieldNames){
Map<Map<String, Object>, Object> result = valuesWithOptions(fieldName, predicate);
if(result!=null) results.put(fieldName,result);
}
}
return results;
}
}
| apache-2.0 |
akjava/akjava_gwtlib | src/com/akjava/gwt/lib/client/experimental/ImageScaleRangeConverter.java | 1176 | package com.akjava.gwt.lib.client.experimental;
import com.google.common.base.Converter;
/*
* min -99 max 90
*/
/*
 * Maps a slider range value to an image scale factor and back.
 * Range >= 0 maps in 0.1 steps (0 -> 1.0, 9 -> 1.9); range < 0 maps in 0.01
 * steps (-99 -> 0.01). Documented bounds: min -99, max 90.
 */
public class ImageScaleRangeConverter extends Converter<Integer, Double> {

    /** Range -> scale: positive steps of 0.1, negative steps of 0.01. */
    @Override
    protected Double doForward(Integer rangeValue) {
        if (rangeValue == 0) {
            return 1.0;
        }
        if (rangeValue > 0) {
            return 1.0 + rangeValue * 0.1;
        } else if (rangeValue < 0) {
            return 1.0 + rangeValue * 0.01;
        }
        return 1.0;
    }

    /** Scale -> range: inverse of {@link #doForward(Integer)}. */
    @Override
    protected Integer doBackward(Double scale) {
        if (scale == 1) {
            return 0;
        } else if (scale > 1) {
            // Bug fix: round instead of truncating. (int)(1.3 * 10) is 12 because
            // 1.3 * 10 == 12.999...; truncation made doBackward(doForward(3)) == 2.
            return (int) Math.round(scale * 10) - 10;
        } else { // scale < 1
            final int r = 100 - (int) Math.round(scale * 100);
            return -r;
        }
    }

    private static Converter<Integer, Double> converter;

    // Lazily created singleton; benign race (duplicate instances are stateless).
    public static Converter<Integer, Double> getRangeToScale() {
        Converter<Integer, Double> result = converter;
        return (result == null) ? converter = new ImageScaleRangeConverter() : result;
    }

    private static Converter<Double, Integer> reverse;

    // Lazily created reversed view of the converter.
    public static Converter<Double, Integer> getScaleToRange() {
        Converter<Double, Integer> result = reverse;
        return (result == null) ? reverse = new ImageScaleRangeConverter().reverse() : result;
    }
}
| apache-2.0 |
b2ihealthcare/snow-owl | core/com.b2international.snowowl.core/src/com/b2international/snowowl/core/repository/package-info.java | 758 | /*
* Copyright 2011-2018 B2i Healthcare Pte Ltd, http://b2i.sg
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Contains classes related to configuring terminology repositories.
*/
package com.b2international.snowowl.core.repository; | apache-2.0 |
Ecwid/consul-api | src/main/java/com/ecwid/consul/v1/query/model/QueryExecution.java | 1415 | package com.ecwid.consul.v1.query.model;
import com.google.gson.annotations.SerializedName;
import java.util.List;
/**
 * Result of executing a Consul prepared query: the matched service, the nodes
 * providing it, DNS settings, the answering datacenter and failover count.
 */
public class QueryExecution {

    /** DNS configuration section of a prepared-query execution result. */
    public static class DNS {
        @SerializedName("TTL")
        private String ttl;

        public String getTtl() { return ttl; }
        public void setTtl(String ttl) { this.ttl = ttl; }

        @Override
        public String toString() {
            return "DNS{" +
                    "ttl=" + ttl +
                    '}';
        }
    }

    @SerializedName("Service")
    private String service;

    @SerializedName("Nodes")
    private List<QueryNode> nodes;

    @SerializedName("DNS")
    private DNS dns;

    @SerializedName("Datacenter")
    private String datacenter;

    @SerializedName("Failovers")
    private Integer failovers;

    public String getService() { return service; }
    public void setService(String service) { this.service = service; }

    public List<QueryNode> getNodes() { return nodes; }
    public void setNodes(List<QueryNode> nodes) { this.nodes = nodes; }

    // Accessors for 'dns' were missing; added for completeness (backward compatible).
    public DNS getDns() { return dns; }
    public void setDns(DNS dns) { this.dns = dns; }

    public String getDatacenter() { return datacenter; }
    public void setDatacenter(String datacenter) { this.datacenter = datacenter; }

    public Integer getFailovers() { return failovers; }
    public void setFailovers(Integer failovers) { this.failovers = failovers; }

    @Override
    public String toString() {
        // Bug fix: previously printed "CatalogNode{" — a copy-paste from another model class.
        return "QueryExecution{" +
                "service=" + service +
                ", nodes=" + nodes +
                ", dns=" + dns +
                ", datacenter=" + datacenter +
                ", failovers=" + failovers +
                '}';
    }
}
| apache-2.0 |
EMResearch/EMB | jdk_8_maven/cs/rest/original/scout-api/api/src/main/java/se/devscout/scoutapi/resource/ActivityResourceV2.java | 8524 | package se.devscout.scoutapi.resource;
import com.codahale.metrics.annotation.Timed;
import io.dropwizard.auth.Auth;
import io.dropwizard.hibernate.UnitOfWork;
import io.dropwizard.jersey.PATCH;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;
import io.swagger.annotations.Example;
import se.devscout.scoutapi.auth.AuthResult;
import se.devscout.scoutapi.dao.ActivityDao;
import se.devscout.scoutapi.dao.ActivityRatingDao;
import se.devscout.scoutapi.model.Activity;
import se.devscout.scoutapi.model.ActivityProperties;
import se.devscout.scoutapi.model.ActivityRatingAttrs;
import javax.servlet.http.HttpServletResponse;
import javax.ws.rs.*;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
/**
 * JAX-RS resource publishing version 2 of the activities API under
 * {@code /v2/activities}. Most endpoints delegate to the shared
 * {@link ActivityResource} base class; this class pins the v2 paths,
 * the Swagger documentation and the v2-specific search behaviour.
 */
@Path("/v2/activities")
@Api(tags = {"activities"})
public class ActivityResourceV2 extends ActivityResource {

    public ActivityResourceV2(ActivityDao dao, ActivityRatingDao activityRatingDao) {
        super(dao, activityRatingDao);
    }

    /**
     * Searches for activities using any combination of the filter parameters.
     * The actual querying is delegated to {@code getActivities(...)} in the
     * base class. Note that {@code my_favourites} is declared (and documented
     * in Swagger) but rejected at runtime — see the guard below.
     */
    @GET
    @Timed
    @UnitOfWork
    @ApiOperation(value = "Search for activities")
    public Response all(@ApiParam(value = API_DOCS_ATTRS_DESCR)
                        @QueryParam("attrs") String attrs,

                        @ApiParam(value = "Words to look for in activity names. Prefix word with minus character to exclude activities with that word.")//, example = "mat -matematik")
                        @QueryParam("name") String name,

                        @ApiParam(value = "Words to look for in any text field. Prefix word with minus character to exclude activities with that word.")//, example = "mat -matematik")
                        @QueryParam("text") String text,

                        @ApiParam(value = "Show featured (hand-picked by editor) activities")
                        @QueryParam("featured") Boolean featured,

                        @ApiParam(value = "Show activites with at least one of the specified tags. Comma-separated list of tag ids.")//, example = "12,34")
                        @QueryParam("categories") String tagIds,

                        @ApiParam(value = "Find activities suited for certain ages. Comma-separated list of ages.")//, example = "8,12")
                        @QueryParam("ages") String ages,

                        @ApiParam(value = "Find activities suited for certain number of participants.")//, example = "1,5")
                        @QueryParam("participants") String numberOfParticipants,

                        @ApiParam(value = "Find activities which can be completed in a certain amount of time. Unit: minutes.")//, example = "15")
                        @QueryParam("durations") String durations,

                        @ApiParam(value = "Find specific activites based on their internal identifiers. It is not expected that end-users know these number. Comma-separated list.")
                        @QueryParam("id") String activityIds,

                        @ApiParam(value = "Limit result to activities which the current user (as determined by API key) has marked as favourites.")
                        @QueryParam("my_favourites") Boolean myFavourites,

                        @ApiParam(value = "Show activities which have been rated by at least this many users")
                        @QueryParam("ratings_count_min") Long ratingsCountMin,

                        @ApiParam(value = "Show activities whose average rating is at this amount.")//, example = "1.0")
                        @QueryParam("ratings_average_min") Double ratingsAverageMin,

                        @ApiParam(value = "Limit result to a number of random activities matching the other conditions.")
                        @QueryParam("random") int random,

                        @ApiParam(value = "Limit result to the overall favourite activities. This means a list of activities sorted by the number of users who have marked them as their favourites. This parameter cannot be used together with any other filtering parameters (meaning that it is not possible to use it to, for example, show favourites for a particular category or age group).")
                        @QueryParam("favourites") int favourites) {
        if (myFavourites != null) {
            // v2 does not implement per-user favourite filtering; fail fast
            // instead of silently ignoring the parameter.
            throw new WebApplicationException("API currently does not support filtering on your own favourites.");
        }
        return okResponse(getActivities(
                name,
                text,
                featured,
                tagIds,
                ages,
                numberOfParticipants,
                durations,
                activityIds,
                random,
                ratingsCountMin,
                ratingsAverageMin,
                null,
                // when "favourites" is requested, sort by favourites count and
                // limit the result to that many activities
                favourites > 0 ? ActivityDao.SortOrder.favouritesCount : null,
                favourites > 0 ? favourites : null),
                attrs);
    }

    /** Delegates to the base class; exposed here only to bind the v2 path and docs. */
    @Override
    @DELETE
    @Timed
    @Path("{id}")
    @UnitOfWork
    @ApiOperation(value = "Delete an activity")
    public void delete(@Auth @ApiParam(hidden = true) AuthResult authResult , @Context HttpServletResponse response, @PathParam("id") long id) {
        super.delete(authResult, response, id);
    }

    /** Delegates to the base class; exposed here only to bind the v2 path and docs. */
    @Override
    @POST
    @Timed
    @UnitOfWork
    @ApiOperation(value = "Create a new activity")
    public Activity create(@Auth @ApiParam(hidden = true) AuthResult authResult ,
                           @Context HttpServletResponse response, ActivityProperties properties){
        return super.create(authResult, response, properties);
    }

    /** Delegates to the base class; exposed here only to bind the v2 path and docs. */
    @Override
    @GET
    @Timed
    @Path("{id}")
    @Produces(MediaType.APPLICATION_JSON)
    @UnitOfWork
    @ApiOperation(value = "Read a specific activity")
    public Response get(@PathParam("id") long id,
                        @ApiParam(value = API_DOCS_ATTRS_DESCR)
                        @QueryParam("attrs") String attrs) {
        return super.get(id, attrs);
    }

    /** Delegates to the base class; exposed here only to bind the v2 path and docs. */
    @Override
    @GET
    @Timed
    @Path("{id}/rating")
    @Produces(MediaType.APPLICATION_JSON)
    @UnitOfWork
    @ApiOperation(value = "Get the end-user's rating the an activity")
    public Response getRating(@Auth @ApiParam(hidden = true) AuthResult authResult , @Context HttpServletResponse response, @PathParam("id") long id,
                              @ApiParam(value = API_DOCS_ATTRS_DESCR)
                              @QueryParam("attrs") String attrs) {
        return super.getRating(authResult, response, id, attrs);
    }

    /** Delegates to the base class; exposed here only to bind the v2 path and docs. */
    @Override
    @POST
    @Timed
    @Path("{id}/rating")
    @Produces(MediaType.APPLICATION_JSON)
    @UnitOfWork
    @ApiOperation(value = "Set the end-user's rating the an activity")
    public void postRating(@Auth @ApiParam(hidden = true) AuthResult authResult , @Context HttpServletResponse response, @PathParam("id") long id, ActivityRatingAttrs attrs) {
        super.postRating(authResult, response, id, attrs);
    }

    /** Delegates to the base class; exposed here only to bind the v2 path and docs. */
    @Override
    @DELETE
    @Timed
    @Path("{id}/rating")
    @Produces(MediaType.APPLICATION_JSON)
    @UnitOfWork
    @ApiOperation(value = "Remove the end-user's rating the an activity")
    public void deleteRating(@Auth @ApiParam(hidden = true) AuthResult authResult , @Context HttpServletResponse response, @PathParam("id") long id) {
        super.deleteRating(authResult, response, id);
    }

    /** Full replacement (PUT semantics): unspecified properties are cleared. Delegates to the base class. */
    @Override
    @PUT
    @Timed
    @Path("{id}")
    @Produces(MediaType.APPLICATION_JSON)
    @UnitOfWork
    @ApiOperation(value = "Update an activity with new information. Activity properties not specified in the request will be cleared.")
    public Activity update(@Auth @ApiParam(hidden = true) AuthResult authResult , @Context HttpServletResponse response, @PathParam("id") long id, ActivityProperties properties) {
        return super.update(authResult, response, id, properties);
    }

    /** Partial update (PATCH semantics): only specified properties change. Delegates to the base class. */
    @Override
    @PATCH
    @Timed
    @Path("{id}")
    @Produces(MediaType.APPLICATION_JSON)
    @UnitOfWork
    @ApiOperation(httpMethod = "PATCH", value = "Update an activity with new information. Only the properties specified in the request will be updated.")
    public Activity patch(@Auth @ApiParam(hidden = true) AuthResult authResult , @Context HttpServletResponse response, @PathParam("id") long id, ActivityProperties properties) {
        return super.patch(authResult, response, id, properties);
    }
}
| apache-2.0 |
zouzhberk/ambaridemo | demo-server/src/main/java/org/apache/ambari/server/controller/ServiceComponentHostRequest.java | 4935 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ambari.server.controller;
/**
 * Request object describing a state change or query for a single service
 * component on a specific host within a cluster. Which fields are relevant
 * depends on the operation (see the per-field comments inherited from the
 * original layout: CREATE/UPDATE vs. GET predicates).
 */
public class ServiceComponentHostRequest {

  private String clusterName; // REF
  private String serviceName;
  private String componentName;
  private String hostname;
  private String state;
  private String desiredState; // CREATE/UPDATE
  private String desiredStackId; // UPDATE
  private String staleConfig; // GET - predicate
  private String adminState; // GET - predicate
  private String maintenanceState; // UPDATE

  public ServiceComponentHostRequest(String clusterName,
                                     String serviceName,
                                     String componentName,
                                     String hostname,
                                     String desiredState) {
    // NOTE: redundant super() call removed; fields assigned directly.
    this.clusterName = clusterName;
    this.serviceName = serviceName;
    this.componentName = componentName;
    this.hostname = hostname;
    this.desiredState = desiredState;
  }

  /**
   * @return the serviceName
   */
  public String getServiceName() {
    return serviceName;
  }

  /**
   * @param serviceName the serviceName to set
   */
  public void setServiceName(String serviceName) {
    this.serviceName = serviceName;
  }

  /**
   * @return the componentName
   */
  public String getComponentName() {
    return componentName;
  }

  /**
   * @param componentName the componentName to set
   */
  public void setComponentName(String componentName) {
    this.componentName = componentName;
  }

  /**
   * @return the hostname
   */
  public String getHostname() {
    return hostname;
  }

  /**
   * @param hostname the hostname to set
   */
  public void setHostname(String hostname) {
    this.hostname = hostname;
  }

  /**
   * @return the desiredState
   */
  public String getDesiredState() {
    return desiredState;
  }

  /**
   * @param desiredState the desiredState to set
   */
  public void setDesiredState(String desiredState) {
    this.desiredState = desiredState;
  }

  /**
   * @return the current (live) state
   */
  public String getState() {
    return state;
  }

  /**
   * @param state the state to set
   */
  public void setState(String state) {
    this.state = state;
  }

  /**
   * @return the desiredStackId
   */
  public String getDesiredStackId() {
    return desiredStackId;
  }

  /**
   * @param desiredStackId the desiredStackId to set
   */
  public void setDesiredStackId(String desiredStackId) {
    this.desiredStackId = desiredStackId;
  }

  /**
   * @return the clusterName
   */
  public String getClusterName() {
    return clusterName;
  }

  /**
   * @param clusterName the clusterName to set
   */
  public void setClusterName(String clusterName) {
    this.clusterName = clusterName;
  }

  /**
   * @param staleConfig whether the config is stale
   */
  public void setStaleConfig(String staleConfig) {
    this.staleConfig = staleConfig;
  }

  /**
   * @return stale-config indicator
   */
  public String getStaleConfig() {
    return staleConfig;
  }

  /**
   * @param adminState the adminState to use as predicate
   */
  public void setAdminState(String adminState) {
    this.adminState = adminState;
  }

  /**
   * @return the admin state of the component
   */
  public String getAdminState() {
    return adminState;
  }

  /**
   * Renders all fields for logging/debugging. The exact output format is kept
   * identical to the original implementation (log consumers may depend on it).
   * Fixes: missing {@code @Override} added; string concatenation inside
   * {@code append(...)} replaced by chained appends.
   */
  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder();
    sb.append("{ clusterName=").append(clusterName)
        .append(", serviceName=").append(serviceName)
        .append(", componentName=").append(componentName)
        .append(", hostname=").append(hostname)
        .append(", desiredState=").append(desiredState)
        .append(", state=").append(state)
        .append(", desiredStackId=").append(desiredStackId)
        .append(", staleConfig=").append(staleConfig)
        .append(", adminState=").append(adminState)
        .append("}");
    return sb.toString();
  }

  /**
   * @param state the maintenance state
   */
  public void setMaintenanceState(String state) {
    this.maintenanceState = state;
  }

  /**
   * @return the maintenance state
   */
  public String getMaintenanceState() {
    return maintenanceState;
  }
}
| apache-2.0 |
osinstom/onos | apps/bgprouter/src/main/java/org/onosproject/bgprouter/BgpRouter.java | 6719 | /*
* Copyright 2015-present Open Networking Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.bgprouter;
import org.apache.felix.scr.annotations.Activate;
import org.apache.felix.scr.annotations.Component;
import org.apache.felix.scr.annotations.Deactivate;
import org.apache.felix.scr.annotations.Reference;
import org.apache.felix.scr.annotations.ReferenceCardinality;
import org.onosproject.core.ApplicationId;
import org.onosproject.core.CoreService;
import org.onosproject.component.ComponentService;
import org.onosproject.net.intf.InterfaceService;
import org.onosproject.net.DeviceId;
import org.onosproject.net.config.NetworkConfigRegistry;
import org.onosproject.net.device.DeviceEvent;
import org.onosproject.net.device.DeviceListener;
import org.onosproject.net.device.DeviceService;
import org.onosproject.net.flowobjective.FlowObjectiveService;
import org.onosproject.net.packet.PacketService;
import org.onosproject.routing.RoutingService;
import org.onosproject.routing.config.BgpConfig;
import org.onosproject.routing.config.RoutingConfiguration;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
/**
* BgpRouter component.
*/
/**
 * BgpRouter component. Wires up tunnelled BGP connectivity between the
 * data-plane switch and the Quagga BGP speaker via a control-plane OVS,
 * plus ICMP handling, driven by the {@link BgpConfig} network configuration.
 */
@Component(immediate = true)
public class BgpRouter {

    private static final Logger log = LoggerFactory.getLogger(BgpRouter.class);

    public static final String BGP_ROUTER_APP = "org.onosproject.bgprouter";

    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
    protected CoreService coreService;

    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
    protected InterfaceService interfaceService;

    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
    protected NetworkConfigRegistry networkConfigService;

    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
    protected PacketService packetService;

    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
    protected FlowObjectiveService flowObjectiveService;

    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
    protected DeviceService deviceService;

    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
    protected ComponentService componentService;

    private ApplicationId appId;

    // Device id of control-plane switch (OVS) connected to BGP Speaker - should be
    // learned from config
    private DeviceId ctrlDeviceId;

    // Responsible for handling BGP traffic (encapsulated within OF messages)
    // between the data-plane switch and the Quagga VM using a control plane OVS.
    // May remain null if activation aborts early (missing BgpConfig).
    private TunnellingConnectivityManager connectivityManager;

    private DeviceListener deviceListener;
    private IcmpHandler icmpHandler;

    // Routing sub-components that are (de)activated together with this app.
    private static List<String> components = new ArrayList<>();
    static {
        components.add("org.onosproject.routing.bgp.BgpSessionManager");
        components.add("org.onosproject.routing.impl.BgpSpeakerNeighbourHandler");
    }

    @Activate
    protected void activate() {
        appId = coreService.registerApplication(BGP_ROUTER_APP);

        RoutingConfiguration.register(networkConfigService);

        components.forEach(name -> componentService.activate(appId, name));

        ApplicationId routerAppId = coreService.getAppId(RoutingService.ROUTER_APP_ID);
        BgpConfig bgpConfig =
                networkConfigService.getConfig(routerAppId, RoutingService.CONFIG_CLASS);

        if (bgpConfig == null) {
            // Abort activation: the helper objects below are never created in
            // this case, so deactivate() must tolerate them being null.
            log.error("No BgpConfig found");
            return;
        }

        getDeviceConfiguration(bgpConfig);

        connectivityManager = new TunnellingConnectivityManager(appId,
                                                               bgpConfig,
                                                               interfaceService,
                                                               packetService,
                                                               flowObjectiveService);

        icmpHandler = new IcmpHandler(interfaceService, packetService);

        deviceListener = new InnerDeviceListener();
        deviceService.addListener(deviceListener);

        connectivityManager.start();
        icmpHandler.start();

        // If the control-plane switch is already connected, set it up now.
        if (deviceService.isAvailable(ctrlDeviceId)) {
            connectivityManager.notifySwitchAvailable();
        }

        log.info("BgpRouter started");
    }

    @Deactivate
    protected void deactivate() {
        components.forEach(name -> componentService.deactivate(appId, name));

        RoutingConfiguration.unregister(networkConfigService);

        // Guard against partially-completed activation: when activate() bailed
        // out due to a missing BgpConfig, these fields are still null and the
        // previous unguarded calls threw a NullPointerException here.
        if (connectivityManager != null) {
            connectivityManager.stop();
        }
        if (icmpHandler != null) {
            icmpHandler.stop();
        }
        if (deviceListener != null) {
            deviceService.removeListener(deviceListener);
        }

        log.info("BgpRouter stopped");
    }

    /**
     * Extracts the control-plane OVS device id from the first configured BGP
     * speaker. Leaves {@link #ctrlDeviceId} unset when no speaker is configured.
     */
    private void getDeviceConfiguration(BgpConfig bgpConfig) {
        Optional<BgpConfig.BgpSpeakerConfig> bgpSpeaker =
                bgpConfig.bgpSpeakers().stream().findAny();

        if (!bgpSpeaker.isPresent()) {
            log.error("BGP speaker configuration not found");
            return;
        }

        ctrlDeviceId = bgpSpeaker.get().connectPoint().deviceId();

        log.info("Control Plane OVS dpid: {}", ctrlDeviceId);
    }

    // Triggers driver setup when a device is (re)detected.
    private class InnerDeviceListener implements DeviceListener {
        @Override
        public void event(DeviceEvent event) {
            switch (event.type()) {
                case DEVICE_ADDED:
                case DEVICE_AVAILABILITY_CHANGED:
                    if (deviceService.isAvailable(event.subject().id())) {
                        log.info("Device connected {}", event.subject().id());
                        if (event.subject().id().equals(ctrlDeviceId)) {
                            connectivityManager.notifySwitchAvailable();
                        }
                    }
                    break;
                // TODO other cases
                case DEVICE_UPDATED:
                case DEVICE_REMOVED:
                case DEVICE_SUSPENDED:
                case PORT_ADDED:
                case PORT_UPDATED:
                case PORT_REMOVED:
                default:
                    break;
            }
        }
    }
}
| apache-2.0 |
OlivierCroisier/LP4J | lp4j-midi/src/test/java/net.thecodersbreakfast.lp4j.midi/MidiDeviceConfigurationTest.java | 1605 | /*
* Copyright 2015 Olivier Croisier (thecodersbreakfast.net)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.thecodersbreakfast.lp4j.midi;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.runners.MockitoJUnitRunner;
import javax.sound.midi.MidiDevice;
/**
 * Unit tests for {@link MidiDeviceConfiguration}: the devices supplied to the
 * constructor must be returned unchanged by the corresponding getters.
 */
@RunWith(MockitoJUnitRunner.class)
public class MidiDeviceConfigurationTest {

    @Mock
    private MidiDevice inputDevice;

    @Mock
    private MidiDevice outputDevice;

    private MidiDeviceConfiguration configuration;

    @Before
    public void init() {
        configuration = new MidiDeviceConfiguration(inputDevice, outputDevice);
    }

    @Test
    public void testGetInputDevice() throws Exception {
        // The getter must hand back exactly the input device given at construction.
        Assert.assertEquals(inputDevice, configuration.getInputDevice());
    }

    @Test
    public void testGetOutputDevice() throws Exception {
        // The getter must hand back exactly the output device given at construction.
        Assert.assertEquals(outputDevice, configuration.getOutputDevice());
    }
}
| apache-2.0 |
cjstehno/ersatz | ersatz/src/main/java/io/github/cjstehno/ersatz/encdec/MimeTypes.java | 1421 | /**
* Copyright (C) 2022 Christopher J. Stehno
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.github.cjstehno.ersatz.encdec;
import javax.activation.MimeType;
import javax.activation.MimeTypeParseException;
/**
 * Helpers for working with <code>MimeType</code> objects.
 */
public interface MimeTypes {

    /**
     * Creates a <code>MimeType</code> object from the specified string designator. If a parsing
     * exception is thrown, it will be wrapped in an <code>IllegalArgumentException</code> and
     * rethrown.
     *
     * @param value the mime-type text value
     * @return the wrapped MimeType
     * @throws IllegalArgumentException if the value cannot be parsed as a mime-type
     */
    static MimeType createMimeType(final String value) {
        try {
            return new MimeType(value);
        } catch (MimeTypeParseException e) {
            // Preserve the original exception as the cause; previously only the
            // message was kept, discarding the parse failure's stack trace.
            throw new IllegalArgumentException(e.getMessage(), e);
        }
    }
}
| apache-2.0 |
m-m-m/search | content-parser/impl-pdf/src/main/java/net/sf/mmm/content/parser/impl/pdf/ContentParserPdf.java | 2550 | /* Copyright (c) The m-m-m Team, Licensed under the Apache License, Version 2.0
* http://www.apache.org/licenses/LICENSE-2.0 */
package net.sf.mmm.content.parser.impl.pdf;
import java.io.InputStream;
import javax.inject.Named;
import javax.inject.Singleton;
import net.sf.mmm.content.parser.api.ContentParserOptions;
import net.sf.mmm.content.parser.base.AbstractContentParser;
import net.sf.mmm.util.context.api.MutableGenericContext;
import org.apache.pdfbox.pdfparser.PDFParser;
import org.apache.pdfbox.pdmodel.PDDocument;
import org.apache.pdfbox.pdmodel.PDDocumentInformation;
import org.apache.pdfbox.util.PDFTextStripper;
/**
 * {@link net.sf.mmm.content.parser.api.ContentParser} implementation for PDF
 * documents (content with the mimetype "application/pdf").
 *
 * @author Joerg Hohwiller (hohwille at users.sourceforge.net)
 */
@Singleton
@Named
public class ContentParserPdf extends AbstractContentParser {

  /** The mimetype. */
  public static final String KEY_MIMETYPE = "application/pdf";

  /** The default extension. */
  public static final String KEY_EXTENSION = "pdf";

  /**
   * The constructor.
   */
  public ContentParserPdf() {

    super();
  }

  /**
   * {@inheritDoc}
   */
  public String getExtension() {

    return KEY_EXTENSION;
  }

  /**
   * {@inheritDoc}
   */
  public String getMimetype() {

    return KEY_MIMETYPE;
  }

  /**
   * {@inheritDoc}
   */
  @Override
  public void parse(InputStream inputStream, long filesize, ContentParserOptions options,
      MutableGenericContext context) throws Exception {

    PDFParser pdfParser = new PDFParser(inputStream);
    pdfParser.parse();
    PDDocument document = pdfParser.getPDDocument();
    try {
      if (document.isEncrypted()) {
        // no credentials available - encrypted documents are skipped entirely
        // pdfDoc.decrypt("password");
        return;
      }
      transferMetadata(document.getDocumentInformation(), context);
      // only extract the full text for documents small enough to buffer
      if (filesize < options.getMaximumBufferSize()) {
        context.setVariable(VARIABLE_NAME_TEXT, new PDFTextStripper().getText(document));
      }
    } finally {
      document.close();
    }
  }

  /**
   * Copies title, keywords and author from the PDF document information into
   * the given context. Only values that are actually present are transferred.
   *
   * @param metadata the PDF document information to read.
   * @param context the context to receive the metadata variables.
   */
  private void transferMetadata(PDDocumentInformation metadata, MutableGenericContext context) {

    String title = metadata.getTitle();
    if (title != null) {
      context.setVariable(VARIABLE_NAME_TITLE, title);
    }
    String keywords = metadata.getKeywords();
    if (keywords != null) {
      context.setVariable(VARIABLE_NAME_KEYWORDS, keywords);
    }
    String author = metadata.getAuthor();
    if (author != null) {
      context.setVariable(VARIABLE_NAME_CREATOR, author);
    }
  }
}
| apache-2.0 |
Richard-Linsdale/lindos | lifecycle/src/main/java/uk/theretiredprogrammer/lindos/authentication/dataobjects/ApplicationRoot.java | 4388 | /*
* Copyright 2015-2017 Richard Linsdale.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.theretiredprogrammer.lindos.authentication.dataobjects;
import java.io.IOException;
import java.util.List;
import org.openide.util.Lookup;
import org.openide.util.lookup.ServiceProvider;
import uk.theretiredprogrammer.nbpcglibrary.common.Listener;
import uk.theretiredprogrammer.nbpcglibrary.common.LogBuilder;
import uk.theretiredprogrammer.nbpcglibrary.data.entity.*;
import uk.theretiredprogrammer.nbpcglibrary.data.entityreferences.*;
/**
 * The ApplicationRoot Object — singleton root entity owning the collection of
 * {@link Application} entities and the change listeners on that collection.
 * Obtain the instance via {@link #get()} (NetBeans Lookup).
 *
 * (Class generated by NetBeans Platform Code Generator tools using script.xml.
 * Do not edit this file. Apply any changes to the definition file and
 * regenerate all files.)
 *
 * @author Richard Linsdale (richard at theretiredprogrammer.uk)
 */
@ServiceProvider(service = ApplicationRoot.class)
public class ApplicationRoot extends CoreEntity {

    /**
     * the ApplicationRoot field identifiers
     */
    public enum ApplicationRootField {

        /**
         * the applications collection
         */
        APPLICATIONS,
    }

    // Reference set backing the Applications collection; loaded in the constructor.
    private final EntityReferenceSet<Integer, Application, ApplicationRoot> applications;

    @Override
    public final String instanceDescription() {
        return LogBuilder.instanceDescription(this);
    }

    /**
     * Get the ApplicationRoot entity.
     *
     * NOTE(review): obtained via NetBeans Lookup; presumably returns null if no
     * provider is registered — confirm callers handle that case.
     *
     * @return the ApplicationRoot entity
     */
    public static ApplicationRoot get() {
        return Lookup.getDefault().lookup(ApplicationRoot.class);
    }

    /**
     * Constructor. Creates and loads the Applications reference set.
     *
     * @throws IOException if problem with obtaining/parsing data
     */
    public ApplicationRoot() throws IOException {
        super("ApplicationRoot", "folder_table");
        applications = new EntityReferenceSet<>(instanceDescription() + ">Applications", Application.EM.class);
        applications.load();
    }

    /**
     * Add an Application to the root's list.
     *
     * @param e the application
     */
    public void addApplication(Application e) {
        applications.add(e);
    }

    /**
     * Remove an Application from the root's list.
     *
     * @param e the Application
     */
    public void removeApplication(Application e) {
        applications.remove(e);
    }

    /**
     * Add a set-change listener to the applications collection.
     *
     * @param listener the set change listener to add
     */
    public void addApplicationSetChangeListener(Listener<SetChangeEventParams> listener) {
        applications.addSetListener(listener);
    }

    /**
     * Remove a set-change listener from the applications collection.
     *
     * @param listener the set change listener to remove
     */
    public void removeApplicationSetChangeListener(Listener<SetChangeEventParams> listener) {
        applications.removeSetListener(listener);
    }

    /**
     * Static convenience: add a set-change listener via the singleton instance
     * (equivalent to {@code get().addApplicationSetChangeListener(listener)}).
     *
     * @param listener the set change listener to be added
     */
    public static void addApplicationsSetChangeListeners(Listener<SetChangeEventParams> listener) {
        get().applications.addSetListener(listener);
    }

    /**
     * Static convenience: remove a set-change listener via the singleton instance.
     *
     * @param listener the set change listener to be removed
     */
    public static void removeApplicationsSetChangeListeners(Listener<SetChangeEventParams> listener) {
        get().applications.removeSetListener(listener);
    }

    /**
     * Get the list of Applications.
     *
     * @return the list of Applications
     */
    public List<Application> getApplications() {
        return applications.get();
    }

    // Restores the reference set to its last saved state (undo of pending changes).
    @Override
    protected final void entityRestoreState() {
        applications.restoreState();
    }

    @Override
    public String getDisplayName() {
        return "Applications";
    }
}
| apache-2.0 |
xasx/camunda-bpm-platform | engine/src/main/java/org/camunda/bpm/engine/impl/calendar/DateTimeUtil.java | 2071 | /*
* Copyright © 2013-2018 camunda services GmbH and various authors (info@camunda.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.camunda.bpm.engine.impl.calendar;
import java.util.Date;
import java.util.TimeZone;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.LocalTime;
import org.joda.time.format.DateTimeFormatter;
import org.joda.time.format.ISODateTimeFormat;
/**
 * Utility for {@link DateTime} that uses the JVM timezone
 * for date / time related operations.
 *
 * This is important as the JVM timezone and the system timezone may
 * differ which leads to different behavior in
 * {@link java.text.SimpleDateFormat} (using JVM default timezone) and
 * JODA time (using system default timezone).
 *
 * @author Nico Rehwaldt
 *
 * @see CAM-1170
 */
public class DateTimeUtil {

  private static final DateTimeZone JVM_DEFAULT_DATE_TIME_ZONE = DateTimeZone.forTimeZone(TimeZone.getDefault());

  /**
   * ISO-8601 date/time parser bound to the JVM default timezone. Joda-Time
   * formatters are immutable and thread-safe, so the instance is created
   * eagerly exactly once; the previous implementation lazily initialized an
   * unsynchronized static field, which could race and be assigned repeatedly
   * under concurrent first use.
   */
  private static final DateTimeFormatter DATE_TIME_FORMATTER =
      ISODateTimeFormat.dateTimeParser().withZone(JVM_DEFAULT_DATE_TIME_ZONE);

  /**
   * @return the current date/time in the JVM default timezone
   */
  public static DateTime now() {
    return new DateTime(JVM_DEFAULT_DATE_TIME_ZONE);
  }

  /**
   * Parses an ISO-8601 date/time string.
   *
   * @param date the ISO-8601 string to parse
   * @return the parsed value in the JVM default timezone
   */
  public static DateTime parseDateTime(String date) {
    return DATE_TIME_FORMATTER.parseDateTime(date);
  }

  /**
   * Parses an ISO-8601 date/time string into a legacy {@link Date}.
   *
   * @param date the ISO-8601 string to parse
   * @return the parsed value as a {@link java.util.Date}
   */
  public static Date parseDate(String date) {
    return parseDateTime(date).toDate();
  }
}
| apache-2.0 |
smartcommunitylab/AAC | src/main/java/it/smartcommunitylab/aac/common/InvalidPasswordException.java | 1005 | /*******************************************************************************
* Copyright 2015 Fondazione Bruno Kessler
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package it.smartcommunitylab.aac.common;
/**
 * Signals that a supplied password was rejected as invalid during
 * registration (subtype of {@link RegistrationException}). Carries no extra
 * state beyond its type.
 *
 * @author raman
 *
 */
public class InvalidPasswordException extends RegistrationException {
	// Fixed serial version id — keep stable for serialization compatibility.
	private static final long serialVersionUID = 3481701175901182216L;

}
| apache-2.0 |
phax/ph-oton | ph-oton-bootstrap4/src/main/java/com/helger/photon/bootstrap4/card/BootstrapCard.java | 2049 | /*
* Copyright (C) 2018-2022 Philip Helger (www.helger.com)
* philip[at]helger[dot]com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.helger.photon.bootstrap4.card;
import javax.annotation.Nonnull;
import com.helger.commons.annotation.ReturnsMutableCopy;
import com.helger.html.hc.IHCConversionSettingsToNode;
import com.helger.html.hc.IHCHasChildrenMutable;
import com.helger.html.hc.IHCNode;
import com.helger.photon.bootstrap4.CBootstrapCSS;
import com.helger.photon.bootstrap4.base.AbstractBootstrapDiv;
/**
 * Bootstrap 4 card (replaces B3 panel, well and thumbnail).
 *
 * @author Philip Helger
 */
public class BootstrapCard extends AbstractBootstrapDiv <BootstrapCard>
{
  public BootstrapCard ()
  {}

  /**
   * Create a new {@link BootstrapCardHeader}, append it as a child of this
   * card and return it.
   *
   * @return the created and attached header. Never <code>null</code>.
   */
  @Nonnull
  @ReturnsMutableCopy
  public final BootstrapCardHeader createAndAddHeader ()
  {
    final BootstrapCardHeader aHeader = new BootstrapCardHeader ();
    return addAndReturnChild (aHeader);
  }

  /**
   * Create a new {@link BootstrapCardBody}, append it as a child of this card
   * and return it.
   *
   * @return the created and attached body. Never <code>null</code>.
   */
  @Nonnull
  @ReturnsMutableCopy
  public final BootstrapCardBody createAndAddBody ()
  {
    final BootstrapCardBody aBody = new BootstrapCardBody ();
    return addAndReturnChild (aBody);
  }

  /**
   * Create a new {@link BootstrapCardFooter}, append it as a child of this
   * card and return it.
   *
   * @return the created and attached footer. Never <code>null</code>.
   */
  @Nonnull
  @ReturnsMutableCopy
  public final BootstrapCardFooter createAndAddFooter ()
  {
    final BootstrapCardFooter aFooter = new BootstrapCardFooter ();
    return addAndReturnChild (aFooter);
  }

  @Override
  protected void onFinalizeNodeState (@Nonnull final IHCConversionSettingsToNode aConversionSettings,
                                      @Nonnull final IHCHasChildrenMutable <?, ? super IHCNode> aTargetNode)
  {
    super.onFinalizeNodeState (aConversionSettings, aTargetNode);
    // Attach the Bootstrap "card" CSS class after the base class finalization
    addClass (CBootstrapCSS.CARD);
  }
}
| apache-2.0 |
rajeevanv89/developer-studio | esb/org.wso2.developerstudio.eclipse.gmf.esb.diagram/src/org/wso2/developerstudio/eclipse/gmf/esb/diagram/edit/parts/CallTemplateMediatorEditPart.java | 18759 | package org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts;
import static org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.EditorUtils.*;
import org.apache.commons.lang.RandomStringUtils;
import org.apache.maven.project.MavenProject;
import org.eclipse.core.resources.IFile;
import org.eclipse.core.resources.IProject;
import org.eclipse.core.runtime.NullProgressMonitor;
import org.eclipse.draw2d.IFigure;
import org.eclipse.draw2d.Label;
import org.eclipse.draw2d.PositionConstants;
import org.eclipse.draw2d.Shape;
import org.eclipse.draw2d.StackLayout;
import org.eclipse.draw2d.ToolbarLayout;
import org.eclipse.emf.common.util.URI;
import org.eclipse.emf.ecore.EObject;
import org.eclipse.emf.ecore.resource.Resource;
import org.eclipse.emf.transaction.TransactionalEditingDomain;
import org.eclipse.gef.EditPart;
import org.eclipse.gef.EditPolicy;
import org.eclipse.gef.Request;
import org.eclipse.gef.commands.Command;
import org.eclipse.gef.editpolicies.LayoutEditPolicy;
import org.eclipse.gef.editpolicies.NonResizableEditPolicy;
import org.eclipse.gef.requests.CreateRequest;
import org.eclipse.gmf.runtime.diagram.ui.commands.ICommandProxy;
import org.eclipse.gmf.runtime.diagram.ui.editparts.IBorderItemEditPart;
import org.eclipse.gmf.runtime.diagram.ui.editparts.IGraphicalEditPart;
import org.eclipse.gmf.runtime.diagram.ui.editpolicies.BorderItemSelectionEditPolicy;
import org.eclipse.gmf.runtime.diagram.ui.editpolicies.CreationEditPolicy;
import org.eclipse.gmf.runtime.diagram.ui.editpolicies.DragDropEditPolicy;
import org.eclipse.gmf.runtime.diagram.ui.editpolicies.EditPolicyRoles;
import org.eclipse.gmf.runtime.diagram.ui.figures.BorderItemLocator;
import org.eclipse.gmf.runtime.draw2d.ui.figures.ConstrainedToolbarLayout;
import org.eclipse.gmf.runtime.draw2d.ui.figures.WrappingLabel;
import org.eclipse.gmf.runtime.emf.type.core.commands.SetValueCommand;
import org.eclipse.gmf.runtime.emf.type.core.requests.SetRequest;
import org.eclipse.gmf.runtime.gef.ui.figures.NodeFigure;
import org.eclipse.gmf.runtime.notation.Node;
import org.eclipse.gmf.runtime.notation.View;
import org.eclipse.jface.dialogs.Dialog;
import org.eclipse.jface.dialogs.IInputValidator;
import org.eclipse.jface.dialogs.InputDialog;
import org.eclipse.swt.SWT;
import org.eclipse.swt.graphics.Color;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Control;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.ui.IEditorDescriptor;
import org.eclipse.ui.IWorkbenchPage;
import org.eclipse.ui.PartInitException;
import org.eclipse.ui.PlatformUI;
import org.eclipse.ui.part.FileEditorInput;
import org.wso2.developerstudio.eclipse.esb.project.artifact.ESBArtifact;
import org.wso2.developerstudio.eclipse.esb.project.artifact.ESBProjectArtifact;
import org.wso2.developerstudio.eclipse.esb.project.utils.ESBProjectUtils;
import org.wso2.developerstudio.eclipse.gmf.esb.CallTemplateMediator;
import org.wso2.developerstudio.eclipse.gmf.esb.EsbPackage;
import org.wso2.developerstudio.eclipse.gmf.esb.ProxyService;
import org.wso2.developerstudio.eclipse.gmf.esb.Sequence;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.EditorUtils;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.EsbGraphicalShape;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.EsbGraphicalShapeWithLabel;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.FixedBorderItemLocator;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.FixedSizedAbstractMediator;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.OpenSeparatelyEditPolicy;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.ShowPropertyViewEditPolicy;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.editpolicy.FeedbackIndicateDragDropEditPolicy;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.utils.OpenEditorUtils;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.policies.CallTemplateMediatorCanonicalEditPolicy;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.policies.CallTemplateMediatorItemSemanticEditPolicy;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.part.EsbDiagramEditorUtil;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.part.EsbVisualIDRegistry;
import org.wso2.developerstudio.eclipse.logging.core.IDeveloperStudioLog;
import org.wso2.developerstudio.eclipse.logging.core.Logger;
import org.wso2.developerstudio.eclipse.maven.util.MavenUtils;
/**
 * GMF edit part for the WSO2 ESB "Call Template" mediator node.
 *
 * Most of the figure and edit-policy wiring is GMF-generated. The
 * hand-written parts (marked "@generated NOT") implement the double-click
 * behaviour: prompt for a target template name when none is set, create or
 * open the corresponding sequence-template diagram in the active ESB
 * project, and register newly created templates in the project's
 * artifact.xml.
 *
 * @generated NOT
 */
public class CallTemplateMediatorEditPart extends FixedSizedAbstractMediator {
	/**
	 * @generated
	 */
	public static final int VISUAL_ID = 3594;
	/**
	 * @generated
	 */
	protected IFigure contentPane;
	// Shared Developer Studio logger for this diagram plug-in.
	private static IDeveloperStudioLog log = Logger
			.getLog("org.wso2.developerstudio.eclipse.gmf.esb.diagram");
	/**
	 * @generated
	 */
	public CallTemplateMediatorEditPart(View view) {
		super(view);
	}
	/**
	 * Installs the edit policies (creation, semantic, drag-drop, canonical,
	 * layout and open/double-click) for this node.
	 *
	 * @generated NOT
	 */
	protected void createDefaultEditPolicies() {
		installEditPolicy(EditPolicyRoles.CREATION_ROLE,
				new CreationEditPolicy());
		super.createDefaultEditPolicies();
		installEditPolicy(EditPolicyRoles.SEMANTIC_ROLE,
				new CallTemplateMediatorItemSemanticEditPolicy());
		installEditPolicy(EditPolicyRoles.DRAG_DROP_ROLE,
				new DragDropEditPolicy());
		// NOTE(review): DRAG_DROP_ROLE is installed twice -- this second call
		// replaces the DragDropEditPolicy registered just above, so the first
		// installation is effectively dead. Presumably intentional, but worth
		// confirming.
		installEditPolicy(EditPolicyRoles.DRAG_DROP_ROLE,
				new FeedbackIndicateDragDropEditPolicy());
		installEditPolicy(EditPolicyRoles.CANONICAL_ROLE,
				new CallTemplateMediatorCanonicalEditPolicy());
		installEditPolicy(EditPolicy.LAYOUT_ROLE, createLayoutEditPolicy());
		// For handle Double click Event.
		installEditPolicy(EditPolicyRoles.OPEN_ROLE,
				new OpenSeparatelyEditPolicy());
		// XXX need an SCR to runtime to have another abstract superclass that would let children add reasonable editpolicies
		// removeEditPolicy(org.eclipse.gmf.runtime.diagram.ui.editpolicies.EditPolicyRoles.CONNECTION_HANDLES_ROLE);
	}
	/**
	 * Builds the layout edit policy: the two connector border items get a
	 * border-item selection policy, everything else falls back to the child's
	 * own primary-drag policy (or a non-resizable one).
	 *
	 * @generated
	 */
	protected LayoutEditPolicy createLayoutEditPolicy() {
		org.eclipse.gmf.runtime.diagram.ui.editpolicies.LayoutEditPolicy lep = new org.eclipse.gmf.runtime.diagram.ui.editpolicies.LayoutEditPolicy() {
			protected EditPolicy createChildEditPolicy(EditPart child) {
				View childView = (View) child.getModel();
				switch (EsbVisualIDRegistry.getVisualID(childView)) {
				case CallTemplateMediatorInputConnectorEditPart.VISUAL_ID:
				case CallTemplateMediatorOutputConnectorEditPart.VISUAL_ID:
					return new BorderItemSelectionEditPolicy();
				}
				EditPolicy result = child
						.getEditPolicy(EditPolicy.PRIMARY_DRAG_ROLE);
				if (result == null) {
					result = new NonResizableEditPolicy();
				}
				return result;
			}
			protected Command getMoveChildrenCommand(Request request) {
				return null;
			}
			protected Command getCreateCommand(CreateRequest request) {
				return null;
			}
		};
		return lep;
	}
	/**
	 * Creates the node figure. Once the figure has real (non-origin) bounds,
	 * it re-anchors itself to the most suitable surrounding element and
	 * re-allocates its children.
	 *
	 * @generated NOT
	 */
	protected IFigure createNodeShape() {
		return primaryShape = new CallTemplateMediatorFigure() {
			public void setBounds(org.eclipse.draw2d.geometry.Rectangle rect) {
				super.setBounds(rect);
				if (this.getBounds().getLocation().x != 0
						&& this.getBounds().getLocation().y != 0) {
					connectToMostSuitableElement();
					reAllocate(rect);
				}
			};
		};
	}
	/**
	 * @generated
	 */
	public CallTemplateMediatorFigure getPrimaryShape() {
		return (CallTemplateMediatorFigure) primaryShape;
	}
	// Wires the fixed children: the description label into the figure, and the
	// input/output connectors as west/east border items. Returns true when the
	// child was handled here.
	protected boolean addFixedChild(EditPart childEditPart) {
		if (childEditPart instanceof CallTemplateMediatorDescriptionEditPart) {
			((CallTemplateMediatorDescriptionEditPart) childEditPart)
					.setLabel(getPrimaryShape()
							.getCallTemplateMediatorDescriptionLabel());
			return true;
		}
		if (childEditPart instanceof CallTemplateMediatorInputConnectorEditPart) {
			IFigure borderItemFigure = ((CallTemplateMediatorInputConnectorEditPart) childEditPart)
					.getFigure();
			BorderItemLocator locator = new FixedBorderItemLocator(
					getMainFigure(), borderItemFigure, PositionConstants.WEST,
					0.5);
			getBorderedFigure().getBorderItemContainer().add(borderItemFigure,
					locator);
			return true;
		}
		if (childEditPart instanceof CallTemplateMediatorOutputConnectorEditPart) {
			IFigure borderItemFigure = ((CallTemplateMediatorOutputConnectorEditPart) childEditPart)
					.getFigure();
			BorderItemLocator locator = new FixedBorderItemLocator(
					getMainFigure(), borderItemFigure, PositionConstants.EAST,
					0.5);
			getBorderedFigure().getBorderItemContainer().add(borderItemFigure,
					locator);
			return true;
		}
		return false;
	}
	// Counterpart of addFixedChild: detaches the connector border items; the
	// description label needs no explicit removal.
	protected boolean removeFixedChild(EditPart childEditPart) {
		if (childEditPart instanceof CallTemplateMediatorDescriptionEditPart) {
			return true;
		}
		if (childEditPart instanceof CallTemplateMediatorInputConnectorEditPart) {
			getBorderedFigure()
					.getBorderItemContainer()
					.remove(((CallTemplateMediatorInputConnectorEditPart) childEditPart)
							.getFigure());
			return true;
		}
		if (childEditPart instanceof CallTemplateMediatorOutputConnectorEditPart) {
			getBorderedFigure()
					.getBorderItemContainer()
					.remove(((CallTemplateMediatorOutputConnectorEditPart) childEditPart)
							.getFigure());
			return true;
		}
		return false;
	}
	protected void addChildVisual(EditPart childEditPart, int index) {
		if (addFixedChild(childEditPart)) {
			return;
		}
		super.addChildVisual(childEditPart, -1);
	}
	protected void removeChildVisual(EditPart childEditPart) {
		if (removeFixedChild(childEditPart)) {
			return;
		}
		super.removeChildVisual(childEditPart);
	}
	protected IFigure getContentPaneFor(IGraphicalEditPart editPart) {
		if (editPart instanceof IBorderItemEditPart) {
			return getBorderedFigure().getBorderItemContainer();
		}
		return getContentPane();
	}
	/**
	 * Creates figure for this edit part.
	 *
	 * Body of this method does not depend on settings in generation model
	 * so you may safely remove <i>generated</i> tag and modify it.
	 *
	 * @generated NOT
	 */
	protected NodeFigure createMainFigure() {
		NodeFigure figure = createNodePlate();
		figure.setLayoutManager(new ToolbarLayout(true));
		IFigure shape = createNodeShape();
		figure.add(shape);
		contentPane = setupContentPane(shape);
		return figure;
	}
	/**
	 * Default implementation treats passed figure as content pane.
	 * Respects layout one may have set for generated figure.
	 * @param nodeShape instance of generated figure class
	 * @generated
	 */
	protected IFigure setupContentPane(IFigure nodeShape) {
		if (nodeShape.getLayoutManager() == null) {
			ConstrainedToolbarLayout layout = new ConstrainedToolbarLayout();
			layout.setSpacing(5);
			nodeShape.setLayoutManager(layout);
		}
		return nodeShape; // use nodeShape itself as contentPane
	}
	/**
	 * @generated
	 */
	public IFigure getContentPane() {
		if (contentPane != null) {
			return contentPane;
		}
		return super.getContentPane();
	}
	/**
	 * @generated
	 */
	protected void setForegroundColor(Color color) {
		if (primaryShape != null) {
			primaryShape.setForegroundColor(color);
		}
	}
	/**
	 * @generated
	 */
	protected void setBackgroundColor(Color color) {
		if (primaryShape != null) {
			primaryShape.setBackgroundColor(color);
		}
	}
	/**
	 * @generated
	 */
	protected void setLineWidth(int width) {
		if (primaryShape instanceof Shape) {
			((Shape) primaryShape).setLineWidth(width);
		}
	}
	/**
	 * @generated
	 */
	protected void setLineType(int style) {
		if (primaryShape instanceof Shape) {
			((Shape) primaryShape).setLineStyle(style);
		}
	}
	// Builds an ESBArtifact descriptor for artifact.xml with the fixed
	// "EnterpriseServiceBus" server role.
	private ESBArtifact createArtifact(String name, String groupId,
			String version, String path, String type) {
		ESBArtifact artifact = new ESBArtifact();
		artifact.setName(name);
		artifact.setVersion(version);
		artifact.setType(type);
		artifact.setServerRole("EnterpriseServiceBus");
		artifact.setGroupId(groupId);
		artifact.setFile(path);
		return artifact;
	}
	// Derives the Maven group id ("<project groupId>.template") from the
	// project's pom.xml; falls back to "com.example" if the pom can't be read.
	private String getMavenGroupID(IProject project) {
		String groupID = "com.example";
		try {
			MavenProject mavenProject = MavenUtils.getMavenProject(project
					.getFile("pom.xml").getLocation().toFile());
			groupID = mavenProject.getGroupId() + ".template";
		} catch (Exception e) {
			//ignore. Then group id would be default.
		}
		return groupID;
	}
	// Registers a new sequence template (version 1.0.0) in the active
	// project's artifact.xml. Failures are only logged.
	private void addSequenceToArtifactXML(String templateName) {
		IProject activeProject = getActiveProject();
		ESBProjectArtifact esbProjectArtifact = new ESBProjectArtifact();
		try {
			esbProjectArtifact.fromFile(activeProject.getFile("artifact.xml")
					.getLocation().toFile());
			esbProjectArtifact.addESBArtifact(createArtifact(templateName,
					getMavenGroupID(activeProject), "1.0.0",
					"src/main/synapse-config/templates/" + templateName
							+ ".xml", "synapse/sequenceTemplate"));
			esbProjectArtifact.toFile();
		} catch (Exception e) {
			log.error("Error while updating Artifact.xml");
		}
	}
	/**
	 * Creates the template diagram files (fileURI1 = .esb_diagram,
	 * fileURI2 = .esb) under the project's template resource directory and
	 * opens them, or simply opens the existing diagram file in its default
	 * editor if it is already present.
	 *
	 * @return true unless diagram creation failed
	 */
	public boolean createFiles(String name, String fileURI1, String fileURI2,
			IProject currentProject) {
		Resource diagram;
		String basePath = "platform:/resource/" + currentProject.getName()
				+ "/" + TEMPLATE_RESOURCE_DIR + "/";
		IFile file = currentProject.getFile(TEMPLATE_RESOURCE_DIR + "/"
				+ fileURI1);
		if (!file.exists()) {
			IFile fileTobeOpened = currentProject.getFile(SYNAPSE_CONFIG_DIR
					+ "/templates/" + name + ".xml");
			try {
				diagram = EsbDiagramEditorUtil.createDiagram(
						URI.createURI(basePath + fileURI1),
						URI.createURI(basePath + fileURI2),
						new NullProgressMonitor(), "template.sequence", name,
						null);
				if (fileTobeOpened.exists()) {
					// Synapse XML already exists: open it alongside the
					// freshly created diagram.
					String diagramPath = diagram.getURI()
							.toPlatformString(true);
					OpenEditorUtils oeUtils = new OpenEditorUtils();
					oeUtils.openSeparateEditor(fileTobeOpened, diagramPath);
				} else {
					// Brand-new template: register it in artifact.xml first.
					addSequenceToArtifactXML(name);
					EsbDiagramEditorUtil.openDiagram(diagram);
				}
			} catch (Exception e) {
				log.error("Cannot open file " + fileTobeOpened, e);
				return false;
			}
			return true;
		}
		else {
			// Diagram file already exists: open it with its default editor.
			IWorkbenchPage page = PlatformUI.getWorkbench()
					.getActiveWorkbenchWindow().getActivePage();
			IEditorDescriptor desc = PlatformUI.getWorkbench()
					.getEditorRegistry().getDefaultEditor(file.getName());
			try {
				page.openEditor(new FileEditorInput(file), desc.getId());
			} catch (PartInitException e) {
				log.error("Cannot init editor", e);
			}
			return true;
		}
	}
	// Opens the diagram of the mediator's target template in a separate
	// editor, creating the template files if needed.
	public void openWithSeparateEditor() {
		IProject activeProject = EditorUtils.getActiveProject();
		String name = ((CallTemplateMediator) ((org.eclipse.gmf.runtime.notation.impl.NodeImpl) getModel())
				.getElement()).getTargetTemplate();
		/*
		 * File creations.
		 */
		createFiles(name, "template_" + name + ".esb_diagram", "template_"
				+ name + ".esb", activeProject);
		//EditorUtils.updateToolpalette();
	}
	/**
	 * Computes an unused default template name by incrementing the numeric
	 * suffix of "Sequence_Template_1" until no artifact with that name exists.
	 * If the existence check itself fails, falls back to a random
	 * alphabetic suffix.
	 */
	public String calculateDefaultName() {
		IProject activeProject = EditorUtils.getActiveProject();
		String finalName = "Sequence_Template_1";
		int i = 1;
		try {
			while (ESBProjectUtils.artifactExists(activeProject, finalName)) {
				// Strip the trailing digits, then append the next counter.
				finalName = finalName.replaceAll("\\d+$", "");
				i++;
				finalName = finalName.concat(i + "");
			}
		} catch (Exception e) {
			finalName = finalName.concat("_")
					.concat(RandomStringUtils.randomAlphabetic(5))
					.concat("_" + i);
		}
		return finalName;
	}
	/**
	 * Double-click entry point. If the mediator has no target template yet,
	 * prompts for a (non-empty, space-free) name, stores it on the model via
	 * an undoable SetValueCommand, and then opens the template diagram.
	 * Otherwise opens the existing target template directly.
	 */
	public void createDialogBox() {
		final EObject callTemplate = (CallTemplateMediator) ((org.eclipse.gmf.runtime.notation.impl.NodeImpl) getModel())
				.getElement();
		// For validation: user should not enter "" value for name.
		if (((CallTemplateMediator) callTemplate).getTargetTemplate().trim()
				.equals("")) {
			IInputValidator validator = new IInputValidator() {
				public String isValid(String str) {
					if (str.trim().isEmpty()) {
						return "Target Template cannot be empty";
					} else if (str.indexOf(0x20) != -1) {
						// 0x20 is the ASCII space character.
						return "Target Template cannot contain spaces";
					}
					return null;
				}
			};
			String defaultName = calculateDefaultName();
			final InputDialog templateNameInput = new InputDialog(new Shell(),
					"Enter Target Template", "Target Template", defaultName,
					validator) {
				protected Control createDialogArea(Composite parent) {
					Composite composite = (Composite) super
							.createDialogArea(parent);
					return composite;
				}
			};
			int open = templateNameInput.open();
			if (open == Dialog.OK) {
				Display.getDefault().asyncExec(new Runnable() {
					public void run() {
						String targetTemplate = templateNameInput.getValue();
						TransactionalEditingDomain editingDomain = getEditingDomain();
						SetRequest setRequest = new SetRequest(
								editingDomain,
								callTemplate,
								EsbPackage.eINSTANCE
										.getCallTemplateMediator_TargetTemplate(),
								targetTemplate);
						SetValueCommand operation = new SetValueCommand(
								setRequest) {
							public boolean canUndo() {
								return true;
							}
							public boolean canRedo() {
								return true;
							}
						};
						getEditDomain().getCommandStack().execute(
								new ICommandProxy(operation));
						openWithSeparateEditor();
					}
				});
			}
		} else {
			openWithSeparateEditor();
		}
	}
	/**
	 * Draw2d figure for the Call Template mediator: icon, node name and a
	 * description label.
	 *
	 * @generated
	 */
	public class CallTemplateMediatorFigure extends EsbGraphicalShapeWithLabel {
		/**
		 * @generated
		 */
		private WrappingLabel fFigureCallTemplateMediatorPropertyValue;
		private WrappingLabel callTemplateMediatorDescriptionLabel;
		/**
		 * @generated
		 */
		public CallTemplateMediatorFigure() {
			this.setBackgroundColor(THIS_BACK);
			createContents();
		}
		/**
		 * @generated NOT
		 */
		private void createContents() {
			fFigureCallTemplateMediatorPropertyValue = new WrappingLabel();
			fFigureCallTemplateMediatorPropertyValue.setText("<...>");
			fFigureCallTemplateMediatorPropertyValue.setAlignment(SWT.CENTER);
			//this.getPropertyValueRectangle1().add(fFigureCallTemplateMediatorPropertyValue);
			callTemplateMediatorDescriptionLabel = getPropertyNameLabel();
		}
		/**
		 * @generated
		 */
		public WrappingLabel getFigureCallTemplateMediatorPropertyValue() {
			return fFigureCallTemplateMediatorPropertyValue;
		}
		public WrappingLabel getCallTemplateMediatorDescriptionLabel() {
			return callTemplateMediatorDescriptionLabel;
		}
		public String getIconPath() {
			return "icons/ico20/callTemplate-mediator.gif";
		}
		public String getNodeName() {
			return "CallTemplate";
		}
		public IFigure getToolTip() {
			return new Label("CallTemplate");
		}
	}
	/**
	 * Light-grey background used by the figure.
	 *
	 * @generated
	 */
	static final Color THIS_BACK = new Color(null, 230, 230, 230);
}
| apache-2.0 |
GerritCodeReview/gerrit-attic | src/main/java/com/google/gerrit/server/http/GerritJsonServletProvider.java | 1559 | // Copyright (C) 2009 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.server.http;
import com.google.gwtjsonrpc.client.RemoteJsonService;
import com.google.inject.AbstractModule;
import com.google.inject.Inject;
import com.google.inject.Injector;
import com.google.inject.Provider;
/**
 * Guice {@link Provider} that constructs a {@link GerritJsonServlet} wired to
 * one specific {@link RemoteJsonService} implementation.
 */
class GerritJsonServletProvider implements Provider<GerritJsonServlet> {
  @Inject
  private Injector injector;

  private final Class<? extends RemoteJsonService> serviceClass;

  @Inject
  GerritJsonServletProvider(final Class<? extends RemoteJsonService> c) {
    serviceClass = c;
  }

  @Override
  public GerritJsonServlet get() {
    // Resolve the concrete service instance, then bind it in a child
    // injector so the servlet's RemoteJsonService dependency resolves to
    // exactly this instance.
    final RemoteJsonService service = injector.getInstance(serviceClass);
    final AbstractModule serviceBinding = new AbstractModule() {
      @Override
      protected void configure() {
        bind(RemoteJsonService.class).toInstance(service);
      }
    };
    final Injector child = injector.createChildInjector(serviceBinding);
    return child.getInstance(GerritJsonServlet.class);
  }
}
| apache-2.0 |
sjaco002/incubator-asterixdb-hyracks | hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/misc/SplitOperatorDescriptor.java | 8364 | /*
* Copyright 2009-2013 by The Regents of the University of California
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* you may obtain a copy of the License from
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package edu.uci.ics.hyracks.dataflow.std.misc;
import java.nio.ByteBuffer;
import edu.uci.ics.hyracks.api.comm.IFrameWriter;
import edu.uci.ics.hyracks.api.comm.VSizeFrame;
import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
import edu.uci.ics.hyracks.api.dataflow.ActivityId;
import edu.uci.ics.hyracks.api.dataflow.IActivityGraphBuilder;
import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
import edu.uci.ics.hyracks.api.dataflow.TaskId;
import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
import edu.uci.ics.hyracks.api.job.IOperatorDescriptorRegistry;
import edu.uci.ics.hyracks.dataflow.common.comm.util.FrameUtils;
import edu.uci.ics.hyracks.dataflow.std.base.AbstractActivityNode;
import edu.uci.ics.hyracks.dataflow.std.base.AbstractOperatorDescriptor;
import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryInputOperatorNodePushable;
import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryOutputSourceOperatorNodePushable;
/**
 * Hyracks operator that copies every incoming frame to each of its outputs.
 *
 * Outputs whose materialization flag is set are not fed directly: the
 * splitter activity writes the frames to a materialized run
 * (MaterializerTaskState), and a separate reader activity (one per
 * materialized output, blocked behind the splitter) replays that run later.
 * The run file is deleted once the last reader finishes.
 */
public class SplitOperatorDescriptor extends AbstractOperatorDescriptor {
    private static final long serialVersionUID = 1L;
    private final static int SPLITTER_MATERIALIZER_ACTIVITY_ID = 0;
    private final static int MATERIALIZE_READER_ACTIVITY_ID = 1;
    // Per-output flag: true => that output is served from the materialized run.
    private boolean[] outputMaterializationFlags;
    // True when at least one output is materialized.
    private boolean requiresMaterialization;
    private int numberOfNonMaterializedOutputs = 0;
    // NOTE(review): mutated from activity push runtimes without
    // synchronization, and lives on the (serialized) descriptor rather than
    // in per-job task state -- confirm that all readers of one job share this
    // instance and never race on it.
    private int numberOfActiveMaterializeReaders = 0;
    // Convenience constructor: no output is materialized.
    public SplitOperatorDescriptor(IOperatorDescriptorRegistry spec, RecordDescriptor rDesc, int outputArity) {
        this(spec, rDesc, outputArity, new boolean[outputArity]);
    }
    public SplitOperatorDescriptor(IOperatorDescriptorRegistry spec, RecordDescriptor rDesc, int outputArity,
            boolean[] outputMaterializationFlags) {
        super(spec, 1, outputArity);
        // All outputs carry the same record descriptor as the input.
        for (int i = 0; i < outputArity; i++) {
            recordDescriptors[i] = rDesc;
        }
        this.outputMaterializationFlags = outputMaterializationFlags;
        requiresMaterialization = false;
        for (boolean flag : outputMaterializationFlags) {
            if (flag) {
                requiresMaterialization = true;
                break;
            }
        }
    }
    @Override
    public void contributeActivities(IActivityGraphBuilder builder) {
        // The splitter activity consumes the single input and drives all
        // non-materialized outputs directly.
        SplitterMaterializerActivityNode sma = new SplitterMaterializerActivityNode(new ActivityId(odId,
                SPLITTER_MATERIALIZER_ACTIVITY_ID));
        builder.addActivity(this, sma);
        builder.addSourceEdge(0, sma, 0);
        int taskOutputIndex = 0;
        for (int i = 0; i < outputArity; i++) {
            if (!outputMaterializationFlags[i]) {
                builder.addTargetEdge(i, sma, taskOutputIndex);
                taskOutputIndex++;
            }
        }
        numberOfNonMaterializedOutputs = taskOutputIndex;
        if (requiresMaterialization) {
            // One reader activity per materialized output, each blocked until
            // the splitter has fully written the run.
            int activityId = MATERIALIZE_READER_ACTIVITY_ID;
            for (int i = 0; i < outputArity; i++) {
                if (outputMaterializationFlags[i]) {
                    MaterializeReaderActivityNode mra = new MaterializeReaderActivityNode(new ActivityId(odId,
                            activityId));
                    builder.addActivity(this, mra);
                    builder.addTargetEdge(i, mra, 0);
                    builder.addBlockingEdge(sma, mra);
                    numberOfActiveMaterializeReaders++;
                    activityId++;
                }
            }
        }
    }
    // Forwards each frame to all non-materialized writers and, when needed,
    // appends it to the materialized run registered as task state.
    private final class SplitterMaterializerActivityNode extends AbstractActivityNode {
        private static final long serialVersionUID = 1L;
        public SplitterMaterializerActivityNode(ActivityId id) {
            super(id);
        }
        @Override
        public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx,
                IRecordDescriptorProvider recordDescProvider, final int partition, int nPartitions) {
            return new AbstractUnaryInputOperatorNodePushable() {
                private MaterializerTaskState state;
                private final IFrameWriter[] writers = new IFrameWriter[numberOfNonMaterializedOutputs];
                @Override
                public void open() throws HyracksDataException {
                    if (requiresMaterialization) {
                        state = new MaterializerTaskState(ctx.getJobletContext().getJobId(), new TaskId(
                                getActivityId(), partition));
                        state.open(ctx);
                    }
                    for (int i = 0; i < numberOfNonMaterializedOutputs; i++) {
                        writers[i].open();
                    }
                }
                @Override
                public void nextFrame(ByteBuffer bufferAccessor) throws HyracksDataException {
                    if (requiresMaterialization) {
                        state.appendFrame(bufferAccessor);
                    }
                    for (int i = 0; i < numberOfNonMaterializedOutputs; i++) {
                        FrameUtils.flushFrame(bufferAccessor, writers[i]);
                    }
                }
                @Override
                public void close() throws HyracksDataException {
                    if (requiresMaterialization) {
                        // Publish the finished run for the reader activities.
                        state.close();
                        ctx.setStateObject(state);
                    }
                    for (int i = 0; i < numberOfNonMaterializedOutputs; i++) {
                        writers[i].close();
                    }
                }
                @Override
                public void fail() throws HyracksDataException {
                    for (int i = 0; i < numberOfNonMaterializedOutputs; i++) {
                        writers[i].fail();
                    }
                }
                @Override
                public void setOutputFrameWriter(int index, IFrameWriter writer, RecordDescriptor recordDesc) {
                    writers[index] = writer;
                }
            };
        }
    }
    // Replays the materialized run produced by the splitter into one output;
    // the last reader to finish deletes the run file.
    private final class MaterializeReaderActivityNode extends AbstractActivityNode {
        private static final long serialVersionUID = 1L;
        public MaterializeReaderActivityNode(ActivityId id) {
            super(id);
        }
        @Override
        public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx,
                final IRecordDescriptorProvider recordDescProvider, final int partition, int nPartitions)
                throws HyracksDataException {
            return new AbstractUnaryOutputSourceOperatorNodePushable() {
                @Override
                public void initialize() throws HyracksDataException {
                    MaterializerTaskState state = (MaterializerTaskState) ctx.getStateObject(new TaskId(new ActivityId(
                            getOperatorId(), SPLITTER_MATERIALIZER_ACTIVITY_ID), partition));
                    state.writeOut(writer, new VSizeFrame(ctx));
                }
                @Override
                public void deinitialize() throws HyracksDataException {
                    numberOfActiveMaterializeReaders--;
                    MaterializerTaskState state = (MaterializerTaskState) ctx.getStateObject(new TaskId(new ActivityId(
                            getOperatorId(), SPLITTER_MATERIALIZER_ACTIVITY_ID), partition));
                    if (numberOfActiveMaterializeReaders == 0) {
                        state.deleteFile();
                    }
                }
            };
        }
    }
}
| apache-2.0 |
abalufaske/whereIsSmartwatch | WhereIsSmartwatch/src/abalufaske/where/is/ManagedControlExtension.java | 3592 | /*
Copyright (c) 2011, Sony Ericsson Mobile Communications AB
Copyright (c) 2011-2013, Sony Mobile Communications AB
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of the Sony Ericsson Mobile Communications AB / Sony Mobile
Communications AB nor the names of its contributors may be used to endorse or promote
products derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package abalufaske.where.is;
import android.content.Context;
import android.content.Intent;
import com.sonyericsson.extras.liveware.extension.util.control.ControlExtension;
public class ManagedControlExtension extends ControlExtension {

    /**
     * Intent extra (boolean). When true, the started extension is excluded
     * from the navigation history stack: as soon as the user navigates away
     * from it, it can no longer be reached via the back stack.
     */
    public static final String EXTENSION_NO_HISTORY = "EXTENSION_NO_HISTORY";

    /**
     * Intent extra (boolean). When true, the extension handles back-button
     * presses itself. Use with caution, since this can break the expected
     * navigation pattern.
     */
    public static final String EXTENSION_OVERRIDES_BACK = "EXTENSION_OVERRIDES_BACK";

    /** Manager that owns this extension's lifecycle. */
    protected ControlManagerSmartWatch2 mControlManager;

    /** Intent this extension was started with; carries its state. */
    private Intent mIntent;

    /**
     * Creates a managed control extension. Not meant to be called directly;
     * instances are created by the ControlManager.
     *
     * @param ctx the application context
     * @param hostAppPackageName package name of the host application
     * @param manager the ControlManager handling this extension's lifecycle
     * @param startIntent the intent used to handle the state of this
     *            ManagedControlExtension
     */
    public ManagedControlExtension(Context ctx, String hostAppPackageName,
            ControlManagerSmartWatch2 manager, Intent startIntent) {
        super(ctx, hostAppPackageName);
        this.mControlManager = manager;
        this.mIntent = startIntent;
    }

    /**
     * @return the intent that started this control extension
     */
    public Intent getIntent() {
        return this.mIntent;
    }
}
| apache-2.0 |
ivanjelinek/konektor-FB | src/fbconnector/Settings.java | 6961 | /*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package fbconnector;
import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Date;
import java.util.TreeMap;
/**
*
* @author Ivan Jelínek
*/
/**
 * Holds the connector configuration read from a plain-text config file:
 * the Elasticsearch endpoint (ip/port/index/type), the Facebook application
 * credentials, a page-crawl limit and the list of Facebook pages to download
 * together with the "segment" label assigned to each page.
 *
 * Expected file layout (one "key:value" pair per line, lines starting with
 * "//" are comments):
 *
 *   [server]
 *   ip:...  port:...  index:...  type:...  appID:...  appToken:...
 *   appAccessToken:...  appSecret:...  sentimentModel:...  pageLimit:...
 *   [task]
 *   download:PAGE_NAME:ignored:SEGMENT
 *   [end]
 *
 * @author Ivan Jelínek
 */
class Settings {

    // Path of the config file; overridable via the one-arg constructor.
    private String configURL = "config.txt";
    private String ipES;
    // Maps a Facebook page name to the segment label from its "download" line.
    private TreeMap<String, String> segmentMap = new TreeMap<>();
    private String portES;
    private String indexES;
    private String typeES;
    private String appID;
    // Facebook page names to crawl, in file order.
    private ArrayList<String> targetList = new ArrayList<>();
    private int pageLimit;
    private String appToken = "";
    // Never assigned anywhere; kept for interface compatibility (always null).
    private Settings thisSetting = null;
    private String appAccessToken;
    private String appSecret;
    private String sentimentModel;

    /**
     * Loads the configuration from the given file.
     *
     * @param configPath path to the config file
     */
    public Settings(String configPath) {
        this.configURL = configPath;
        loadConfig();
    }

    /** Loads the configuration from the default "config.txt". */
    public Settings() {
        loadConfig();
    }

    /** @return path of the configured sentiment model */
    public String getSentimentModel() {
        return this.sentimentModel;
    }

    /** @return always {@code null}; the backing field is never assigned */
    public Settings getThisSettings() {
        return this.thisSetting;
    }

    /** @return the Facebook application id */
    public String getAppID() {
        return this.appID;
    }

    /** @return the Facebook app access token ("appAccessToken" key) */
    public String getAccessToken() {
        return this.appAccessToken;
    }

    /** @return the Facebook application secret */
    public String getAppSecret() {
        return this.appSecret;
    }

    /** @return the application token ("appToken" key), "" if not configured */
    public String getAppToken() {
        return this.appToken;
    }

    /** @return the page-crawl limit (same as {@link #getPageLimit()}) */
    public int getLimitPages() {
        return this.pageLimit;
    }

    /** @return the Elasticsearch host address */
    public String getIpES() {
        return ipES;
    }

    /** @return the Elasticsearch port */
    public String getPortES() {
        return portES;
    }

    /** @return the Elasticsearch index name */
    public String getIndexES() {
        return indexES;
    }

    /** @return the Elasticsearch document type */
    public String getTypeES() {
        return typeES;
    }

    /** @return the page-crawl limit */
    public int getPageLimit() {
        return pageLimit;
    }

    /**
     * Looks up the segment label of a page (case-insensitive match on the
     * page name).
     *
     * @param pageName Facebook page name
     * @return the configured segment, or "default" when the page is unknown
     */
    public String getSegmentForPage(String pageName) {
        for (String key : segmentMap.keySet()) {
            if (pageName.equalsIgnoreCase(key)) {
                return segmentMap.get(key);
            }
        }
        return "default";
    }

    /**
     * @return the configured Facebook page names, in file order
     */
    public String[] getFBPages() {
        return targetList.toArray(new String[0]);
    }

    /**
     * Parses the config file section by section. Unknown keys, malformed
     * lines and "//" comment lines are ignored; I/O problems are reported to
     * stdout and leave the settings partially filled (best-effort, matching
     * the original behaviour). A truncated file (EOF before "[end]") no
     * longer causes a NullPointerException.
     */
    private void loadConfig() {
        System.out.println(new Date() + " Loading config.");
        // try-with-resources: the reader was previously never closed (leak).
        try (BufferedReader br = new BufferedReader(new FileReader(this.configURL))) {
            String line = br.readLine();
            while (line != null) {
                if (line.toLowerCase().contains("[server]")) {
                    line = br.readLine();
                    while (line != null && !line.toLowerCase().contains("[task]")) {
                        parseServerLine(line);
                        line = br.readLine();
                    }
                }
                if (line != null && line.toLowerCase().contains("[task]")) {
                    while (line != null && !line.toLowerCase().contains("[end]")) {
                        parseTaskLine(line);
                        line = br.readLine();
                    }
                }
                if (line == null || line.contains("[end]")) {
                    break;
                }
                line = br.readLine();
            }
        } catch (IOException ex) {
            System.out.println(new Date() + " IO Exception in reading config.");
        }
        System.out.println(new Date() + " Config loaded.");
    }

    /** Applies one "key:value" line from the [server] section. */
    private void parseServerLine(String line) {
        if (line.startsWith("//")) {
            return; // comment line
        }
        String[] pole = line.split(":");
        if (pole.length < 2) {
            return; // malformed or empty line -- skip instead of crashing
        }
        if (pole[0].equals("ip")) {
            // e.g. ip:192.168.0.0
            this.ipES = pole[1];
        } else if (pole[0].equals("port")) {
            this.portES = pole[1];
        } else if (pole[0].equals("appToken")) {
            this.appToken = pole[1];
        } else if (pole[0].equals("pageLimit")) {
            this.pageLimit = Integer.parseInt(pole[1]);
        } else if (pole[0].equals("index")) {
            this.indexES = pole[1];
        } else if (pole[0].equals("type")) {
            this.typeES = pole[1];
        } else if (pole[0].equals("appID")) {
            this.appID = pole[1];
        } else if (pole[0].equals("appAccessToken")) {
            this.appAccessToken = pole[1];
        } else if (pole[0].equals("appSecret")) {
            this.appSecret = pole[1];
        } else if (pole[0].equals("sentimentModel")) {
            this.sentimentModel = pole[1];
        }
    }

    /** Applies one "download:PAGE:ignored:SEGMENT" line from the [task] section. */
    private void parseTaskLine(String line) {
        if (line.startsWith("//")) {
            return; // comment line
        }
        String[] pole = line.split(":");
        if (pole.length >= 4 && pole[0].equals("download")) {
            this.targetList.add(pole[1]);
            this.segmentMap.put(pole[1], pole[3]);
        }
    }
}
| apache-2.0 |
stumoodie/CompoundGraph | test/uk/ac/ed/inf/graph/compound/newimpl/SubCompoundGraphWithRemovedNodesAndRootTest.java | 11390 | /*
Licensed to the Court of the University of Edinburgh (UofE) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The UofE licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package uk.ac.ed.inf.graph.compound.newimpl;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import org.jmock.Mockery;
import org.jmock.integration.junit4.JMock;
import org.jmock.integration.junit4.JUnit4Mockery;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import uk.ac.ed.inf.graph.compound.CompoundNodePair;
import uk.ac.ed.inf.graph.compound.ICompoundEdge;
import uk.ac.ed.inf.graph.compound.ICompoundGraphElement;
import uk.ac.ed.inf.graph.compound.ICompoundNode;
import uk.ac.ed.inf.graph.compound.testfixture.ComplexGraphFixture;
import uk.ac.ed.inf.graph.compound.testfixture.IteratorTestUtility;
@RunWith(JMock.class)
public class SubCompoundGraphWithRemovedNodesAndRootTest {
	// Expected counts. The subgraph was built from the root node before
	// elements were flagged as removed, so removed elements are still
	// reported and the totals include them (see testIsConsistentSnapShot).
	private static final int EXPECTED_NUM_EDGES = 4;
	private static final int EXPECTED_NUM_NODES = 7;
	private static final int EXPECTED_NUM_TOP_NODES = 1;
	private static final int EXPECTED_NUM_ELEMENTS = 11;
	private static final int EXPECTED_NUM_TOP_ELEMENTS = 1;
	private static final int EXPECTED_NUM_TOP_EDGES = 0;
	private SubCompoundGraph testInstance;
	private Mockery mockery;
	private ComplexGraphFixture testFixture;
	/**
	 * Builds the fixture, adds the root node as the sole top element of the
	 * subgraph under test, then marks several elements as removed in the
	 * underlying graph.
	 */
	@Before
	public void setUp() throws Exception {
		this.mockery = new JUnit4Mockery();
		this.testFixture = new ComplexGraphFixture(this.mockery, "");
		this.testFixture.buildFixture();
		this.testInstance = new SubCompoundGraph(this.testFixture.getGraph());
		this.testInstance.addTopElement(this.testFixture.getRootNode());
		this.testFixture.setElementRemoved(ComplexGraphFixture.NODE2_ID, true);
		this.testFixture.setElementRemoved(ComplexGraphFixture.EDGE2_ID, true);
		this.testFixture.setElementRemoved(ComplexGraphFixture.EDGE3_ID, true);
		this.testFixture.setElementRemoved(ComplexGraphFixture.EDGE4_ID, true);
		this.testFixture.setElementRemoved(ComplexGraphFixture.NODE4_ID, true);
	}
	@After
	public void tearDown() throws Exception {
		this.mockery =null;
		this.testFixture = null;
		this.testInstance = null;
	}
	@Test
	public void testContainsConnectionICompoundNodeICompoundNode() {
		CompoundNodePair testPair = this.testFixture.getEdge3().getConnectedNodes();
		assertTrue("connection exists", this.testInstance.containsConnection(testPair.getOutNode(), testPair.getInNode()));
	}
	@Test
	public void testContainsConnectionCompoundNodePair() {
		CompoundNodePair testPair = this.testFixture.getEdge2().getConnectedNodes();
		assertTrue("connection exists", this.testInstance.containsConnection(testPair));
		// NOTE(review): this second pair is identical to testPair above, so it
		// only re-checks the same connection; the variable name suggests an
		// edge outside the subgraph was intended — confirm against the
		// fixture. The original assertion message ("no connection exists")
		// contradicted the assertTrue and has been corrected.
		CompoundNodePair testOutsidePair = this.testFixture.getEdge2().getConnectedNodes();
		assertTrue("connection exists", this.testInstance.containsConnection(testOutsidePair));
	}
	@Test
	public void testContainsDirectedEdgeICompoundNodeICompoundNode() {
		CompoundNodePair testPair = this.testFixture.getEdge2().getConnectedNodes();
		assertTrue("directed edge exists", this.testInstance.containsDirectedEdge(testPair.getOutNode(), testPair.getInNode()));
		// Bug fix: the original passed testPair.getInNode() as the in-node,
		// which (combined with reversedPair.getOutNode() == testPair.getInNode())
		// checked inNode->inNode instead of the reversed edge.
		CompoundNodePair reversedPair = this.testFixture.getEdge2().getConnectedNodes().reversedNodes();
		assertFalse("reversed directed edge not exists", this.testInstance.containsDirectedEdge(reversedPair.getOutNode(), reversedPair.getInNode()));
	}
	@Test
	public void testContainsConnectionRemovedCompoundNodePair() {
		// edge3 is flagged removed, but the subgraph still reports it.
		CompoundNodePair testPair = this.testFixture.getEdge3().getConnectedNodes();
		assertTrue("connection exists", this.testInstance.containsConnection(testPair));
	}
	@Test
	public void testContainsDirectedEdgeRemovedICompoundNodeICompoundNode() {
		CompoundNodePair testPair = this.testFixture.getEdge3().getConnectedNodes();
		assertTrue("directed edge exists", this.testInstance.containsDirectedEdge(testPair.getOutNode(), testPair.getInNode()));
	}
	@Test
	public void testContainsDirectedEdgeCompoundNodePair() {
		CompoundNodePair testPair = this.testFixture.getEdge3().getConnectedNodes();
		assertTrue("directed edge exists", this.testInstance.containsDirectedEdge(testPair));
	}
	@Test
	public void testContainsEdgeICompoundEdge() {
		ICompoundEdge testEdge = this.testFixture.getEdge3();
		assertTrue("edge exists", this.testInstance.containsEdge(testEdge));
	}
	@Test
	public void testContainsEdgeInt() {
		ICompoundEdge testEdge = this.testFixture.getEdge3();
		assertTrue("edge exists", this.testInstance.containsEdge(testEdge.getIndex()));
	}
	@Test
	public void testContainsElementICompoundGraphElement() {
		assertTrue("element present", this.testInstance.containsElement(this.testFixture.getRootNode()));
		assertTrue("element present", this.testInstance.containsElement(this.testFixture.getEdge3()));
		assertTrue("element present", this.testInstance.containsElement(this.testFixture.getEdge2()));
		assertTrue("element present", this.testInstance.containsElement(this.testFixture.getNode1()));
	}
	@Test
	public void testContainsElementInt() {
		assertTrue("element present", this.testInstance.containsElement(this.testFixture.getRootNode().getIndex()));
		assertTrue("element present", this.testInstance.containsElement(this.testFixture.getEdge3().getIndex()));
		assertTrue("element present", this.testInstance.containsElement(this.testFixture.getNode4().getIndex()));
		assertTrue("element present", this.testInstance.containsElement(this.testFixture.getNode1().getIndex()));
	}
	@Test
	public void testContainsNodeInt() {
		assertTrue("node present", this.testInstance.containsNode(this.testFixture.getNode4().getIndex()));
		assertTrue("root node present", this.testInstance.containsNode(this.testFixture.getRootNode().getIndex()));
	}
	@Test
	public void testContainsNodeICompoundNode() {
		assertTrue("root node present", this.testInstance.containsNode(this.testFixture.getRootNode()));
		assertTrue("element present", this.testInstance.containsNode(this.testFixture.getNode4()));
		assertTrue("element present", this.testInstance.containsNode(this.testFixture.getNode2()));
		assertTrue("element present", this.testInstance.containsNode(this.testFixture.getNode1()));
	}
	@Test
	public void testContainsRoot() {
		assertTrue("contains root", this.testInstance.containsRoot());
	}
	@Test
	public void testEdgeIterator() {
		IteratorTestUtility<ICompoundEdge> testIter = new IteratorTestUtility<ICompoundEdge>(this.testFixture.getEdge1(), this.testFixture.getEdge2(),
				this.testFixture.getEdge3(), this.testFixture.getEdge4());
		testIter.testSortedIterator(this.testInstance.edgeIterator());
	}
	@Test
	public void testElementIterator() {
		IteratorTestUtility<ICompoundGraphElement> testIter = new IteratorTestUtility<ICompoundGraphElement>(this.testFixture.getRootNode(), this.testFixture.getNode1(), this.testFixture.getNode2(),
				this.testFixture.getNode3(), this.testFixture.getNode4(), this.testFixture.getNode5(), this.testFixture.getNode6(),
				this.testFixture.getEdge1(), this.testFixture.getEdge2(),
				this.testFixture.getEdge3(), this.testFixture.getEdge4());
		testIter.testSortedIterator(this.testInstance.elementIterator());
	}
	@Test
	public void testGetEdge() {
		assertEquals("expected edge", this.testFixture.getEdge2(), this.testInstance.getEdge(this.testFixture.getEdge2().getIndex()));
	}
	@Test
	public void testGetElement() {
		assertEquals("expected edge", this.testFixture.getEdge2(), this.testInstance.getElement(this.testFixture.getEdge2().getIndex()));
		assertEquals("expected node", this.testFixture.getNode4(), this.testInstance.getElement(this.testFixture.getNode4().getIndex()));
	}
	@Test
	public void testGetNode() {
		assertEquals("expected node", this.testFixture.getNode4(), this.testInstance.getNode(this.testFixture.getNode4().getIndex()));
	}
	@Test
	public void testGetNumEdges() {
		assertEquals("expected num", EXPECTED_NUM_EDGES, this.testInstance.getNumEdges());
	}
	@Test
	public void testGetNumNodes() {
		assertEquals("expected num", EXPECTED_NUM_NODES, this.testInstance.getNumNodes());
	}
	@Test
	public void testGetNumTopNodes() {
		assertEquals("expected num", EXPECTED_NUM_TOP_NODES, this.testInstance.getNumTopNodes());
	}
	@Test
	public void testGetNumTopEdges() {
		assertEquals("expected num", EXPECTED_NUM_TOP_EDGES, this.testInstance.getNumTopEdges());
	}
	@Test
	public void testGetSuperGraph() {
		assertEquals("expected graph", this.testFixture.getGraph(), this.testInstance.getSuperGraph());
	}
	@Test
	public void testIsConsistentSnapShot() {
		// Elements were removed after the subgraph was built, so the
		// subgraph no longer matches its super graph.
		assertFalse("inconsistent snapshot", this.testInstance.isConsistentSnapShot());
	}
	@Test
	public void testIsInducedSubgraph() {
		assertTrue("is induced subgraph", this.testInstance.isInducedSubgraph());
	}
	@Test
	public void testNodeIterator() {
		IteratorTestUtility<ICompoundNode> testIter = new IteratorTestUtility<ICompoundNode>(this.testFixture.getRootNode(), this.testFixture.getNode1(), this.testFixture.getNode2(),
				this.testFixture.getNode3(), this.testFixture.getNode4(), this.testFixture.getNode5(), this.testFixture.getNode6());
		testIter.testSortedIterator(this.testInstance.nodeIterator());
	}
	@Test
	public void testNumElements() {
		assertEquals("expected num", EXPECTED_NUM_ELEMENTS, this.testInstance.numElements());
	}
	@Test
	public void testNumTopElements() {
		assertEquals("expected num", EXPECTED_NUM_TOP_ELEMENTS, this.testInstance.numTopElements());
	}
	@Test
	public void testTopElementIterator() {
		IteratorTestUtility<ICompoundGraphElement> testIter = new IteratorTestUtility<ICompoundGraphElement>(this.testFixture.getRootNode());
		testIter.testSortedIterator(this.testInstance.topElementIterator());
	}
	@Test
	public void testTopNodeIterator() {
		IteratorTestUtility<ICompoundNode> testIter = new IteratorTestUtility<ICompoundNode>(this.testFixture.getRootNode());
		testIter.testSortedIterator(this.testInstance.topNodeIterator());
	}
	@Test
	public void testTopEdgeIterator() {
		IteratorTestUtility<ICompoundEdge> testIter = new IteratorTestUtility<ICompoundEdge>();
		testIter.testSortedIterator(this.testInstance.topEdgeIterator());
	}
	@Test
	public void testEdgeLastElementIterator() {
		IteratorTestUtility<ICompoundGraphElement> testIter = new IteratorTestUtility<ICompoundGraphElement>(this.testFixture.getRootNode(), this.testFixture.getNode1(), this.testFixture.getNode2(),
				this.testFixture.getEdge3(), this.testFixture.getNode6(), this.testFixture.getEdge1(), this.testFixture.getNode3(), this.testFixture.getNode5(), this.testFixture.getEdge2(), this.testFixture.getNode4(),
				this.testFixture.getEdge4());
		testIter.testIterator(this.testInstance.edgeLastElementIterator());
	}
}
| apache-2.0 |
FFY00/deobfuscator | src/main/java/com/javadeobfuscator/deobfuscator/org/objectweb/asm/util/Textifiable.java | 2493 | /**
* ASM: a very small and fast Java bytecode manipulation framework
* Copyright (c) 2000-2011 INRIA, France Telecom
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. Neither the name of the copyright holders nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
* THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.javadeobfuscator.deobfuscator.org.objectweb.asm.util;
import java.util.Map;
import com.javadeobfuscator.deobfuscator.org.objectweb.asm.Label;
/**
 * An {@link com.javadeobfuscator.deobfuscator.org.objectweb.asm.Attribute Attribute} that can print a readable
 * representation of itself.
 *
 * Implementations should construct readable output from an attribute data
 * structure. Such representation could be used in unit test assertions.
 *
 * @author Eugene Kuleshov
 */
public interface Textifiable {
    /**
     * Build a human readable representation of this attribute.
     *
     * @param buf
     *            a buffer used for printing Java code; the textual
     *            representation is appended to it.
     * @param labelNames
     *            map of label instances to their names, used to render
     *            bytecode offsets symbolically.
     */
    void textify(StringBuffer buf, Map<Label, String> labelNames);
}
| apache-2.0 |
hitakaken/ndp | ndp-kernel/src/main/java/com/novbank/ndp/kernel/util/SecureHash.java | 9833 | package com.novbank.ndp.kernel.util;
import com.novbank.ndp.kernel.exception.RepositoryRuntimeException;

import org.apache.commons.codec.binary.Hex;

import java.io.*;
import java.nio.ByteBuffer;
import java.nio.CharBuffer;
import java.nio.charset.Charset;
import java.nio.charset.CharsetEncoder;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;

import static com.google.common.base.Preconditions.checkNotNull;
/**
 * Utility methods for computing cryptographic hashes over byte arrays, files,
 * streams and readers, plus wrapper stream/reader classes that compute a
 * digest incrementally while the data is being consumed.
 *
 * Not instantiable; all entry points are static.
 */
public class SecureHash {
    /**
     * Computes the hash of the given bytes with the supplied algorithm.
     *
     * @param algorithm the hash algorithm; may not be null
     * @param content   the bytes to hash
     * @return the raw digest bytes
     * @throws NoSuchAlgorithmException if the algorithm is not available
     */
    public static byte[] getHash(SecureHash.Algorithm algorithm, byte[] content) throws NoSuchAlgorithmException {
        checkNotNull(algorithm, "algorithm");
        return getHash(algorithm.digestName(), content);
    }

    /** Computes the hash of a file's contents with the supplied algorithm. */
    public static byte[] getHash(SecureHash.Algorithm algorithm, File file) throws NoSuchAlgorithmException, IOException {
        checkNotNull(algorithm, "algorithm");
        return getHash(algorithm.digestName(), file);
    }

    /** Computes the hash of a stream's remaining contents with the supplied algorithm. */
    public static byte[] getHash(SecureHash.Algorithm algorithm, InputStream stream) throws NoSuchAlgorithmException, IOException {
        checkNotNull(algorithm, "algorithm");
        return getHash(algorithm.digestName(), stream);
    }

    /** Computes the hash of the given bytes with the named digest algorithm. */
    public static byte[] getHash(String digestName, byte[] content) throws NoSuchAlgorithmException {
        // MessageDigest.getInstance never returns null (it throws instead),
        // so the previous "assert digest != null" was dead code.
        MessageDigest digest = MessageDigest.getInstance(digestName);
        return digest.digest(content);
    }

    /**
     * Computes the hash of a file's contents, reading it in 1 KB chunks.
     * The file stream is always closed (try-with-resources replaces the
     * previous manual error-flag close logic).
     */
    public static byte[] getHash(String digestName, File file) throws NoSuchAlgorithmException, IOException {
        checkNotNull(file, "file");
        MessageDigest digest = MessageDigest.getInstance(digestName);
        try (BufferedInputStream in = new BufferedInputStream(new FileInputStream(file))) {
            byte[] buffer = new byte[1024];
            int n;
            while ((n = in.read(buffer)) != -1) {
                digest.update(buffer, 0, n);
            }
        }
        return digest.digest();
    }

    /**
     * Computes the hash of a stream's remaining contents. The stream is NOT
     * closed; the caller retains ownership.
     */
    public static byte[] getHash(String digestName, InputStream stream) throws NoSuchAlgorithmException, IOException {
        checkNotNull(stream, "stream");
        MessageDigest digest = MessageDigest.getInstance(digestName);
        byte[] buffer = new byte[1024];
        int n;
        while ((n = stream.read(buffer)) != -1) {
            digest.update(buffer, 0, n);
        }
        return digest.digest();
    }

    /** Wraps a stream so that its contents are hashed as they are read. */
    public static SecureHash.HashingInputStream createHashingStream(SecureHash.Algorithm algorithm, InputStream inputStream) throws NoSuchAlgorithmException {
        return createHashingStream(algorithm.digestName(), inputStream);
    }

    /** Wraps a stream so that its contents are hashed as they are read. */
    public static SecureHash.HashingInputStream createHashingStream(String digestName, InputStream inputStream) throws NoSuchAlgorithmException {
        MessageDigest digest = MessageDigest.getInstance(digestName);
        return new SecureHash.HashingInputStream(digest, inputStream);
    }

    /** Wraps a reader so that its chars (encoded via {@code charset}) are hashed as they are read. */
    public static SecureHash.HashingReader createHashingReader(SecureHash.Algorithm algorithm, Reader reader, Charset charset) throws NoSuchAlgorithmException {
        return createHashingReader(algorithm.digestName(), reader, charset);
    }

    /** Wraps a reader so that its chars (encoded via {@code charset}) are hashed as they are read. */
    public static SecureHash.HashingReader createHashingReader(String digestName, Reader reader, Charset charset) throws NoSuchAlgorithmException {
        MessageDigest digest = MessageDigest.getInstance(digestName);
        return new SecureHash.HashingReader(digest, reader, charset);
    }

    /** @return the hash as a lower-case hex string, or null if {@code hash} is null. */
    public static String asHexString(byte[] hash) {
        return hash != null ? Hex.encodeHexString(hash) : null;
    }

    /** SHA-1 of the string's platform-default-encoded bytes, as a hex string. */
    public static String sha1(String string) {
        try {
            byte[] hash = getHash(SecureHash.Algorithm.SHA_1, string.getBytes());
            return asHexString(hash);
        } catch (NoSuchAlgorithmException e) {
            throw new RepositoryRuntimeException(e);
        }
    }

    private SecureHash() {
        // static utility class - no instances
    }

    /**
     * A {@link Reader} decorator that feeds every char it delivers into a
     * digest, encoding through the configured charset. Call {@link #close()}
     * before reading the final hash.
     */
    public static class HashingReader extends Reader {
        private final MessageDigest digest;
        private final Reader stream;
        private byte[] hash;
        private final CharsetEncoder encoder;

        protected HashingReader(MessageDigest digest, Reader input, Charset charset) {
            this.digest = digest;
            this.stream = input;
            this.encoder = charset.newEncoder();
        }

        @Override
        public int read() throws IOException {
            int result = this.stream.read();
            if (result != -1) {
                // Encode the char through the charset instead of the previous
                // lossy (byte) cast, so multi-byte characters hash correctly.
                updateDigest(new char[] { (char) result }, 0, 1);
            }
            return result;
        }

        @Override
        public int read(char[] b, int off, int len) throws IOException {
            int n = this.stream.read(b, off, len);
            if (n != -1) {
                updateDigest(b, off, n);
            }
            return n;
        }

        @Override
        public int read(char[] b) throws IOException {
            int n = this.stream.read(b);
            if (n != -1) {
                updateDigest(b, 0, n);
            }
            return n;
        }

        /**
         * Feeds exactly the {@code n} chars starting at {@code off} into the
         * digest. The previous implementation encoded the entire backing
         * array (including stale chars past the read) and then applied CHAR
         * offsets to the resulting BYTE array, producing wrong digests for
         * partial reads, non-zero offsets, or multi-byte charsets.
         * NOTE(review): a multi-char sequence (e.g. surrogate pair) split
         * across two read() calls is still encoded chunk-by-chunk; confirm
         * callers read complete chunks if that matters.
         */
        private void updateDigest(char[] b, int off, int n) throws IOException {
            ByteBuffer encoded = this.encoder.encode(CharBuffer.wrap(b, off, n));
            this.digest.update(encoded);
        }

        @Override
        public void close() throws IOException {
            this.stream.close();
            // Finalize the digest exactly once, even if close() is called twice.
            if (this.hash == null) {
                this.hash = this.digest.digest();
            }
        }

        /** @return the digest bytes, or null until {@link #close()} has been called. */
        public byte[] getHash() {
            return this.hash;
        }

        /** @return the digest as a hex string, or null until {@link #close()} has been called. */
        public String getHashAsHexString() {
            return SecureHash.asHexString(this.hash);
        }
    }

    /**
     * An {@link InputStream} decorator that feeds every byte it delivers into
     * a digest. Call {@link #close()} before reading the final hash.
     */
    public static class HashingInputStream extends InputStream {
        private final MessageDigest digest;
        private final InputStream stream;
        private byte[] hash;

        protected HashingInputStream(MessageDigest digest, InputStream input) {
            this.digest = digest;
            this.stream = input;
        }

        @Override
        public int read() throws IOException {
            int result = this.stream.read();
            if (result != -1) {
                this.digest.update((byte) result);
            }
            return result;
        }

        @Override
        public int read(byte[] b, int off, int len) throws IOException {
            int n = this.stream.read(b, off, len);
            if (n != -1) {
                this.digest.update(b, off, n);
            }
            return n;
        }

        @Override
        public int read(byte[] b) throws IOException {
            int n = this.stream.read(b);
            if (n != -1) {
                this.digest.update(b, 0, n);
            }
            return n;
        }

        @Override
        public void close() throws IOException {
            this.stream.close();
            // Finalize the digest exactly once, even if close() is called twice.
            if (this.hash == null) {
                this.hash = this.digest.digest();
            }
        }

        /** @return the digest bytes, or null until {@link #close()} has been called. */
        public byte[] getHash() {
            return this.hash;
        }

        /** @return the digest as a hex string, or null until {@link #close()} has been called. */
        public String getHashAsHexString() {
            return SecureHash.asHexString(this.hash);
        }
    }

    /**
     * The supported digest algorithms, with their JCA names and sizes.
     */
    public enum Algorithm {
        MD2("MD2", 128, "The MD2 message digest algorithm as defined in RFC 1319"),
        MD5("MD5", 128, "The MD5 message digest algorithm as defined in RFC 1321"),
        SHA_1("SHA-1", 160, "The Secure Hash Algorithm, as defined in Secure Hash Standard, NIST FIPS 180-1"),
        SHA_256("SHA-256", 256, "New hash algorithms for which the draft Federal Information Processing Standard 180-2, Secure Hash Standard (SHS) is now available. SHA-256 is a 256-bit hash function intended to provide 128 bits of security against collision attacks."),
        SHA_384("SHA-384", 384, "New hash algorithms for which the draft Federal Information Processing Standard 180-2, Secure Hash Standard (SHS) is now available. A 384-bit hash may be obtained by truncating the SHA-512 output."),
        SHA_512("SHA-512", 512, "New hash algorithms for which the draft Federal Information Processing Standard 180-2, Secure Hash Standard (SHS) is now available. SHA-512 is a 512-bit hash function intended to provide 256 bits of security.");

        private final String name;
        private final String description;
        private final int numberOfBits;
        private final int numberOfBytes;
        private final int numberOfHexChars;

        private Algorithm(String name, int numberOfBits, String description) {
            assert numberOfBits % 8 == 0;
            this.name = name;
            this.description = description;
            this.numberOfBits = numberOfBits;
            this.numberOfBytes = this.numberOfBits / 8;
            this.numberOfHexChars = this.numberOfBits / 4;
        }

        /** @return the JCA digest name, e.g. "SHA-256". */
        public String digestName() {
            return this.name;
        }

        public String description() {
            return this.description;
        }

        /** @return the length of this digest when rendered as hex. */
        public int getHexadecimalStringLength() {
            return this.numberOfHexChars;
        }

        /** @return true if {@code string} is a hex string of exactly this digest's length. */
        public boolean isHexadecimal(String string) {
            return string.length() == this.getHexadecimalStringLength() && HexUtils.isHexString(string);
        }

        public int getNumberOfBytes() {
            return this.numberOfBytes;
        }

        public int getNumberOfBits() {
            return this.numberOfBits;
        }

        @Override
        public String toString() {
            return this.digestName();
        }
    }
}
| apache-2.0 |
lucheins/bluezone | NappDrawer-master/android/src/dk/napp/drawer/DrawerProxy.java | 13730 | /**
* Copyright (c) 2010-2013 by Napp ApS
* www.napp.dk
* Author Mads Møller
*
* Special thanks to Martin Guillon
*
* Appcelerator Titanium is Copyright (c) 2009-2013 by Appcelerator, Inc.
* and licensed under the Apache Public License (version 2)
*/
package dk.napp.drawer;
import java.lang.ref.WeakReference;
import org.appcelerator.kroll.KrollDict;
import org.appcelerator.kroll.annotations.Kroll;
import org.appcelerator.kroll.common.Log;
import org.appcelerator.titanium.TiActivity;
import org.appcelerator.titanium.TiActivityWindow;
import org.appcelerator.titanium.TiActivityWindows;
import org.appcelerator.titanium.TiApplication;
import org.appcelerator.titanium.TiBaseActivity;
import org.appcelerator.titanium.TiC;
import org.appcelerator.titanium.TiContext;
import org.appcelerator.titanium.proxy.TiWindowProxy;
import org.appcelerator.titanium.util.TiConvert;
import org.appcelerator.titanium.util.TiUIHelper;
import com.slidingmenu.lib.SlidingMenu;
import dk.napp.drawer.Drawer;
import android.app.Activity;
import android.content.Intent;
import android.os.Message;
@Kroll.proxy(creatableInModule=NappdrawerModule.class)
public class DrawerProxy extends TiWindowProxy implements TiActivityWindow
{
	private static final String TAG = "NappDrawerProxy";
	// Message ids for drawer actions posted to the main handler; offset from
	// the parent class's last id to avoid collisions.
	private static final int MSG_FIRST_ID = TiWindowProxy.MSG_LAST_ID + 1;
	private static final int MSG_TOGGLE_LEFT_VIEW = MSG_FIRST_ID + 100;
	private static final int MSG_TOGGLE_RIGHT_VIEW = MSG_FIRST_ID + 101;
	private static final int MSG_OPEN_LEFT_VIEW = MSG_FIRST_ID + 102;
	private static final int MSG_OPEN_RIGHT_VIEW = MSG_FIRST_ID + 103;
	private static final int MSG_CLOSE_LEFT_VIEW = MSG_FIRST_ID + 104;
	private static final int MSG_CLOSE_RIGHT_VIEW = MSG_FIRST_ID + 105;
	private static final int MSG_CLOSE_VIEWS = MSG_FIRST_ID + 106;
	protected static final int MSG_LAST_ID = MSG_FIRST_ID + 999;
	// Weak references to avoid keeping the activity / widget alive after the
	// window is closed; both are assigned in windowCreated().
	private WeakReference<Activity> slideMenuActivity;
	private WeakReference<SlidingMenu> slidingMenu;
	public DrawerProxy()
	{
		super();
	}
	// Legacy constructor kept for older Titanium module loaders; delegates
	// to the no-arg constructor and ignores the context.
	public DrawerProxy(TiContext tiContext)
	{
		this();
	}
	/**
	 * Dispatches drawer-action messages posted from a non-UI thread to the
	 * matching handle*() implementation; everything else goes to the parent.
	 */
	@Override
	public boolean handleMessage(Message msg)
	{
		switch (msg.what) {
			case MSG_TOGGLE_LEFT_VIEW: {
				handleToggleLeftView((Boolean)msg.obj);
				return true;
			}
			case MSG_TOGGLE_RIGHT_VIEW: {
				handleToggleRightView((Boolean)msg.obj);
				return true;
			}
			case MSG_OPEN_LEFT_VIEW: {
				handleOpenLeftView((Boolean)msg.obj);
				return true;
			}
			case MSG_OPEN_RIGHT_VIEW: {
				handleOpenRightView((Boolean)msg.obj);
				return true;
			}
			case MSG_CLOSE_LEFT_VIEW: {
				handleCloseLeftView((Boolean)msg.obj);
				return true;
			}
			case MSG_CLOSE_RIGHT_VIEW: {
				handleCloseRightView((Boolean)msg.obj);
				return true;
			}
			case MSG_CLOSE_VIEWS: {
				handleCloseViews((Boolean)msg.obj);
				return true;
			}
			default : {
				return super.handleMessage(msg);
			}
		}
	}
	@Override
	public void handleCreationDict(KrollDict options) {
		super.handleCreationDict(options);
		// Support setting orientation modes at creation.
		Object orientationModes = options.get( TiC.PROPERTY_ORIENTATION_MODES);
		if (orientationModes != null && orientationModes instanceof Object[]) {
			try {
				int[] modes = TiConvert.toIntArray((Object[]) orientationModes);
				setOrientationModes(modes);
			} catch (ClassCastException e) {
				Log.e(TAG, "Invalid orientationMode array. Must only contain orientation mode constants.");
			}
		}
	}
	/**
	 * Opens the drawer by launching a new TiActivity; the window id is
	 * registered so windowCreated() is called back once the activity exists.
	 */
	@Override
	protected void handleOpen(KrollDict options)
	{
		Activity topActivity = TiApplication.getAppCurrentActivity();
		Intent intent = new Intent(topActivity, TiActivity.class);
		fillIntent(topActivity, intent);
		int windowId = TiActivityWindows.addWindow(this);
		intent.putExtra(TiC.INTENT_PROPERTY_USE_ACTIVITY_WINDOW, true);
		intent.putExtra(TiC.INTENT_PROPERTY_WINDOW_ID, windowId);
		topActivity.startActivity(intent);
	}
	/**
	 * Activity callback: builds the Drawer view, captures weak refs to the
	 * activity and the underlying SlidingMenu, then finishes opening.
	 */
	@Override
	public void windowCreated(TiBaseActivity activity) {
		slideMenuActivity = new WeakReference<Activity>(activity);
		activity.setWindowProxy(this);
		setActivity(activity);
		view = new Drawer(this, activity);
		slidingMenu = new WeakReference<SlidingMenu>(((Drawer)view).getSlidingMenu());
		setModelListener(view);
		handlePostOpen();
		// Push the tab group onto the window stack. It needs to intercept
		// stack changes to properly dispatch tab focus and blur events
		// when windows open and close on top of it.
		activity.addWindowToStack(this);
	}
	@Override
	public void handlePostOpen()
	{
		super.handlePostOpen();
		opened = true;
		// First open before we load and focus our first tab.
		fireEvent(TiC.EVENT_OPEN, null);
		// Setup the new tab activity like setting orientation modes.
		onWindowActivityCreated();
	}
	/**
	 * Tears down the proxy state and finishes the hosting activity if it is
	 * still alive.
	 */
	@Override
	protected void handleClose(KrollDict options)
	{
		Log.d(TAG, "handleClose: " + options, Log.DEBUG_MODE);
		modelListener = null;
		releaseViews();
		view = null;
		opened = false;
		Activity activity = slideMenuActivity.get();
		if (activity != null && !activity.isFinishing()) {
			activity.finish();
		}
	}
	@Override
	public void closeFromActivity(boolean activityIsFinishing) {
		// Call super to fire the close event on the tab group.
		// This event must fire after each tab has been closed.
		super.closeFromActivity(activityIsFinishing);
	}
	// Translates Android window focus changes into Titanium focus/blur events.
	@Override
	public void onWindowFocusChange(boolean focused) {
		if (focused){
			fireEvent(TiC.EVENT_FOCUS, null);
		} else {
			fireEvent(TiC.EVENT_BLUR, null);
		}
	}
	/**
	 * Copies window-level creation properties (fullscreen, nav bar, soft
	 * input mode, exit-on-close) from this proxy onto the launch intent.
	 */
	private void fillIntent(Activity activity, Intent intent)
	{
		if (hasProperty(TiC.PROPERTY_FULLSCREEN)) {
			intent.putExtra(TiC.PROPERTY_FULLSCREEN, TiConvert.toBoolean(getProperty(TiC.PROPERTY_FULLSCREEN)));
		}
		if (hasProperty(TiC.PROPERTY_NAV_BAR_HIDDEN)) {
			intent.putExtra(TiC.PROPERTY_NAV_BAR_HIDDEN, TiConvert.toBoolean(getProperty(TiC.PROPERTY_NAV_BAR_HIDDEN)));
		}
		if (hasProperty(TiC.PROPERTY_WINDOW_SOFT_INPUT_MODE)) {
			intent.putExtra(TiC.PROPERTY_WINDOW_SOFT_INPUT_MODE, TiConvert.toInt(getProperty(TiC.PROPERTY_WINDOW_SOFT_INPUT_MODE)));
		}
		if (hasProperty(TiC.PROPERTY_EXIT_ON_CLOSE)) {
			intent.putExtra(TiC.INTENT_PROPERTY_FINISH_ROOT, TiConvert.toBoolean(getProperty(TiC.PROPERTY_EXIT_ON_CLOSE)));
		} else {
			intent.putExtra(TiC.INTENT_PROPERTY_FINISH_ROOT, activity.isTaskRoot());
		}
	}
	// Captures the current activity's decor view as an image blob.
	@Override
	public KrollDict handleToImage()
	{
		return TiUIHelper.viewToImage(new KrollDict(), getActivity().getWindow().getDecorView());
	}
	@Override
	public void releaseViews()
	{
		super.releaseViews();
	}
	// May return null before windowCreated() has run or after the activity
	// has been garbage collected (weak reference).
	@Override
	protected Activity getWindowActivity()
	{
		return (slideMenuActivity != null) ? slideMenuActivity.get() : null;
	}
private void handleToggleLeftView(boolean animated)
{
SlidingMenu menu = slidingMenu.get();
menu.toggle(animated);
}
private void handleToggleRightView(boolean animated)
{
SlidingMenu menu = slidingMenu.get();
menu.toggleSecondary(animated);
}
private void handleOpenLeftView(boolean animated)
{
SlidingMenu menu = slidingMenu.get();
menu.showMenu(animated);
}
private void handleOpenRightView(boolean animated)
{
SlidingMenu menu = slidingMenu.get();
menu.showSecondaryMenu(animated);
}
private void handleCloseLeftView(boolean animated)
{
SlidingMenu menu = slidingMenu.get();
if (menu.isMenuShowing())
menu.showContent(animated);
}
private void handleCloseRightView(boolean animated)
{
SlidingMenu menu = slidingMenu.get();
if (menu.isSecondaryMenuShowing())
menu.showContent(animated);
}
private void handleCloseViews(boolean animated)
{
SlidingMenu menu = slidingMenu.get();
if (menu.isMenuShowing() || menu.isSecondaryMenuShowing())
menu.showContent(animated);
}
public SlidingMenu getSlidingMenu(){
return slidingMenu.get();
}
/*
* METHODS
*/
@Kroll.method
public void toggleLeftWindow(@Kroll.argument(optional = true) Object obj)
{
Boolean animated = true;
if (obj != null) {
animated = TiConvert.toBoolean(obj);
}
if (TiApplication.isUIThread()) {
handleToggleLeftView(animated);
return;
}
Message message = getMainHandler().obtainMessage(MSG_TOGGLE_LEFT_VIEW, animated);
message.sendToTarget();
}
@Kroll.method
public void toggleRightWindow(@Kroll.argument(optional = true) Object obj)
{
Boolean animated = true;
if (obj != null) {
animated = TiConvert.toBoolean(obj);
}
if (TiApplication.isUIThread()) {
handleToggleRightView(animated);
return;
}
Message message = getMainHandler().obtainMessage(MSG_TOGGLE_RIGHT_VIEW, animated);
message.sendToTarget();
}
@Kroll.method
public void openLeftWindow(@Kroll.argument(optional = true) Object obj)
{
Boolean animated = true;
if (obj != null) {
animated = TiConvert.toBoolean(obj);
}
if (TiApplication.isUIThread()) {
handleOpenLeftView(animated);
return;
}
Message message = getMainHandler().obtainMessage(MSG_OPEN_LEFT_VIEW, animated);
message.sendToTarget();
}
@Kroll.method
public void openRightWindow(@Kroll.argument(optional = true) Object obj)
{
Boolean animated = true;
if (obj != null) {
animated = TiConvert.toBoolean(obj);
}
if (TiApplication.isUIThread()) {
handleOpenRightView(animated);
return;
}
Message message = getMainHandler().obtainMessage(MSG_OPEN_RIGHT_VIEW, animated);
message.sendToTarget();
}
@Kroll.method
public void closeLeftWindow(@Kroll.argument(optional = true) Object obj)
{
Boolean animated = true;
if (obj != null) {
animated = TiConvert.toBoolean(obj);
}
if (TiApplication.isUIThread()) {
handleCloseLeftView(animated);
return;
}
Message message = getMainHandler().obtainMessage(MSG_CLOSE_LEFT_VIEW, animated);
message.sendToTarget();
}
@Kroll.method
public void closeRightWindow(@Kroll.argument(optional = true) Object obj)
{
Boolean animated = true;
if (obj != null) {
animated = TiConvert.toBoolean(obj);
}
if (TiApplication.isUIThread()) {
handleCloseRightView(animated);
return;
}
Message message = getMainHandler().obtainMessage(MSG_CLOSE_RIGHT_VIEW, animated);
message.sendToTarget();
}
@Kroll.method
public void closeWindows(@Kroll.argument(optional = true) Object obj)
{
Boolean animated = true;
if (obj != null) {
animated = TiConvert.toBoolean(obj);
}
if (TiApplication.isUIThread()) {
handleCloseViews(animated);
return;
}
Message message = getMainHandler().obtainMessage(MSG_CLOSE_VIEWS, animated);
message.sendToTarget();
}
@Kroll.method
public boolean isLeftWindowOpen()
{
    // The SlidingMenu primary menu backs the left drawer.
    SlidingMenu menu = getSlidingMenu();
    return menu.isMenuShowing();
}
@Kroll.method
public boolean isRightWindowOpen()
{
    // The SlidingMenu secondary menu backs the right drawer.
    SlidingMenu menu = getSlidingMenu();
    return menu.isSecondaryMenuShowing();
}
@Kroll.method
public boolean isAnyWindowOpen()
{
    // True when either drawer (primary or secondary menu) is showing.
    SlidingMenu menu = getSlidingMenu();
    return menu.isMenuShowing() || menu.isSecondaryMenuShowing();
}
@Kroll.method
public int getRealLeftViewWidth()
{
    // NOTE(review): this reads getBehindOffset(), the same value returned by
    // getRealRightViewWidth() — confirm SlidingMenu exposes no separate left width.
    return slidingMenu.get().getBehindOffset();
}
@Kroll.method
public int getRealRightViewWidth()
{
    SlidingMenu menu = getSlidingMenu();
    // Behind-offset is the portion of the center view left visible.
    int offset = menu.getBehindOffset();
    return offset;
}
/*
* PROPERTIES
*/
@Kroll.method @Kroll.setProperty
@Override
public void setOrientationModes(int[] modes) {
    // Re-exposed with Kroll annotations: unlike Windows, this setter is not
    // defined in JavaScript, so the annotation must appear on this override.
    super.setOrientationModes(modes);
}
// Parallax
@Kroll.method @Kroll.getProperty
public float getParallaxAmount() {
    // The behind-view scroll scale is the parallax factor.
    return slidingMenu.get().getBehindScrollScale();
}
@Kroll.method @Kroll.setProperty
public void setParallaxAmount(Object value){
    // Coerce the JS value to a float and apply it as the behind scroll scale.
    float amount = TiConvert.toFloat(value);
    slidingMenu.get().setBehindScrollScale(amount);
}
@Kroll.method @Kroll.setProperty
public void setFading(Object arg){
    // Stores the 'fading' proxy property and fires the property-changed event.
    setPropertyAndFire(Drawer.PROPERTY_FADING, arg);
}
// window setters..
@Kroll.method @Kroll.setProperty
public void setLeftWindow(Object arg){
    // Stores the left-drawer window proxy and fires the property-changed event.
    setPropertyAndFire(Drawer.PROPERTY_LEFT_VIEW, arg);
}
@Kroll.method @Kroll.setProperty
public void setRightWindow(Object arg){
    // Stores the right-drawer window proxy and fires the property-changed event.
    setPropertyAndFire(Drawer.PROPERTY_RIGHT_VIEW, arg);
}
@Kroll.method @Kroll.setProperty
public void setCenterWindow(Object arg){
    // Stores the center window proxy and fires the property-changed event.
    setPropertyAndFire(Drawer.PROPERTY_CENTER_VIEW, arg);
}
// Drawer width
@Kroll.method @Kroll.setProperty
public void setLeftDrawerWidth(Object arg){
    // Stores the left-drawer width and fires the property-changed event.
    setPropertyAndFire(Drawer.PROPERTY_LEFT_VIEW_WIDTH, arg);
}
@Kroll.method @Kroll.setProperty
public void setRightDrawerWidth(Object arg){
    // Stores the right-drawer width and fires the property-changed event.
    setPropertyAndFire(Drawer.PROPERTY_RIGHT_VIEW_WIDTH, arg);
}
// Shadow
@Kroll.method @Kroll.setProperty
public void setShadowWidth(Object arg){
    // Stores the drawer shadow width and fires the property-changed event.
    setPropertyAndFire(Drawer.PROPERTY_SHADOW_WIDTH, arg);
}
@Kroll.method @Kroll.setProperty
public void setOpenDrawerGestureMode(Object arg){
    // Stores the open-gesture mode and fires the property-changed event.
    setPropertyAndFire(Drawer.PROPERTY_OPEN_MODE, arg);
}
// Gesture & animation modes
@Kroll.method @Kroll.setProperty
public void setCloseDrawerGestureMode(Object arg){
    // Stores the close-gesture mode and fires the property-changed event.
    setPropertyAndFire(Drawer.PROPERTY_CLOSE_MODE, arg);
}
@Kroll.method @Kroll.setProperty
public void setAnimationMode(Object arg){
    // Stores the drawer animation mode and fires the property-changed event.
    setPropertyAndFire(Drawer.PROPERTY_ANIMATION_MODE, arg);
}
}
| apache-2.0 |
yintaoxue/read-open-source-code | kettle4.3/src/org/pentaho/di/trans/steps/groupby/GroupBy.java | 29332 | /*******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2012 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.trans.steps.groupby;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.math.BigDecimal;
import java.net.SocketTimeoutException;
import java.util.ArrayList;
import java.util.Set;
import java.util.TreeSet;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.exception.KettleFileException;
import org.pentaho.di.core.exception.KettleValueException;
import org.pentaho.di.core.row.RowDataUtil;
import org.pentaho.di.core.row.RowMeta;
import org.pentaho.di.core.row.RowMetaInterface;
import org.pentaho.di.core.row.ValueDataUtil;
import org.pentaho.di.core.row.ValueMeta;
import org.pentaho.di.core.row.ValueMetaInterface;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.trans.Trans;
import org.pentaho.di.trans.TransMeta;
import org.pentaho.di.trans.step.BaseStep;
import org.pentaho.di.trans.step.StepDataInterface;
import org.pentaho.di.trans.step.StepInterface;
import org.pentaho.di.trans.step.StepMeta;
import org.pentaho.di.trans.step.StepMetaInterface;
/**
* Groups informations based on aggregation rules. (sum, count, ...)
*
* @author Matt
* @since 2-jun-2003
*/
public class GroupBy extends BaseStep implements StepInterface
{
private static Class<?> PKG = GroupByMeta.class; // for i18n purposes, needed by Translator2!! $NON-NLS-1$
private GroupByMeta meta;
private GroupByData data;
/**
 * Standard Kettle step constructor: wires the step into the transformation
 * and caches typed references to this step's meta and data objects.
 */
public GroupBy(StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans)
{
    super(stepMeta, stepDataInterface, copyNr, transMeta, trans);
    meta=(GroupByMeta)getStepMeta().getStepMetaInterface();
    data=(GroupByData)stepDataInterface;
}
/**
 * Consumes one input row. On the first call the output layout and all field
 * indexes are resolved; afterwards each row either extends the running
 * aggregates of the current group or, on a group-key change, flushes the
 * finished group (one result row, or every buffered row with the group result
 * appended in pass-all-rows mode) and starts a new group.
 *
 * @return true while more input is expected, false once the last group was flushed
 */
public boolean processRow(StepMetaInterface smi, StepDataInterface sdi) throws KettleException
{
    meta=(GroupByMeta)smi;
    data=(GroupByData)sdi;

    Object[] r=getRow(); // get row!

    if (first)
    {
        // What is the output looking like?
        //
        data.inputRowMeta = getInputRowMeta();

        // In case we have 0 input rows, we still want to send out a single row aggregate
        // However... the problem then is that we don't know the layout from receiving it from the previous step over the row set.
        // So we need to calculated based on the metadata...
        //
        if (data.inputRowMeta==null)
        {
            data.inputRowMeta = getTransMeta().getPrevStepFields(getStepMeta());
        }
        data.outputRowMeta = data.inputRowMeta.clone();
        meta.getFields(data.outputRowMeta, getStepname(), null, null, this);

        // Do all the work we can beforehand
        // Calculate indexes, loop up fields, etc.
        //
        data.counts = new long[meta.getSubjectField().length];
        data.subjectnrs = new int[meta.getSubjectField().length];
        data.cumulativeSumSourceIndexes = new ArrayList<Integer>();
        data.cumulativeSumTargetIndexes = new ArrayList<Integer>();
        data.cumulativeAvgSourceIndexes = new ArrayList<Integer>();
        data.cumulativeAvgTargetIndexes = new ArrayList<Integer>();

        // Resolve each aggregate subject field to its input-row index; a miss
        // is a configuration error that aborts the whole transformation.
        for (int i=0;i<meta.getSubjectField().length;i++)
        {
            data.subjectnrs[i] = data.inputRowMeta.indexOfValue(meta.getSubjectField()[i]);
            if (data.subjectnrs[i]<0)
            {
                logError(BaseMessages.getString(PKG, "GroupBy.Log.AggregateSubjectFieldCouldNotFound",meta.getSubjectField()[i])); //$NON-NLS-1$ //$NON-NLS-2$
                setErrors(1);
                stopAll();
                return false;
            }
            if (meta.getAggregateType()[i]==GroupByMeta.TYPE_GROUP_CUMULATIVE_SUM)
            {
                data.cumulativeSumSourceIndexes.add(data.subjectnrs[i]);

                // The position of the target in the output row is the input row size + i
                //
                data.cumulativeSumTargetIndexes.add(data.inputRowMeta.size()+i);
            }
            if (meta.getAggregateType()[i]==GroupByMeta.TYPE_GROUP_CUMULATIVE_AVERAGE)
            {
                data.cumulativeAvgSourceIndexes.add(data.subjectnrs[i]);

                // The position of the target in the output row is the input row size + i
                //
                data.cumulativeAvgTargetIndexes.add(data.inputRowMeta.size()+i);
            }
        }

        data.previousSums = new Object[data.cumulativeSumTargetIndexes.size()];
        data.previousAvgSum = new Object[data.cumulativeAvgTargetIndexes.size()];
        data.previousAvgCount = new long[data.cumulativeAvgTargetIndexes.size()];

        // Resolve the group-key fields the same way.
        data.groupnrs = new int[meta.getGroupField().length];
        for (int i=0;i<meta.getGroupField().length;i++)
        {
            data.groupnrs[i] = data.inputRowMeta.indexOfValue(meta.getGroupField()[i]);
            if (data.groupnrs[i]<0)
            {
                logError(BaseMessages.getString(PKG, "GroupBy.Log.GroupFieldCouldNotFound",meta.getGroupField()[i])); //$NON-NLS-1$ //$NON-NLS-2$
                setErrors(1);
                stopAll();
                return false;
            }
        }

        // Create a metadata value for the counter Integers
        //
        data.valueMetaInteger = new ValueMeta("count", ValueMetaInterface.TYPE_INTEGER);
        data.valueMetaNumber = new ValueMeta("sum", ValueMetaInterface.TYPE_NUMBER);

        // Initialize the group metadata
        //
        initGroupMeta(data.inputRowMeta);
    }

    if (first || data.newBatch) {
        // Create a new group aggregate (init)
        //
        newAggregate(r);
    }

    if (first) {
        // for speed: groupMeta+aggMeta
        //
        data.groupAggMeta = new RowMeta();
        data.groupAggMeta.addRowMeta(data.groupMeta);
        data.groupAggMeta.addRowMeta(data.aggMeta);
    }

    if (r==null) // no more input to be expected... (or none received in the first place)
    {
        handleLastOfGroup();
        setOutputDone();
        return false;
    }

    if (first || data.newBatch) {
        first = false;
        data.newBatch = false;
        data.previous = data.inputRowMeta.cloneRow(r); // copy the row to previous
    } else {
        // Fold the previous row into the running aggregates; in pass-all-rows
        // mode it is also kept so it can be re-emitted with the group result.
        calcAggregate(data.previous);

        if (meta.passAllRows()) {
            addToBuffer(data.previous);
        }
    }

    // Group boundary: the group keys of the current row differ from the previous row's.
    if (!sameGroup(data.previous, r))
    {
        if (meta.passAllRows())
        {
            // Not the same group: close output (if any)
            closeOutput();

            // Get all rows from the buffer!
            data.groupResult = getAggregateResult();

            Object[] row = getRowFromBuffer();

            long lineNr=0;

            while (row!=null)
            {
                int size = data.inputRowMeta.size();
                row=RowDataUtil.addRowData(row, size, data.groupResult);
                size+=data.groupResult.length;

                lineNr++;

                // Optionally append a 1-based line number within the group.
                if (meta.isAddingLineNrInGroup() && !Const.isEmpty(meta.getLineNrInGroupField()))
                {
                    Object lineNrValue= new Long(lineNr);
                    row=RowDataUtil.addValueData(row, size, lineNrValue);
                    size++;
                }

                addCumulativeSums(row);
                addCumulativeAverages(row);

                putRow(data.outputRowMeta, row);
                row = getRowFromBuffer();
            }
            closeInput();
        }
        else
        {
            Object[] result = buildResult(data.previous);
            if (result!=null) {
                putRow(data.groupAggMeta, result); // copy row to possible alternate rowset(s).
            }
        }
        newAggregate(r); // Create a new group aggregate (init)
    }

    data.previous=data.inputRowMeta.cloneRow(r);

    if (checkFeedback(getLinesRead()))
    {
        if(log.isBasic()) logBasic(BaseMessages.getString(PKG, "GroupBy.LineNumber")+getLinesRead()); //$NON-NLS-1$
    }

    return true;
}
/**
 * Flushes the final group when input ends (or a batch completes): in
 * pass-all-rows mode re-emits every buffered row with the group result (and
 * optional group line number) appended; otherwise emits one group+aggregate row.
 */
private void handleLastOfGroup() throws KettleException {
    if (meta.passAllRows()) // ALL ROWS
    {
        if (data.previous!=null)
        {
            calcAggregate(data.previous);
            addToBuffer(data.previous);
        }
        data.groupResult = getAggregateResult();

        Object[] row = getRowFromBuffer();

        long lineNr=0;

        while (row!=null)
        {
            int size = data.inputRowMeta.size();
            row=RowDataUtil.addRowData(row, size, data.groupResult);
            size+=data.groupResult.length;
            lineNr++;

            // Optionally append a 1-based line number within the group.
            if (meta.isAddingLineNrInGroup() && !Const.isEmpty(meta.getLineNrInGroupField()))
            {
                Object lineNrValue= new Long(lineNr);
                row=RowDataUtil.addValueData(row, size, lineNrValue);
                size++;
            }

            addCumulativeSums(row);
            addCumulativeAverages(row);

            putRow(data.outputRowMeta, row);
            row = getRowFromBuffer();
        }
        closeInput();
    }
    else // JUST THE GROUP + AGGREGATE
    {
        // Don't forget the last set of rows...
        if (data.previous!=null)
        {
            calcAggregate(data.previous);
        }
        Object[] result = buildResult(data.previous);
        if (result!=null) {
            putRow(data.groupAggMeta, result);
        }
    }
}
/**
 * Fills in the cumulative-sum target columns of an output row, carrying the
 * running sum across rows in {@code data.previousSums}. A null source keeps
 * the previous sum; a null previous sum restarts from the source value.
 */
private void addCumulativeSums(Object[] row) throws KettleValueException {

    // We need to adjust this row with cumulative sums
    //
    for (int i=0;i<data.cumulativeSumSourceIndexes.size();i++)
    {
        int sourceIndex = data.cumulativeSumSourceIndexes.get(i);
        Object previousTarget = data.previousSums[i];
        Object sourceValue = row[sourceIndex];

        int targetIndex = data.cumulativeSumTargetIndexes.get(i);

        ValueMetaInterface sourceMeta = data.inputRowMeta.getValueMeta(sourceIndex);
        ValueMetaInterface targetMeta = data.outputRowMeta.getValueMeta(targetIndex);

        // If the first values where null, or this is the first time around, just take the source value...
        //
        if (targetMeta.isNull(previousTarget))
        {
            row[targetIndex]=sourceMeta.convertToNormalStorageType(sourceValue);
        }
        else
        {
            // If the source value is null, just take the previous target value
            //
            if (sourceMeta.isNull(sourceValue))
            {
                row[targetIndex] = previousTarget;
            }
            else
            {
                row[targetIndex] = ValueDataUtil.plus(targetMeta, data.previousSums[i], sourceMeta, row[sourceIndex]);
            }
        }
        // Remember the running sum for the next row.
        data.previousSums[i] = row[targetIndex];
    }
}
/**
 * Fills in the cumulative-average target columns of an output row, carrying
 * the running sum and non-null count in {@code data.previousAvgSum} /
 * {@code data.previousAvgCount}. Integer sources are averaged as doubles.
 */
private void addCumulativeAverages(Object[] row) throws KettleValueException {

    // We need to adjust this row with cumulative averages
    //
    for (int i=0;i<data.cumulativeAvgSourceIndexes.size();i++)
    {
        int sourceIndex = data.cumulativeAvgSourceIndexes.get(i);
        Object previousTarget = data.previousAvgSum[i];
        Object sourceValue = row[sourceIndex];

        int targetIndex = data.cumulativeAvgTargetIndexes.get(i);

        ValueMetaInterface sourceMeta = data.inputRowMeta.getValueMeta(sourceIndex);
        ValueMetaInterface targetMeta = data.outputRowMeta.getValueMeta(targetIndex);

        // If the first values where null, or this is the first time around, just take the source value...
        //
        Object sum = null;

        if (targetMeta.isNull(previousTarget))
        {
            sum=sourceMeta.convertToNormalStorageType(sourceValue);
        }
        else
        {
            // If the source value is null, just take the previous target value
            //
            if (sourceMeta.isNull(sourceValue))
            {
                sum = previousTarget;
            }
            else
            {
                // Keep integer sums in integer metadata to avoid premature conversion.
                if (sourceMeta.isInteger())
                {
                    sum = ValueDataUtil.plus(data.valueMetaInteger, data.previousAvgSum[i], sourceMeta, row[sourceIndex]);
                }
                else
                {
                    sum = ValueDataUtil.plus(targetMeta, data.previousAvgSum[i], sourceMeta, row[sourceIndex]);
                }
            }
        }
        data.previousAvgSum[i] = sum;

        // Only non-null source values count toward the average's denominator.
        if (!sourceMeta.isNull(sourceValue)) data.previousAvgCount[i]++;

        if (sourceMeta.isInteger()) {
            // Change to number as the exception
            //
            if (sum==null)
            {
                row[targetIndex] = null;
            }
            else
            {
                row[targetIndex] = new Double( ((Long)sum).doubleValue() / data.previousAvgCount[i] );
            }
        }
        else
        {
            row[targetIndex] = ValueDataUtil.divide(targetMeta, sum, data.valueMetaInteger, data.previousAvgCount[i]);
        }
    }
}
/** @return true when the previous and current rows have identical group-key values. */
private boolean sameGroup(Object[] previous, Object[] r) throws KettleValueException
{
    int comparison = data.inputRowMeta.compare(previous, r, data.groupnrs);
    return comparison == 0;
}
/**
 * Folds one row into the running aggregates of the current group, dispatching
 * per configured aggregate type (sum, average, stddev, min/max, first/last,
 * count, concat, ...). Null subjects are skipped for most aggregate types.
 *
 * Fix: added the missing {@code break} after TYPE_GROUP_COUNT_DISTINCT, which
 * used to fall through into TYPE_GROUP_COUNT_ALL and increment
 * {@code data.counts[i]} as well. counts[i] is never read for a
 * COUNT_DISTINCT subject (the distinct count lives in {@code data.agg[i]}),
 * so the break only removes the accidental extra work.
 */
@SuppressWarnings("unchecked")
private void calcAggregate(Object[] r) throws KettleValueException
{
    for (int i=0;i<data.subjectnrs.length;i++)
    {
        Object subj = r[data.subjectnrs[i]];
        ValueMetaInterface subjMeta=data.inputRowMeta.getValueMeta(data.subjectnrs[i]);
        Object value = data.agg[i];
        ValueMetaInterface valueMeta=data.aggMeta.getValueMeta(i);

        switch(meta.getAggregateType()[i])
        {
            case GroupByMeta.TYPE_GROUP_SUM :
                data.agg[i]=ValueDataUtil.sum(valueMeta, value, subjMeta, subj);
                break;
            case GroupByMeta.TYPE_GROUP_AVERAGE :
                // Accumulate the sum and the non-null count; division happens
                // in getAggregateResult().
                if (!subjMeta.isNull(subj)) {
                    data.agg[i]=ValueDataUtil.sum(valueMeta, value, subjMeta, subj);
                    data.counts[i]++;
                }
                break;
            case GroupByMeta.TYPE_GROUP_STANDARD_DEVIATION :
                // Welford's online algorithm: track the running mean and the sum
                // of squared deltas; finalized (sqrt of variance) later.
                if (!subjMeta.isNull(subj)) {
                    data.counts[i]++;
                    double n = data.counts[i];
                    double x = subjMeta.getNumber(subj);
                    double sum = (Double)value;
                    double mean = data.mean[i];
                    double delta = x - mean;
                    mean = mean + (delta/n);
                    sum = sum + delta*(x-mean);
                    data.mean[i] = mean;
                    data.agg[i] = sum;
                }
                break;
            case GroupByMeta.TYPE_GROUP_COUNT_DISTINCT :
                if (!subjMeta.isNull(subj)) {
                    if (data.distinctObjs == null) {
                        data.distinctObjs = new Set[meta.getSubjectField().length];
                    }
                    if (data.distinctObjs[i] == null) {
                        data.distinctObjs[i] = new TreeSet<Object>();
                    }
                    Object obj = subjMeta.convertToNormalStorageType(subj);
                    if (!data.distinctObjs[i].contains(obj)) {
                        data.distinctObjs[i].add(obj);
                        data.agg[i] = (Long)value + 1;
                    }
                }
                // FIX: this break was missing, causing a fall-through into
                // TYPE_GROUP_COUNT_ALL below.
                break;
            case GroupByMeta.TYPE_GROUP_COUNT_ALL :
                if (!subjMeta.isNull(subj)) {
                    data.counts[i]++;
                }
                break;
            case GroupByMeta.TYPE_GROUP_MIN :
                if(subjMeta.isSortedDescending()) {
                    // Account for negation in ValueMeta.compare() - See PDI-2302
                    if (subjMeta.compare(value,valueMeta,subj)<0) data.agg[i]=subj;
                } else {
                    if (subjMeta.compare(subj,valueMeta,value)<0) data.agg[i]=subj;
                }
                break;
            case GroupByMeta.TYPE_GROUP_MAX :
                if(subjMeta.isSortedDescending()) {
                    // Account for negation in ValueMeta.compare() - See PDI-2302
                    if (subjMeta.compare(value,valueMeta,subj)>0) data.agg[i]=subj;
                } else {
                    if (subjMeta.compare(subj,valueMeta,value)>0) data.agg[i]=subj;
                }
                break;
            case GroupByMeta.TYPE_GROUP_FIRST :
                // Keep the first non-null value seen in the group.
                if (!(subj==null) && value==null) data.agg[i]=subj;
                break;
            case GroupByMeta.TYPE_GROUP_LAST :
                // Keep overwriting with every non-null value: ends up with the last one.
                if (!(subj==null)) data.agg[i]=subj;
                break;
            case GroupByMeta.TYPE_GROUP_FIRST_INCL_NULL:
                // This is on purpose. The calculation of the
                // first field is done when setting up a new group
                // This is just the field of the first row
                break;
            case GroupByMeta.TYPE_GROUP_LAST_INCL_NULL :
                data.agg[i]=subj;
                break;
            case GroupByMeta.TYPE_GROUP_CONCAT_COMMA :
                if (!(subj==null))
                {
                    StringBuilder sb = (StringBuilder) value;
                    if(sb.length()>0){
                        sb.append(", ");
                    }
                    sb.append(subjMeta.getString(subj));
                }
                break;
            case GroupByMeta.TYPE_GROUP_CONCAT_STRING :
                if (!(subj==null))
                {
                    // The separator is the (variable-substituted) value field.
                    String separator="";
                    if(!Const.isEmpty(meta.getValueField()[i])) separator=environmentSubstitute(meta.getValueField()[i]);

                    StringBuilder sb = (StringBuilder) value;
                    if(sb.length()>0){
                        sb.append(separator);
                    }
                    sb.append(subjMeta.getString(subj));
                }
                break;
            default: break;
        }
    }
}
/**
 * (Re)initializes per-group state: resets counters, allocates one start value
 * plus output metadata per aggregate field based on its aggregate type, and
 * clears the cumulative sum/average carry-over state.
 *
 * @param r the first row of the new group, or null when none is available yet
 */
private void newAggregate(Object[] r)
{
    // Put all the counters at 0
    for (int i=0;i<data.counts.length;i++) data.counts[i]=0;

    data.distinctObjs = null;

    data.agg = new Object[data.subjectnrs.length];
    data.mean = new double[data.subjectnrs.length]; // sets all doubles to 0.0
    data.aggMeta=new RowMeta();

    for (int i=0;i<data.subjectnrs.length;i++)
    {
        ValueMetaInterface subjMeta=data.inputRowMeta.getValueMeta(data.subjectnrs[i]);
        Object v=null;
        ValueMetaInterface vMeta=null;
        switch(meta.getAggregateType()[i])
        {
            case GroupByMeta.TYPE_GROUP_SUM :
            case GroupByMeta.TYPE_GROUP_AVERAGE :
            case GroupByMeta.TYPE_GROUP_CUMULATIVE_SUM :
            case GroupByMeta.TYPE_GROUP_CUMULATIVE_AVERAGE :
                // Numeric aggregates start from zero in the subject's own numeric type.
                vMeta = new ValueMeta(meta.getAggregateField()[i], subjMeta.isNumeric()?subjMeta.getType():ValueMetaInterface.TYPE_NUMBER);
                switch(subjMeta.getType())
                {
                    case ValueMetaInterface.TYPE_BIGNUMBER: v=new BigDecimal("0"); break;
                    case ValueMetaInterface.TYPE_INTEGER: v=new Long(0L); break;
                    case ValueMetaInterface.TYPE_NUMBER:
                    default: v=new Double(0.0); break;
                }
                break;
            case GroupByMeta.TYPE_GROUP_STANDARD_DEVIATION :
                vMeta = new ValueMeta(meta.getAggregateField()[i], ValueMetaInterface.TYPE_NUMBER);
                v=new Double(0.0);
                break;
            case GroupByMeta.TYPE_GROUP_COUNT_DISTINCT :
            case GroupByMeta.TYPE_GROUP_COUNT_ALL :
                vMeta = new ValueMeta(meta.getAggregateField()[i], ValueMetaInterface.TYPE_INTEGER);
                v=new Long(0L);
                break;
            case GroupByMeta.TYPE_GROUP_FIRST :
            case GroupByMeta.TYPE_GROUP_LAST :
            case GroupByMeta.TYPE_GROUP_FIRST_INCL_NULL :
            case GroupByMeta.TYPE_GROUP_LAST_INCL_NULL :
            case GroupByMeta.TYPE_GROUP_MIN :
            case GroupByMeta.TYPE_GROUP_MAX :
                // These seed from the first row's value (null when r is null).
                vMeta = subjMeta.clone();
                vMeta.setName(meta.getAggregateField()[i]);
                v = r==null ? null : r[data.subjectnrs[i]];
                break;
            case GroupByMeta.TYPE_GROUP_CONCAT_COMMA :
                vMeta = new ValueMeta(meta.getAggregateField()[i], ValueMetaInterface.TYPE_STRING);
                v = new StringBuilder(); //$NON-NLS-1$
                break;
            case GroupByMeta.TYPE_GROUP_CONCAT_STRING :
                vMeta = new ValueMeta(meta.getAggregateField()[i], ValueMetaInterface.TYPE_STRING);
                v = new StringBuilder(); //$NON-NLS-1$
                break;
            default:
                // TODO raise an error here because we cannot continue successfully maybe the UI should validate this
                break;
        }

        // Counts have their own fixed metadata; everything else inherits
        // length/precision from the subject field.
        if (meta.getAggregateType()[i]!=GroupByMeta.TYPE_GROUP_COUNT_ALL &&
            meta.getAggregateType()[i]!=GroupByMeta.TYPE_GROUP_COUNT_DISTINCT)
        {
            vMeta.setLength(subjMeta.getLength(), subjMeta.getPrecision());
        }
        if (v!=null) data.agg[i]=v;
        data.aggMeta.addValueMeta(vMeta);
    }

    // Also clear the cumulative data...
    //
    for (int i=0;i<data.previousSums.length;i++) data.previousSums[i]=null;
    for (int i=0;i<data.previousAvgCount.length;i++)
    {
        data.previousAvgCount[i]=0L;
        data.previousAvgSum[i]=null;
    }
}
/**
 * Builds one output row: the group-key values (when a previous row exists)
 * followed by the finalized aggregates; null when there is nothing to emit.
 */
private Object[] buildResult(Object[] r) throws KettleValueException
{
    // Without "always give back one row", a null previous row yields no output.
    if (r == null && !meta.isAlwaysGivingBackOneRow()) {
        return null;
    }
    Object[] result = RowDataUtil.allocateRowData(data.groupnrs.length);
    if (r != null) {
        for (int i = 0; i < data.groupnrs.length; i++) {
            result[i] = r[data.groupnrs[i]];
        }
    }
    return RowDataUtil.addRowData(result, data.groupnrs.length, getAggregateResult());
}
/**
 * Builds the metadata describing the group-key columns, in group-field order.
 * (Also removed a redundant trailing {@code return;} from the original.)
 */
private void initGroupMeta(RowMetaInterface previousRowMeta) throws KettleValueException
{
    data.groupMeta = new RowMeta();
    for (int i = 0; i < data.groupnrs.length; i++) {
        data.groupMeta.addValueMeta(previousRowMeta.getValueMeta(data.groupnrs[i]));
    }
}
/**
 * Finalizes the accumulated values of the current group: divides sums for
 * averages, takes the square root of the variance for the standard deviation,
 * converts counters to Longs and StringBuilders to Strings.
 *
 * Fix: the original allocated {@code new Object[data.subjectnrs.length]}
 * BEFORE checking {@code data.subjectnrs != null}, so the null check could
 * never pass without an earlier NullPointerException. The guard now runs
 * first and returns an empty result instead.
 *
 * @return one finalized value per aggregate subject field
 */
private Object[] getAggregateResult() throws KettleValueException
{
    if (data.subjectnrs == null) {
        return new Object[0];
    }

    Object[] result = new Object[data.subjectnrs.length];
    for (int i = 0; i < data.subjectnrs.length; i++)
    {
        Object ag = data.agg[i];
        switch (meta.getAggregateType()[i])
        {
            case GroupByMeta.TYPE_GROUP_SUM:
                break;
            case GroupByMeta.TYPE_GROUP_AVERAGE:
                ag = ValueDataUtil.divide(data.aggMeta.getValueMeta(i), ag,
                    new ValueMeta("c", ValueMetaInterface.TYPE_INTEGER), new Long(data.counts[i])); //$NON-NLS-1$
                break;
            case GroupByMeta.TYPE_GROUP_COUNT_ALL:
                ag = new Long(data.counts[i]);
                break;
            case GroupByMeta.TYPE_GROUP_COUNT_DISTINCT:
                break;
            case GroupByMeta.TYPE_GROUP_MIN:
                break;
            case GroupByMeta.TYPE_GROUP_MAX:
                break;
            case GroupByMeta.TYPE_GROUP_STANDARD_DEVIATION:
                // agg holds the running sum of squared deltas; finalize to stddev.
                double sum = (Double) ag / data.counts[i];
                ag = Double.valueOf(Math.sqrt(sum));
                break;
            case GroupByMeta.TYPE_GROUP_CONCAT_COMMA:
                // intentional fall-through: both concat variants finalize identically
            case GroupByMeta.TYPE_GROUP_CONCAT_STRING:
                ag = ((StringBuilder) ag).toString();
                break;
            default:
                break;
        }
        result[i] = ag;
    }
    return result;
}
/**
 * Buffers a row for pass-all-rows mode; once more than 5000 rows are held in
 * memory, the oldest row spills to a lazily created temporary file.
 */
private void addToBuffer(Object[] row) throws KettleFileException
{
    data.bufferList.add(row);
    if (data.bufferList.size() <= 5000) {
        return;
    }

    if (data.rowsOnFile == 0) {
        // First spill: create the temp file and its output streams.
        try {
            data.tempFile = File.createTempFile(meta.getPrefix(), ".tmp", new File(environmentSubstitute(meta.getDirectory()))); //$NON-NLS-1$
            data.fos = new FileOutputStream(data.tempFile);
            data.dos = new DataOutputStream(data.fos);
            data.firstRead = true;
        } catch (IOException e) {
            throw new KettleFileException(BaseMessages.getString(PKG, "GroupBy.Exception.UnableToCreateTemporaryFile"), e); //$NON-NLS-1$
        }
    }

    // Move the oldest in-memory row out to disk.
    Object[] oldest = (Object[]) data.bufferList.get(0);
    data.inputRowMeta.writeData(data.dos, oldest);
    data.bufferList.remove(0);
    data.rowsOnFile++;
}
/**
 * Returns the next buffered row in FIFO order — draining the spill file
 * first, then the in-memory list — or null when the buffer is exhausted.
 */
private Object[] getRowFromBuffer() throws KettleFileException
{
    if (data.rowsOnFile > 0) {
        if (data.firstRead) {
            // Lazily open the input side of the spill file.
            try {
                data.fis = new FileInputStream(data.tempFile);
                data.dis = new DataInputStream(data.fis);
                data.firstRead = false;
            } catch (IOException e) {
                throw new KettleFileException(BaseMessages.getString(PKG, "GroupBy.Exception.UnableToReadBackRowFromTemporaryFile"), e); //$NON-NLS-1$
            }
        }

        Object[] row;
        try {
            row = data.inputRowMeta.readData(data.dis);
        } catch (SocketTimeoutException e) {
            // Declared by readData(), but cannot happen on file streams.
            throw new KettleFileException(e);
        }
        data.rowsOnFile--;
        return row;
    }

    if (data.bufferList.size() > 0) {
        Object[] row = (Object[]) data.bufferList.get(0);
        data.bufferList.remove(0);
        return row;
    }

    return null; // nothing left
}
/** Closes the spill file's output streams and re-arms lazy re-opening for reads. */
private void closeOutput() throws KettleFileException
{
    try {
        if (data.dos != null) {
            data.dos.close();
            data.dos = null;
        }
        if (data.fos != null) {
            data.fos.close();
            data.fos = null;
        }
        data.firstRead = true;
    } catch (IOException e) {
        // NOTE(review): the i18n key mentions "InputStream" although this closes
        // the output side; kept as-is since the bundle may define no better key.
        throw new KettleFileException(BaseMessages.getString(PKG, "GroupBy.Exception.UnableToCloseInputStream"), e); //$NON-NLS-1$
    }
}
/** Closes the spill file's input streams, ignoring nothing: failures are wrapped. */
private void closeInput() throws KettleFileException
{
    try {
        if (data.fis != null) {
            data.fis.close();
            data.fis = null;
        }
        if (data.dis != null) {
            data.dis.close();
            data.dis = null;
        }
    } catch (IOException e) {
        throw new KettleFileException(BaseMessages.getString(PKG, "GroupBy.Exception.UnableToCloseInputStream"), e); //$NON-NLS-1$
    }
}
/** Step lifecycle init: prepares the pass-all-rows buffer after the base init. */
public boolean init(StepMetaInterface smi, StepDataInterface sdi)
{
    meta = (GroupByMeta) smi;
    data = (GroupByData) sdi;

    if (!super.init(smi, sdi)) {
        return false;
    }
    data.bufferList = new ArrayList<Object[]>();
    data.rowsOnFile = 0;
    return true;
}
/** Step lifecycle teardown: removes any leftover spill file before delegating. */
public void dispose(StepMetaInterface smi, StepDataInterface sdi)
{
    File spill = data.tempFile;
    if (spill != null) {
        spill.delete();
    }
    super.dispose(smi, sdi);
}
/** Flushes the currently open group at the end of a batch and arms a fresh batch. */
public void batchComplete() throws KettleException {
    handleLastOfGroup();
    data.newBatch=true;
}
}
| apache-2.0 |
googleads/googleads-java-lib | modules/dfp_appengine/src/main/java/com/google/api/ads/admanager/jaxws/v202111/NetworkServiceInterfacegetDefaultThirdPartyDataDeclarationResponse.java | 2332 | // Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.api.ads.admanager.jaxws.v202111;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlType;
/**
* <p>Java class for getDefaultThirdPartyDataDeclarationResponse element declaration.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <element name="getDefaultThirdPartyDataDeclarationResponse">
* <complexType>
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <element name="rval" type="{https://www.google.com/apis/ads/publisher/v202111}ThirdPartyDataDeclaration" minOccurs="0"/>
* </sequence>
* </restriction>
* </complexContent>
* </complexType>
* </element>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "", propOrder = {
    "rval"
})
@XmlRootElement(name = "getDefaultThirdPartyDataDeclarationResponse")
public class NetworkServiceInterfacegetDefaultThirdPartyDataDeclarationResponse {

    // JAXB-bound payload of the SOAP response; null when the element is absent.
    protected ThirdPartyDataDeclaration rval;

    /**
     * Returns the declaration carried by this response.
     *
     * @return the bound {@link ThirdPartyDataDeclaration}, possibly {@code null}
     */
    public ThirdPartyDataDeclaration getRval() {
        return rval;
    }

    /**
     * Sets the declaration carried by this response.
     *
     * @param value the {@link ThirdPartyDataDeclaration} to bind; may be {@code null}
     */
    public void setRval(ThirdPartyDataDeclaration value) {
        this.rval = value;
    }

}
| apache-2.0 |
pitchpoint-solutions/sfs | sfs-server/src/main/java/org/sfs/filesystem/volume/ReadStreamBlob.java | 1632 | /*
* Copyright 2016 The Simple File Server Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sfs.filesystem.volume;
import io.vertx.core.http.HttpClientResponse;
import org.sfs.io.BufferEndableWriteStream;
import rx.Observable;
import static java.lang.Long.parseLong;
import static org.sfs.util.SfsHttpHeaders.X_CONTENT_OFFSET;
public abstract class ReadStreamBlob extends HeaderBlob {

    // Byte offset within the volume at which this blob's data starts.
    private final long offset;

    public ReadStreamBlob(String volume, long position, long offset, long length) {
        super(volume, position, length);
        this.offset = offset;
    }

    public ReadStreamBlob(HttpClientResponse httpClientResponse) {
        super(httpClientResponse);
        // The offset travels in the X-Content-Offset response header.
        this.offset = parseLong(httpClientResponse.headers().get(X_CONTENT_OFFSET));
    }

    public long getOffset() {
        return offset;
    }

    @Override
    public String toString() {
        return "ReadStreamBlob{" +
                "offset=" + offset +
                "} " + super.toString();
    }

    /** Streams this blob's bytes into the given writer; completes when done. */
    public abstract Observable<Void> produce(BufferEndableWriteStream bufferStreamConsumer);
}
| apache-2.0 |
yafengguo/Apache-beam | runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkPartialReduceFunction.java | 7601 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.beam.runners.flink.translation.functions;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.Iterator;
import java.util.Map;
import org.apache.beam.runners.flink.translation.utils.SerializedPipelineOptions;
import org.apache.beam.sdk.options.PipelineOptions;
import org.apache.beam.sdk.transforms.CombineFnBase;
import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
import org.apache.beam.sdk.transforms.windowing.OutputTimeFn;
import org.apache.beam.sdk.transforms.windowing.PaneInfo;
import org.apache.beam.sdk.util.PerKeyCombineFnRunner;
import org.apache.beam.sdk.util.PerKeyCombineFnRunners;
import org.apache.beam.sdk.util.WindowedValue;
import org.apache.beam.sdk.util.WindowingStrategy;
import org.apache.beam.sdk.values.KV;
import org.apache.beam.sdk.values.PCollectionView;
import org.apache.flink.api.common.functions.RichGroupCombineFunction;
import org.apache.flink.util.Collector;
import org.joda.time.Instant;
/**
 * This is the first step for executing a {@link org.apache.beam.sdk.transforms.Combine.PerKey}
 * on Flink. The second part is {@link FlinkReduceFunction}. This function performs a local
 * combine step before shuffling while the latter does the final combination after a shuffle.
 *
 * <p>The input to {@link #combine(Iterable, Collector)} are elements of the same key but
 * for different windows. We have to ensure that we only combine elements of matching
 * windows.
 */
public class FlinkPartialReduceFunction<K, InputT, AccumT, W extends BoundedWindow>
    extends RichGroupCombineFunction<WindowedValue<KV<K, InputT>>, WindowedValue<KV<K, AccumT>>> {

  // Per-key combine function; createAccumulator/addInput are driven from combine() below.
  protected final CombineFnBase.PerKeyCombineFn<K, InputT, AccumT, ?> combineFn;

  // Dummy OldDoFn; only needed to construct a ProcessContext (see constructor).
  protected final OldDoFn<KV<K, InputT>, KV<K, AccumT>> doFn;

  protected final WindowingStrategy<?, W> windowingStrategy;

  // Pipeline options in serializable form so this function can be shipped to workers.
  protected final SerializedPipelineOptions serializedOptions;

  protected final Map<PCollectionView<?>, WindowingStrategy<?, ?>> sideInputs;

  /**
   * @param combineFn the per-key combine function to apply locally
   * @param windowingStrategy windowing strategy of the input collection
   * @param sideInputs side-input views and their windowing strategies
   * @param pipelineOptions options to serialize for worker-side use
   */
  public FlinkPartialReduceFunction(
      CombineFnBase.PerKeyCombineFn<K, InputT, AccumT, ?> combineFn,
      WindowingStrategy<?, W> windowingStrategy,
      Map<PCollectionView<?>, WindowingStrategy<?, ?>> sideInputs,
      PipelineOptions pipelineOptions) {

    this.combineFn = combineFn;
    this.windowingStrategy = windowingStrategy;
    this.sideInputs = sideInputs;
    this.serializedOptions = new SerializedPipelineOptions(pipelineOptions);

    // dummy OldDoFn because we need one for ProcessContext
    this.doFn = new OldDoFn<KV<K, InputT>, KV<K, AccumT>>() {
      @Override
      public void processElement(ProcessContext c) throws Exception {

      }
    };
  }

  @Override
  public void combine(
      Iterable<WindowedValue<KV<K, InputT>>> elements,
      Collector<WindowedValue<KV<K, AccumT>>> out) throws Exception {

    FlinkSingleOutputProcessContext<KV<K, InputT>, KV<K, AccumT>> processContext =
        new FlinkSingleOutputProcessContext<>(
            serializedOptions.getPipelineOptions(),
            getRuntimeContext(),
            doFn,
            windowingStrategy,
            sideInputs, out
        );

    PerKeyCombineFnRunner<K, InputT, AccumT, ?> combineFnRunner =
        PerKeyCombineFnRunners.create(combineFn);

    @SuppressWarnings("unchecked")
    OutputTimeFn<? super BoundedWindow> outputTimeFn =
        (OutputTimeFn<? super BoundedWindow>) windowingStrategy.getOutputTimeFn();

    // get all elements so that we can sort them, has to fit into
    // memory
    // this seems very unprudent, but correct, for now
    ArrayList<WindowedValue<KV<K, InputT>>> sortedInput = Lists.newArrayList();
    for (WindowedValue<KV<K, InputT>> inputValue: elements) {
      for (WindowedValue<KV<K, InputT>> exploded: inputValue.explodeWindows()) {
        sortedInput.add(exploded);
      }
    }
    // sort by window end timestamp; each exploded value has exactly one window
    Collections.sort(sortedInput, new Comparator<WindowedValue<KV<K, InputT>>>() {
      @Override
      public int compare(
          WindowedValue<KV<K, InputT>> o1,
          WindowedValue<KV<K, InputT>> o2) {
        return Iterables.getOnlyElement(o1.getWindows()).maxTimestamp()
            .compareTo(Iterables.getOnlyElement(o2.getWindows()).maxTimestamp());
      }
    });

    // iterate over the elements that are sorted by window timestamp
    //
    final Iterator<WindowedValue<KV<K, InputT>>> iterator = sortedInput.iterator();

    // create accumulator using the first elements key
    WindowedValue<KV<K, InputT>> currentValue = iterator.next();
    K key = currentValue.getValue().getKey();
    BoundedWindow currentWindow = Iterables.getFirst(currentValue.getWindows(), null);
    InputT firstValue = currentValue.getValue().getValue();
    processContext.setWindowedValue(currentValue);
    AccumT accumulator = combineFnRunner.createAccumulator(key, processContext);
    accumulator = combineFnRunner.addInput(key, accumulator, firstValue, processContext);

    // we use this to keep track of the timestamps assigned by the OutputTimeFn
    Instant windowTimestamp =
        outputTimeFn.assignOutputTime(currentValue.getTimestamp(), currentWindow);

    while (iterator.hasNext()) {
      WindowedValue<KV<K, InputT>> nextValue = iterator.next();
      BoundedWindow nextWindow = Iterables.getOnlyElement(nextValue.getWindows());

      if (nextWindow.equals(currentWindow)) {
        // continue accumulating
        InputT value = nextValue.getValue().getValue();
        processContext.setWindowedValue(nextValue);
        accumulator = combineFnRunner.addInput(key, accumulator, value, processContext);

        windowTimestamp = outputTimeFn.combine(
            windowTimestamp,
            outputTimeFn.assignOutputTime(nextValue.getTimestamp(), currentWindow));

      } else {
        // emit the value that we currently have
        out.collect(
            WindowedValue.of(
                KV.of(key, accumulator),
                windowTimestamp,
                currentWindow,
                PaneInfo.NO_FIRING));

        // window changed: start a fresh accumulator for the new window
        currentWindow = nextWindow;
        InputT value = nextValue.getValue().getValue();
        processContext.setWindowedValue(nextValue);
        accumulator = combineFnRunner.createAccumulator(key, processContext);
        accumulator = combineFnRunner.addInput(key, accumulator, value, processContext);
        windowTimestamp = outputTimeFn.assignOutputTime(nextValue.getTimestamp(), currentWindow);
      }
    }

    // emit the final accumulator
    out.collect(
        WindowedValue.of(
            KV.of(key, accumulator),
            windowTimestamp,
            currentWindow,
            PaneInfo.NO_FIRING));
  }
}
| apache-2.0 |
apixandru/simplegame | simplegame-server/src/test/java/com/notbed/simplegame/server/handler/TestLoginHandler.java | 1994 | /**
*
*/
package com.notbed.simplegame.server.handler;
import com.notbed.simplegame.brotocol.client.LoginRequest;
import com.notbed.simplegame.server.data.User;
import com.notbed.simplegame.server.data.UserDao;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mockito;
import static com.notbed.simplegame.brotocol.server.LoginResponse.NO_ACCOUNT;
import static com.notbed.simplegame.brotocol.server.LoginResponse.SUCCESS;
import static com.notbed.simplegame.brotocol.server.LoginResponse.WRONG_PASSWORD;
import static java.util.Collections.singletonList;
/**
* @author Alexandru-Constantin Bledea
* @since Jul 15, 2015
*/
public final class TestLoginHandler {

	private static final String USERNAME = "username";
	private static final String PASSWORD = "password";

	// The request every test sends; the DAO state decides the outcome.
	private final LoginRequest request = new LoginRequest(USERNAME, PASSWORD);

	private UserDao dao;
	private MockPacketSender sender;

	@Before
	public void setup() {
		this.sender = new MockPacketSender();
		this.dao = Mockito.mock(UserDao.class);
	}

	@Test
	public void testNoSuchAccount() {
		assertLoginProduces(NO_ACCOUNT);
	}

	@Test
	public void testWrongPassword() {
		Mockito.when(dao.getUser(USERNAME)).thenReturn(new User(USERNAME, "skippy"));
		assertLoginProduces(WRONG_PASSWORD);
	}

	@Test
	public void testSuccess() {
		Mockito.when(dao.getUser(USERNAME)).thenReturn(new User(USERNAME, PASSWORD));
		assertLoginProduces(SUCCESS);
	}

	/**
	 * Runs the login request through a fresh handler and asserts that exactly
	 * the given response packet was sent.
	 */
	private void assertLoginProduces(Object expectedResponse) {
		new LoginHandler(dao).handlePacket(this.request, this.sender);
		Assert.assertEquals(singletonList(expectedResponse), this.sender.getPackets());
	}
}
| apache-2.0 |
vergilchiu/hive | ql/src/java/org/apache/hadoop/hive/ql/exec/GroupByOperator.java | 43746 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.ql.exec;
import java.lang.management.ManagementFactory;
import java.lang.management.MemoryMXBean;
import java.lang.reflect.Field;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.CompilationOpContext;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.parse.OpParseContext;
import org.apache.hadoop.hive.ql.plan.AggregationDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.GroupByDesc;
import org.apache.hadoop.hive.ql.plan.OperatorDesc;
import org.apache.hadoop.hive.ql.plan.api.OperatorType;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator.AggregationBuffer;
import org.apache.hadoop.hive.serde2.lazy.ByteArrayRef;
import org.apache.hadoop.hive.serde2.lazy.LazyBinary;
import org.apache.hadoop.hive.serde2.lazy.LazyPrimitive;
import org.apache.hadoop.hive.serde2.lazy.LazyString;
import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyBinaryObjectInspector;
import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyStringObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.ObjectInspectorCopyOption;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.UnionObject;
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import com.google.common.math.IntMath;
import javolution.util.FastBitSet;
/**
* GroupBy operator implementation.
*/
public class GroupByOperator extends Operator<GroupByDesc> {
private static final long serialVersionUID = 1L;
private static final int NUMROWSESTIMATESIZE = 1000;
private transient ExprNodeEvaluator[] keyFields;
private transient ObjectInspector[] keyObjectInspectors;
private transient ExprNodeEvaluator[][] aggregationParameterFields;
private transient ObjectInspector[][] aggregationParameterObjectInspectors;
private transient ObjectInspector[][] aggregationParameterStandardObjectInspectors;
private transient Object[][] aggregationParameterObjects;
// so aggregationIsDistinct is a boolean array instead of a single number.
private transient boolean[] aggregationIsDistinct;
// Map from integer tag to distinct aggrs
private transient Map<Integer, Set<Integer>> distinctKeyAggrs =
new HashMap<Integer, Set<Integer>>();
// Map from integer tag to non-distinct aggrs with key parameters.
private transient Map<Integer, Set<Integer>> nonDistinctKeyAggrs =
new HashMap<Integer, Set<Integer>>();
// List of non-distinct aggrs.
private transient List<Integer> nonDistinctAggrs = new ArrayList<Integer>();
// Union expr for distinct keys
private transient ExprNodeEvaluator unionExprEval;
private transient GenericUDAFEvaluator[] aggregationEvaluators;
private transient boolean[] estimableAggregationEvaluators;
// Used by sort-based GroupBy: Mode = COMPLETE, PARTIAL1, PARTIAL2,
// MERGEPARTIAL
private transient KeyWrapper currentKeys;
private transient KeyWrapper newKeys;
private transient AggregationBuffer[] aggregations;
private transient Object[][] aggregationsParametersLastInvoke;
// Used by hash-based GroupBy: Mode = HASH, PARTIALS
private transient HashMap<KeyWrapper, AggregationBuffer[]> hashAggregations;
private transient boolean firstRow;
private transient boolean hashAggr;
private transient long numRowsInput;
private transient long numRowsHashTbl;
private transient int groupbyMapAggrInterval;
private transient long numRowsCompareHashAggr;
private transient float minReductionHashAggr;
private transient int outputKeyLength;
// current Key ObjectInspectors are standard ObjectInspectors
private transient ObjectInspector[] currentKeyObjectInspectors;
private transient MemoryMXBean memoryMXBean;
private transient boolean groupingSetsPresent; // generates grouping set
private transient int groupingSetsPosition; // position of grouping set, generally the last of keys
private transient List<Integer> groupingSets; // declared grouping set values
private transient FastBitSet[] groupingSetsBitSet; // bitsets acquired from grouping set values
private transient IntWritable[] newKeysGroupingSets;
// for these positions, some variable primitive type (String) is used, so size
// cannot be estimated. sample it at runtime.
private transient List<Integer> keyPositionsSize;
// for these positions, some variable primitive type (String) is used for the
// aggregation classes
private transient List<Field>[] aggrPositions;
private transient int fixedRowSize;
private transient int totalVariableSize;
private transient int numEntriesVarSize;
private transient int countAfterReport; // report or forward
private transient int heartbeatInterval;
private transient boolean isTez;
private transient boolean isLlap;
private transient int numExecutors;
/**
* Total amount of memory allowed for JVM heap.
*/
protected transient long maxMemory;
/**
* Max memory usable by the hashtable before it should flush.
*/
protected transient long maxHashTblMemory;
/**
* configure percent of memory threshold usable by QP.
*/
protected transient float memoryThreshold;
/**
* Current number of entries in the hash table.
*/
protected transient int numEntriesHashTable;
/**
 * Converts the integer encoding of a grouping set into its big-endian bit-set
 * representation: the least-significant bit of {@code value} maps to position
 * {@code length - 1} of the result, and only the low {@code length} bits of
 * {@code value} are consumed.
 *
 * @param value integer encoding of the grouping set
 * @param length number of grouping-key positions represented
 * @return bit set with one position per grouping key
 */
public static FastBitSet groupingSet2BitSet(int value, int length) {
  FastBitSet bits = new FastBitSet();
  int remaining = value;
  for (int index = length - 1; index >= 0; index--) {
    if ((remaining & 1) != 0) {
      bits.set(index);
    }
    remaining >>>= 1;
  }
  return bits;
}
/** Kryo ctor. */
protected GroupByOperator() {
  super();
}

/** Standard ctor used by the compiler; delegates context to the base Operator. */
public GroupByOperator(CompilationOpContext ctx) {
  super(ctx);
}
/**
 * Initializes the operator from its {@link GroupByDesc}: key/aggregation
 * expression evaluators, object inspectors, grouping-set constants, the
 * reduce-side union expression for distinct aggregates, and — when running in
 * hash mode — the in-memory hash table sizing parameters.
 */
@Override
protected void initializeOp(Configuration hconf) throws HiveException {
  super.initializeOp(hconf);
  numRowsInput = 0;
  numRowsHashTbl = 0;

  heartbeatInterval = HiveConf.getIntVar(hconf,
      HiveConf.ConfVars.HIVESENDHEARTBEAT);
  countAfterReport = 0;
  groupingSetsPresent = conf.isGroupingSetsPresent();
  ObjectInspector rowInspector = inputObjInspectors[0];

  // init keyFields
  int numKeys = conf.getKeys().size();

  keyFields = new ExprNodeEvaluator[numKeys];
  keyObjectInspectors = new ObjectInspector[numKeys];
  currentKeyObjectInspectors = new ObjectInspector[numKeys];
  for (int i = 0; i < numKeys; i++) {
    keyFields[i] = ExprNodeEvaluatorFactory.get(conf.getKeys().get(i), hconf);
    keyObjectInspectors[i] = keyFields[i].initialize(rowInspector);
    // keys are compared/stored in their standard writable form
    currentKeyObjectInspectors[i] = ObjectInspectorUtils
        .getStandardObjectInspector(keyObjectInspectors[i],
        ObjectInspectorCopyOption.WRITABLE);
  }

  // Initialize the constants for the grouping sets, so that they can be re-used for
  // each row
  if (groupingSetsPresent) {
    groupingSets = conf.getListGroupingSets();
    groupingSetsPosition = conf.getGroupingSetPosition();
    newKeysGroupingSets = new IntWritable[groupingSets.size()];
    groupingSetsBitSet = new FastBitSet[groupingSets.size()];

    int pos = 0;
    for (Integer groupingSet: groupingSets) {
      // Create the mapping corresponding to the grouping set
      newKeysGroupingSets[pos] = new IntWritable(groupingSet);
      groupingSetsBitSet[pos] = groupingSet2BitSet(groupingSet, groupingSetsPosition);
      pos++;
    }
  }

  // initialize unionExpr for reduce-side
  // reduce KEY has union field as the last field if there are distinct
  // aggregates in group-by.
  List<? extends StructField> sfs =
      ((StructObjectInspector) rowInspector).getAllStructFieldRefs();
  if (sfs.size() > 0) {
    StructField keyField = sfs.get(0);
    if (keyField.getFieldName().toUpperCase().equals(
        Utilities.ReduceField.KEY.name())) {
      ObjectInspector keyObjInspector = keyField.getFieldObjectInspector();
      if (keyObjInspector instanceof StructObjectInspector) {
        List<? extends StructField> keysfs =
            ((StructObjectInspector) keyObjInspector).getAllStructFieldRefs();
        if (keysfs.size() > 0) {
          // the last field is the union field, if any
          StructField sf = keysfs.get(keysfs.size() - 1);
          if (sf.getFieldObjectInspector().getCategory().equals(
              ObjectInspector.Category.UNION)) {
            unionExprEval = ExprNodeEvaluatorFactory.get(
                new ExprNodeColumnDesc(TypeInfoUtils.getTypeInfoFromObjectInspector(
                    sf.getFieldObjectInspector()),
                    keyField.getFieldName() + "." + sf.getFieldName(), null,
                    false), hconf);
            unionExprEval.initialize(rowInspector);
          }
        }
      }
    }
  }
  // init aggregationParameterFields
  ArrayList<AggregationDesc> aggrs = conf.getAggregators();
  aggregationParameterFields = new ExprNodeEvaluator[aggrs.size()][];
  aggregationParameterObjectInspectors = new ObjectInspector[aggrs.size()][];
  aggregationParameterStandardObjectInspectors = new ObjectInspector[aggrs.size()][];
  aggregationParameterObjects = new Object[aggrs.size()][];
  aggregationIsDistinct = new boolean[aggrs.size()];
  for (int i = 0; i < aggrs.size(); i++) {
    AggregationDesc aggr = aggrs.get(i);
    ArrayList<ExprNodeDesc> parameters = aggr.getParameters();
    aggregationParameterFields[i] = new ExprNodeEvaluator[parameters.size()];
    aggregationParameterObjectInspectors[i] = new ObjectInspector[parameters
        .size()];
    aggregationParameterStandardObjectInspectors[i] = new ObjectInspector[parameters
        .size()];
    aggregationParameterObjects[i] = new Object[parameters.size()];
    for (int j = 0; j < parameters.size(); j++) {
      aggregationParameterFields[i][j] = ExprNodeEvaluatorFactory
          .get(parameters.get(j), hconf);
      aggregationParameterObjectInspectors[i][j] = aggregationParameterFields[i][j]
          .initialize(rowInspector);
      if (unionExprEval != null) {
        // classify each aggregate by whether its parameter comes from the
        // distinct union field of the reduce KEY or from a plain column
        String[] names = parameters.get(j).getExprString().split("\\.");
        // parameters of the form : KEY.colx:t.coly
        if (Utilities.ReduceField.KEY.name().equals(names[0]) && names.length > 2) {
          String name = names[names.length - 2];
          int tag = Integer.parseInt(name.split("\\:")[1]);
          if (aggr.getDistinct()) {
            // is distinct
            Set<Integer> set = distinctKeyAggrs.get(tag);
            if (null == set) {
              set = new HashSet<Integer>();
              distinctKeyAggrs.put(tag, set);
            }
            if (!set.contains(i)) {
              set.add(i);
            }
          } else {
            Set<Integer> set = nonDistinctKeyAggrs.get(tag);
            if (null == set) {
              set = new HashSet<Integer>();
              nonDistinctKeyAggrs.put(tag, set);
            }
            if (!set.contains(i)) {
              set.add(i);
            }
          }
        } else {
          // will be KEY._COLx or VALUE._COLx
          if (!nonDistinctAggrs.contains(i)) {
            nonDistinctAggrs.add(i);
          }
        }
      } else {
        if (aggr.getDistinct()) {
          aggregationIsDistinct[i] = true;
        }
      }
      aggregationParameterStandardObjectInspectors[i][j] = ObjectInspectorUtils
          .getStandardObjectInspector(
          aggregationParameterObjectInspectors[i][j],
          ObjectInspectorCopyOption.WRITABLE);
      aggregationParameterObjects[i][j] = null;
    }
    if (parameters.size() == 0) {
      // for ex: count(*)
      if (!nonDistinctAggrs.contains(i)) {
        nonDistinctAggrs.add(i);
      }
    }
  }

  // init aggregationClasses
  aggregationEvaluators = new GenericUDAFEvaluator[conf.getAggregators()
      .size()];
  for (int i = 0; i < aggregationEvaluators.length; i++) {
    AggregationDesc agg = conf.getAggregators().get(i);
    aggregationEvaluators[i] = agg.getGenericUDAFEvaluator();
  }

  MapredContext context = MapredContext.get();
  if (context != null) {
    for (GenericUDAFEvaluator genericUDAFEvaluator : aggregationEvaluators) {
      context.setup(genericUDAFEvaluator);
    }
  }

  // grouping id should be pruned, which is the last of key columns
  // see ColumnPrunerGroupByProc
  outputKeyLength = conf.pruneGroupingSetId() ? keyFields.length - 1 : keyFields.length;

  // init objectInspectors
  ObjectInspector[] objectInspectors =
      new ObjectInspector[outputKeyLength + aggregationEvaluators.length];
  for (int i = 0; i < outputKeyLength; i++) {
    objectInspectors[i] = currentKeyObjectInspectors[i];
  }
  for (int i = 0; i < aggregationEvaluators.length; i++) {
    objectInspectors[outputKeyLength + i] = aggregationEvaluators[i].init(conf.getAggregators()
        .get(i).getMode(), aggregationParameterObjectInspectors[i]);
  }

  aggregationsParametersLastInvoke = new Object[conf.getAggregators().size()][];
  // sort-based aggregation unless hash mode (without bucketing) or grouping sets
  if ((conf.getMode() != GroupByDesc.Mode.HASH || conf.getBucketGroup()) &&
      (!groupingSetsPresent)) {
    aggregations = newAggregations();
    hashAggr = false;
  } else {
    hashAggregations = new HashMap<KeyWrapper, AggregationBuffer[]>(256);
    aggregations = newAggregations();
    hashAggr = true;
    keyPositionsSize = new ArrayList<Integer>();
    aggrPositions = new List[aggregations.length];
    groupbyMapAggrInterval = HiveConf.getIntVar(hconf,
        HiveConf.ConfVars.HIVEGROUPBYMAPINTERVAL);

    // compare every groupbyMapAggrInterval rows
    numRowsCompareHashAggr = groupbyMapAggrInterval;

    minReductionHashAggr = HiveConf.getFloatVar(hconf,
        HiveConf.ConfVars.HIVEMAPAGGRHASHMINREDUCTION);
  }

  List<String> fieldNames = new ArrayList<String>(conf.getOutputColumnNames());
  outputObjInspector = ObjectInspectorFactory
      .getStandardStructObjectInspector(fieldNames, Arrays.asList(objectInspectors));

  KeyWrapperFactory keyWrapperFactory =
      new KeyWrapperFactory(keyFields, keyObjectInspectors, currentKeyObjectInspectors);

  newKeys = keyWrapperFactory.getKeyWrapper();
  isTez = HiveConf.getVar(hconf, HiveConf.ConfVars.HIVE_EXECUTION_ENGINE).equals("tez");
  isLlap = isTez && HiveConf.getVar(hconf, HiveConf.ConfVars.HIVE_EXECUTION_MODE).equals("llap");
  numExecutors = isLlap ? HiveConf.getIntVar(hconf, HiveConf.ConfVars.LLAP_DAEMON_NUM_EXECUTORS) : 1;
  firstRow = true;
  // estimate the number of hash table entries based on the size of each
  // entry. Since the size of a entry
  // is not known, estimate that based on the number of entries
  if (hashAggr) {
    computeMaxEntriesHashAggr();
  }
  memoryMXBean = ManagementFactory.getMemoryMXBean();
  maxMemory = isTez ? getConf().getMaxMemoryAvailable() : memoryMXBean.getHeapMemoryUsage().getMax();
  memoryThreshold = this.getConf().getMemoryThreshold();
  LOG.info("isTez: {} isLlap: {} numExecutors: {} maxMemory: {}", isTez, isLlap, numExecutors, maxMemory);
}
/**
 * Computes the memory budget for the map-side hash table. The user-configured
 * percentage is applied to the container's available memory (Tez) or the JVM
 * max heap (other engines), then per-row size estimation is kicked off.
 *
 * @throws HiveException if row-size estimation fails
 **/
private void computeMaxEntriesHashAggr() throws HiveException {
  final float memoryFraction = this.getConf().getGroupByMemoryUsage();
  final long memoryBudget = isTez
      ? getConf().getMaxMemoryAvailable()
      : Runtime.getRuntime().maxMemory();
  maxHashTblMemory = (long) (memoryFraction * memoryBudget);
  LOG.info("Max hash table memory: {} bytes", maxHashTblMemory);
  estimateRowSize();
}
// Rough per-object byte-cost estimates used when sizing the map-side hash table.
public static final int javaObjectOverHead = 64;      // overhead of a heap object reference
public static final int javaHashEntryOverHead = 64;   // overhead of one hash-table entry
public static final int javaSizePrimitiveType = 16;   // boxed primitive payload
public static final int javaSizeUnknownType = 256;    // fallback for unestimable types
/**
 * Returns the estimated in-memory size in bytes of the key at position
 * {@code pos} for the given primitive category. For variable-length types
 * (STRING, BINARY) the position is recorded in {@code keyPositionsSize} so the
 * actual size can be sampled at runtime, and only the fixed object overhead is
 * returned here.
 *
 * @param pos the position of the key
 * @param category the primitive category of the key's type
 * @return the estimated size of this datatype
 **/
private int getSize(int pos, PrimitiveCategory category) {
  switch (category) {
  case VOID:
  case BOOLEAN:
  case BYTE:
  case SHORT:
  case INT:
  case LONG:
  case FLOAT:
  case DOUBLE:
    return javaSizePrimitiveType;
  case STRING:
  case BINARY:
    // variable-length: remember the position so the size is sampled at runtime
    // (autoboxing replaces the deprecated new Integer(pos))
    keyPositionsSize.add(pos);
    return javaObjectOverHead;
  case TIMESTAMP:
    return javaObjectOverHead + javaSizePrimitiveType;
  default:
    return javaSizeUnknownType;
  }
}
/**
 * Returns the estimated in-memory size in bytes of the aggregation-buffer
 * field {@code f} at position {@code pos}. For variable-length fields
 * (String, ByteArrayRef) the field is recorded in {@code aggrPositions} so its
 * size can be sampled at runtime, and only the fixed object overhead is
 * returned here.
 *
 * @param pos the position of the aggregation
 * @param c the declared type of the field
 * @param f the field to be tracked if variable-length
 * @return the estimated size of this datatype
 **/
private int getSize(int pos, Class<?> c, Field f) {
  // Fixed-size primitives and their wrappers. Cached/valueOf instances replace
  // the deprecated boxing constructors; isInstance semantics are unchanged.
  if (c.isPrimitive()
      || c.isInstance(Boolean.TRUE)
      || c.isInstance(Byte.valueOf((byte) 0))
      || c.isInstance(Short.valueOf((short) 0))
      || c.isInstance(Integer.valueOf(0))
      || c.isInstance(Long.valueOf(0))
      || c.isInstance(Float.valueOf(0))
      || c.isInstance(Double.valueOf(0))) {
    return javaSizePrimitiveType;
  }

  if (c.isInstance(new Timestamp(0))) {
    return javaObjectOverHead + javaSizePrimitiveType;
  }

  if (c.isInstance("") || c.isInstance(new ByteArrayRef())) {
    // variable-length: track the field so its size is sampled at runtime
    if (aggrPositions[pos] == null) {
      aggrPositions[pos] = new ArrayList<Field>();
    }
    aggrPositions[pos].add(f);
    return javaObjectOverHead;
  }

  return javaSizeUnknownType;
}
/**
 * Returns the estimated in-memory size in bytes of the key at position
 * {@code pos}; delegates to the primitive-category overload when the type is
 * primitive, otherwise falls back to the unknown-type estimate.
 *
 * @param pos position of the key
 * @param typeInfo type of the input
 * @return the estimated size of this datatype
 **/
private int getSize(int pos, TypeInfo typeInfo) {
  if (!(typeInfo instanceof PrimitiveTypeInfo)) {
    return javaSizeUnknownType;
  }
  return getSize(pos, ((PrimitiveTypeInfo) typeInfo).getPrimitiveCategory());
}
/**
 * Estimates {@code fixedRowSize}, the fixed per-entry byte cost of the
 * map-side hash table, and records which key/aggregation positions hold
 * variable-length data so their size can be sampled at runtime.
 **/
private void estimateRowSize() throws HiveException {
  // estimate the size of each entry -
  // a datatype with unknown size (String/Struct etc. - is assumed to be 256
  // bytes for now).
  // 64 bytes is the overhead for a reference
  fixedRowSize = javaHashEntryOverHead;

  ArrayList<ExprNodeDesc> keys = conf.getKeys();

  // Go over all the keys and get the size of the fields of fixed length. Keep
  // track of the variable length keys
  for (int pos = 0; pos < keys.size(); pos++) {
    fixedRowSize += getSize(pos, keys.get(pos).getTypeInfo());
  }

  // Go over all the aggregation classes and get the size of the fields of
  // fixed length. Keep track of the variable length
  // fields in these aggregation classes.
  estimableAggregationEvaluators = new boolean[aggregationEvaluators.length];
  for (int i = 0; i < aggregationEvaluators.length; i++) {

    fixedRowSize += javaObjectOverHead;
    AggregationBuffer agg = aggregationEvaluators[i].getNewAggregationBuffer();
    if (GenericUDAFEvaluator.isEstimable(agg)) {
      // estimable buffers report their own size; no reflection needed
      estimableAggregationEvaluators[i] = true;
      continue;
    }
    Field[] fArr = ObjectInspectorUtils.getDeclaredNonStaticFields(agg.getClass());
    for (Field f : fArr) {
      fixedRowSize += getSize(i, f.getType(), f);
    }
  }
}
/**
 * Allocates one fresh aggregation buffer per aggregation evaluator.
 *
 * @return a newly created buffer array, one slot per evaluator
 * @throws HiveException if an evaluator fails to create its buffer
 */
protected AggregationBuffer[] newAggregations() throws HiveException {
  final int count = aggregationEvaluators.length;
  AggregationBuffer[] buffers = new AggregationBuffer[count];
  for (int idx = 0; idx < count; idx++) {
    buffers[idx] = aggregationEvaluators[idx].getNewAggregationBuffer();
  }
  return buffers;
}
/**
 * Resets every buffer in {@code aggs} via its corresponding evaluator.
 *
 * @param aggs buffers to reset, positionally aligned with the evaluators
 * @throws HiveException if an evaluator fails to reset its buffer
 */
protected void resetAggregations(AggregationBuffer[] aggs) throws HiveException {
  for (int idx = 0; idx < aggs.length; idx++) {
    aggregationEvaluators[idx].reset(aggs[idx]);
  }
}
/**
 * Update aggregations. If the aggregation is for distinct, in case of hash
 * aggregation, the client tells us whether it is a new entry. For sort-based
 * aggregations, the last row is compared with the current one to figure out
 * whether it has changed. As a cleanup, the lastInvoke logic can be pushed in
 * the caller, and this function can be independent of that. The client should
 * always notify whether it is a different row or not.
 *
 * @param aggs the aggregations to be evaluated
 * @param row the row being processed
 * @param rowInspector the inspector for the row
 * @param hashAggr whether hash aggregation is being performed or not
 * @param newEntryForHashAggr only valid if it is a hash aggregation, whether
 *          it is a new entry or not
 * @param lastInvoke per-aggregation copy of the previously seen parameter
 *          values; used only for sort-based distinct aggregation
 */
protected void updateAggregations(AggregationBuffer[] aggs, Object row,
    ObjectInspector rowInspector, boolean hashAggr,
    boolean newEntryForHashAggr, Object[][] lastInvoke) throws HiveException {

  // No union field in the key: plain map-side (or non-distinct) path.
  if (unionExprEval == null) {
    for (int ai = 0; ai < aggs.length; ai++) {

      // Calculate the parameters
      Object[] o = new Object[aggregationParameterFields[ai].length];
      for (int pi = 0; pi < aggregationParameterFields[ai].length; pi++) {
        o[pi] = aggregationParameterFields[ai][pi].evaluate(row);
      }

      // Update the aggregations.
      if (aggregationIsDistinct[ai]) {
        if (hashAggr) {
          if (newEntryForHashAggr) {
            aggregationEvaluators[ai].aggregate(aggs[ai], o);
          }
        } else {
          // sort-based: aggregate only when the parameters differ from the
          // previous invocation (rows arrive grouped)
          if (lastInvoke[ai] == null) {
            lastInvoke[ai] = new Object[o.length];
          }
          if (ObjectInspectorUtils.compare(o,
              aggregationParameterObjectInspectors[ai], lastInvoke[ai],
              aggregationParameterStandardObjectInspectors[ai]) != 0) {
            aggregationEvaluators[ai].aggregate(aggs[ai], o);
            for (int pi = 0; pi < o.length; pi++) {
              lastInvoke[ai][pi] = ObjectInspectorUtils.copyToStandardObject(
                  o[pi], aggregationParameterObjectInspectors[ai][pi],
                  ObjectInspectorCopyOption.WRITABLE);
            }
          }
        }
      } else {
        aggregationEvaluators[ai].aggregate(aggs[ai], o);
      }
    }
    return;
  }

  if (distinctKeyAggrs.size() > 0) {
    // evaluate union object
    UnionObject uo = (UnionObject) (unionExprEval.evaluate(row));
    int unionTag = uo.getTag();

    // update non-distinct key aggregations : "KEY._colx:t._coly"
    if (nonDistinctKeyAggrs.get(unionTag) != null) {
      for (int pos : nonDistinctKeyAggrs.get(unionTag)) {
        Object[] o = new Object[aggregationParameterFields[pos].length];
        for (int pi = 0; pi < aggregationParameterFields[pos].length; pi++) {
          o[pi] = aggregationParameterFields[pos][pi].evaluate(row);
        }
        aggregationEvaluators[pos].aggregate(aggs[pos], o);
      }
    }
    // there may be multi distinct clauses for one column
    // update them all.
    if (distinctKeyAggrs.get(unionTag) != null) {
      for (int i : distinctKeyAggrs.get(unionTag)) {
        Object[] o = new Object[aggregationParameterFields[i].length];
        for (int pi = 0; pi < aggregationParameterFields[i].length; pi++) {
          o[pi] = aggregationParameterFields[i][pi].evaluate(row);
        }

        if (hashAggr) {
          if (newEntryForHashAggr) {
            aggregationEvaluators[i].aggregate(aggs[i], o);
          }
        } else {
          // sort-based distinct: aggregate only on parameter change
          if (lastInvoke[i] == null) {
            lastInvoke[i] = new Object[o.length];
          }
          if (ObjectInspectorUtils.compare(o,
              aggregationParameterObjectInspectors[i],
              lastInvoke[i],
              aggregationParameterStandardObjectInspectors[i]) != 0) {
            aggregationEvaluators[i].aggregate(aggs[i], o);
            for (int pi = 0; pi < o.length; pi++) {
              lastInvoke[i][pi] = ObjectInspectorUtils.copyToStandardObject(
                  o[pi], aggregationParameterObjectInspectors[i][pi],
                  ObjectInspectorCopyOption.WRITABLE);
            }
          }
        }
      }
    }

    // update non-distinct groupby key or value aggregations: 'KEY._COLx or VALUE._colx'
    // these aggregations should be updated only once.
    if (unionTag == 0) {
      for (int pos : nonDistinctAggrs) {
        Object[] o = new Object[aggregationParameterFields[pos].length];
        for (int pi = 0; pi < aggregationParameterFields[pos].length; pi++) {
          o[pi] = aggregationParameterFields[pos][pi].evaluate(row);
        }
        aggregationEvaluators[pos].aggregate(aggs[pos], o);
      }
    }
  } else {
    for (int ai = 0; ai < aggs.length; ai++) {
      // there is no distinct aggregation,
      // update all aggregations
      Object[] o = new Object[aggregationParameterFields[ai].length];
      for (int pi = 0; pi < aggregationParameterFields[ai].length; pi++) {
        o[pi] = aggregationParameterFields[ai][pi].evaluate(row);
      }
      aggregationEvaluators[ai].aggregate(aggs[ai], o);
    }
  }
}
/**
 * Routes one row (with {@code newKeys} already populated) into the hash or
 * sort-based aggregation path, emitting a reporter heartbeat periodically.
 */
private void processKey(Object row,
    ObjectInspector rowInspector) throws HiveException {
  if (hashAggr) {
    newKeys.setHashKey();
    processHashAggr(row, rowInspector, newKeys);
  } else {
    processAggr(row, rowInspector, newKeys);
  }

  final boolean heartbeatDue =
      countAfterReport != 0 && countAfterReport % heartbeatInterval == 0;
  if (heartbeatDue && reporter != null) {
    reporter.progress();
    countAfterReport = 0;
  }
}
/**
 * Processes one input row: monitors hash-aggregation effectiveness (disabling
 * hash mode when it is not reducing rows enough), expands the row once per
 * grouping set when grouping sets are present, and forwards each expansion to
 * {@link #processKey}.
 *
 * @param row the input row
 * @param tag index into {@code inputObjInspectors} for this input
 * @throws HiveException wrapping any failure while aggregating
 */
@Override
public void process(Object row, int tag) throws HiveException {
  firstRow = false;
  ObjectInspector rowInspector = inputObjInspectors[tag];
  // Total number of input rows is needed for hash aggregation only
  if (hashAggr) {
    numRowsInput++;
    // if hash aggregation is not behaving properly, disable it
    if (numRowsInput == numRowsCompareHashAggr) {
      numRowsCompareHashAggr += groupbyMapAggrInterval;
      // FIX: the logged ratio previously used integer division
      // (1.0 * (numRowsHashTbl / numRowsInput)), which truncated to 0 or 1.
      double reduction = (double) numRowsHashTbl / numRowsInput;
      // map-side aggregation should reduce the entries by at-least half
      if (numRowsHashTbl > numRowsInput * minReductionHashAggr) {
        LOG.warn("Disable Hash Aggr: #hash table = " + numRowsHashTbl
            + " #total = " + numRowsInput + " reduction = " + reduction
            + " minReduction = " + minReductionHashAggr);
        flushHashTable(true);
        hashAggr = false;
      } else {
        if (isLogTraceEnabled) {
          LOG.trace("Hash Aggr Enabled: #hash table = " + numRowsHashTbl
              + " #total = " + numRowsInput + " reduction = " + reduction
              + " minReduction = " + minReductionHashAggr);
        }
      }
    }
  }

  try {
    countAfterReport++;
    newKeys.getNewKey(row, rowInspector);

    if (groupingSetsPresent) {
      Object[] newKeysArray = newKeys.getKeyArray();
      // snapshot the real key values before masking them per grouping set
      Object[] cloneNewKeysArray = new Object[newKeysArray.length];
      System.arraycopy(newKeysArray, 0, cloneNewKeysArray, 0, groupingSetsPosition);

      for (int groupingSetPos = 0; groupingSetPos < groupingSets.size(); groupingSetPos++) {
        for (int keyPos = 0; keyPos < groupingSetsPosition; keyPos++) {
          newKeysArray[keyPos] = null;
        }

        FastBitSet bitset = groupingSetsBitSet[groupingSetPos];
        // Some keys need to be left to null corresponding to that grouping set.
        for (int keyPos = bitset.nextClearBit(0); keyPos < groupingSetsPosition;
            keyPos = bitset.nextClearBit(keyPos + 1)) {
          newKeysArray[keyPos] = cloneNewKeysArray[keyPos];
        }

        // last key slot carries the grouping-set id
        newKeysArray[groupingSetsPosition] = newKeysGroupingSets[groupingSetPos];
        processKey(row, rowInspector);
      }
    } else {
      processKey(row, rowInspector);
    }
  } catch (HiveException e) {
    throw e;
  } catch (Exception e) {
    throw new HiveException(e);
  }
}
/**
 * Hash-based aggregation of one input row: finds (or creates) the
 * aggregation buffers for the row's key, updates them, and flushes part of
 * the hash table if memory limits require it.
 *
 * @param row the input row
 * @param rowInspector inspector describing the row's layout
 * @param newKeys the grouping key extracted from the row
 * @throws HiveException if aggregation or flushing fails
 */
private void processHashAggr(Object row, ObjectInspector rowInspector,
    KeyWrapper newKeys) throws HiveException {
  // Look up the aggregation buffers for this key, creating a fresh entry on
  // first sight of the key.
  AggregationBuffer[] buffers = hashAggregations.get(newKeys);
  boolean freshEntry = false;
  if (buffers == null) {
    KeyWrapper probeKey = newKeys.copyKey();
    buffers = newAggregations();
    hashAggregations.put(probeKey, buffers);
    freshEntry = true;
    numRowsHashTbl++; // new entry in the hash table
  }

  updateAggregations(buffers, row, rowInspector, true, freshEntry, null);

  // Flush only after updating; otherwise the entry just inserted could be
  // evicted before receiving its first update. The decision is based on the
  // user-specified memory parameters.
  if (shouldBeFlushed(newKeys)) {
    flushHashTable(false);
  }
}
// Non-hash aggregation
/**
 * Sort-based aggregation of one input row. Rows arrive grouped by key, so
 * when the incoming key differs from the current one the finished group is
 * forwarded and the aggregation buffers are reset for the new group
 * (unless the distinct optimization suppresses forwarding/resetting).
 *
 * @param row the input row
 * @param rowInspector inspector describing the row's layout
 * @param newKeys the grouping key extracted from the row
 * @throws HiveException if forwarding or aggregation fails
 */
private void processAggr(Object row,
    ObjectInspector rowInspector,
    KeyWrapper newKeys) throws HiveException {
  // Prepare aggs for updating
  AggregationBuffer[] aggs = null;
  Object[][] lastInvoke = null;
  //boolean keysAreEqual = (currentKeys != null && newKeys != null)?
  //    newKeyStructEqualComparer.areEqual(currentKeys, newKeys) : false;
  boolean keysAreEqual = (currentKeys != null && newKeys != null)?
      newKeys.equals(currentKeys) : false;

  // Forward the current keys if needed for sort-based aggregation
  if (currentKeys != null && !keysAreEqual) {
    // This is to optimize queries of the form:
    // select count(distinct key) from T
    // where T is sorted and bucketized by key
    // Partial aggregation is performed on the mapper, and the
    // reducer gets 1 row (partial result) per mapper.
    if (!conf.isDontResetAggrsDistinct()) {
      forward(currentKeys.getKeyArray(), aggregations);
      countAfterReport = 0;
    }
  }

  // Need to update the keys?
  if (currentKeys == null || !keysAreEqual) {
    // Either the very first group, or the start of a new group: remember the
    // new key (copying in place when a wrapper already exists).
    if (currentKeys == null) {
      currentKeys = newKeys.copyKey();
    } else {
      currentKeys.copyKey(newKeys);
    }

    // Reset the aggregations
    // For distincts optimization with sorting/bucketing, perform partial aggregation
    if (!conf.isDontResetAggrsDistinct()) {
      resetAggregations(aggregations);
    }

    // clear parameters in last-invoke
    for (int i = 0; i < aggregationsParametersLastInvoke.length; i++) {
      aggregationsParametersLastInvoke[i] = null;
    }
  }

  aggs = aggregations;
  lastInvoke = aggregationsParametersLastInvoke;
  // Update the aggs
  updateAggregations(aggs, row, rowInspector, false, false, lastInvoke);
}
/**
 * Based on user-parameters, should the hash table be flushed.
 *
 * <p>Every NUMROWSESTIMATESIZE entries this re-estimates the average
 * variable-length portion of a hash-table row (string/binary keys plus
 * estimable aggregation buffers) and recomputes how many entries fit in the
 * configured memory budget. It can also force a flush when overall heap
 * usage crosses the memory threshold.
 *
 * @param newKeys
 *          keys for the row under consideration
 * @return true if the hash table should be (partially) flushed now
 **/
private boolean shouldBeFlushed(KeyWrapper newKeys) {
  int numEntries = hashAggregations.size();
  long usedMemory;
  float rate;

  // The fixed size for the aggregation class is already known. Get the
  // variable portion of the size every NUMROWSESTIMATESIZE rows.
  if ((numEntriesHashTable == 0) || ((numEntries % NUMROWSESTIMATESIZE) == 0)) {
    //check how much memory left memory
    usedMemory = memoryMXBean.getHeapMemoryUsage().getUsed();
    // TODO: there is no easy and reliable way to compute the memory used by the executor threads and on-heap cache.
    // Assuming the used memory is equally divided among all executors.
    usedMemory = isLlap ? usedMemory / numExecutors : usedMemory;
    rate = (float) usedMemory / (float) maxMemory;
    if(rate > memoryThreshold){
      // On Tez, never flush before the first size estimate has been made.
      if (isTez && numEntriesHashTable == 0) {
        return false;
      } else {
        return true;
      }
    }
    // Accumulate the variable-length contribution of the key columns.
    // Note: totalVariableSize and numEntriesVarSize are fields, so the
    // average below is taken over all sampled rows so far.
    for (Integer pos : keyPositionsSize) {
      Object key = newKeys.getKeyArray()[pos.intValue()];
      // Ignore nulls
      if (key != null) {
        if (key instanceof LazyString) {
          totalVariableSize +=
              ((LazyPrimitive<LazyStringObjectInspector, Text>) key).
                  getWritableObject().getLength();
        } else if (key instanceof String) {
          totalVariableSize += ((String) key).length();
        } else if (key instanceof Text) {
          totalVariableSize += ((Text) key).getLength();
        } else if (key instanceof LazyBinary) {
          totalVariableSize +=
              ((LazyPrimitive<LazyBinaryObjectInspector, BytesWritable>) key).
                  getWritableObject().getLength();
        } else if (key instanceof BytesWritable) {
          totalVariableSize += ((BytesWritable) key).getLength();
        } else if (key instanceof ByteArrayRef) {
          totalVariableSize += ((ByteArrayRef) key).getData().length;
        }
      }
    }

    // Add the variable-length contribution of the aggregation buffers:
    // either the evaluator's own estimate or a reflective field scan.
    AggregationBuffer[] aggs = hashAggregations.get(newKeys);
    for (int i = 0; i < aggs.length; i++) {
      AggregationBuffer agg = aggs[i];
      if (estimableAggregationEvaluators[i]) {
        totalVariableSize += ((GenericUDAFEvaluator.AbstractAggregationBuffer)agg).estimate();
        continue;
      }
      if (aggrPositions[i] != null) {
        totalVariableSize += estimateSize(agg, aggrPositions[i]);
      }
    }

    numEntriesVarSize++;

    // Update the number of entries that can fit in the hash table
    numEntriesHashTable =
        (int) (maxHashTblMemory / (fixedRowSize + (totalVariableSize / numEntriesVarSize)));
    if (isLogTraceEnabled) {
      LOG.trace("Hash Aggr: #hash table = " + numEntries
          + " #max in hash table = " + numEntriesHashTable);
    }
  }

  // flush if necessary
  if (numEntries >= numEntriesHashTable) {
    return true;
  }
  return false;
}
/**
 * Best-effort, reflective estimate of the variable-size contribution of an
 * aggregation buffer: the summed lengths of its String and ByteArrayRef
 * fields. Fields that cannot be read are silently skipped.
 *
 * @param agg the aggregation buffer to inspect
 * @param fields the reflective fields of the buffer to measure
 * @return the accumulated length of the measurable fields
 */
private int estimateSize(AggregationBuffer agg, List<Field> fields) {
  int total = 0;
  for (Field field : fields) {
    try {
      Object value = field.get(agg);
      if (value instanceof String) {
        total += ((String) value).length();
      } else if (value instanceof ByteArrayRef) {
        total += ((ByteArrayRef) value).getData().length;
      }
    } catch (Exception ignored) {
      // Keep the estimate best-effort: an unreadable field contributes 0.
    }
  }
  return total;
}
/**
 * Flush hash table. This method is used by hash-based aggregations
 *
 * @param complete when true, forward and drop the entire table (the table
 *          reference is then nulled out); when false, forward and remove
 *          only roughly 10% of the entries to free memory
 * @throws HiveException if forwarding a record fails
 */
private void flushHashTable(boolean complete) throws HiveException {
  countAfterReport = 0;

  // Currently, the algorithm flushes 10% of the entries - this can be
  // changed in the future
  if (complete) {
    // Forward every entry, then drop the whole table.
    Iterator<Map.Entry<KeyWrapper, AggregationBuffer[]>> iter = hashAggregations
        .entrySet().iterator();
    while (iter.hasNext()) {
      Map.Entry<KeyWrapper, AggregationBuffer[]> m = iter.next();
      forward(m.getKey().getKeyArray(), m.getValue());
    }
    hashAggregations.clear();
    hashAggregations = null;
    if (isLogInfoEnabled) {
      LOG.info("Hash Table completed flushed");
    }
    return;
  }

  // Partial flush: forward and remove entries until ~10% of the original
  // size has been evicted.
  int oldSize = hashAggregations.size();
  if (isLogInfoEnabled) {
    LOG.info("Hash Tbl flush: #hash table = " + oldSize);
  }
  Iterator<Map.Entry<KeyWrapper, AggregationBuffer[]>> iter = hashAggregations
      .entrySet().iterator();
  int numDel = 0;
  while (iter.hasNext()) {
    Map.Entry<KeyWrapper, AggregationBuffer[]> m = iter.next();
    forward(m.getKey().getKeyArray(), m.getValue());
    iter.remove();
    numDel++;
    if (numDel * 10 >= oldSize) {
      if (isLogInfoEnabled) {
        LOG.info("Hash Table flushed: new size = " + hashAggregations.size());
      }
      return;
    }
  }
}
// Reusable output row: grouping keys followed by evaluated aggregation
// results. Allocated lazily because the row width is only known at runtime.
transient Object[] forwardCache;

/**
 * Forward a record of keys and aggregation results.
 *
 * @param keys the grouping keys of the record
 * @param aggs the aggregation buffers whose evaluated results follow the keys
 * @throws HiveException if evaluation or forwarding fails
 */
private void forward(Object[] keys, AggregationBuffer[] aggs) throws HiveException {
  if (forwardCache == null) {
    forwardCache = new Object[outputKeyLength + aggs.length];
  }

  System.arraycopy(keys, 0, forwardCache, 0, outputKeyLength);
  for (int pos = 0; pos < aggs.length; pos++) {
    forwardCache[outputKeyLength + pos] = aggregationEvaluators[pos].evaluate(aggs[pos]);
  }

  forward(forwardCache, outputObjInspector);
}
/**
 * Forward all aggregations to children. It is only used by DemuxOperator.
 *
 * <p>For hash-based aggregation every remaining entry is forwarded and the
 * table is emptied; for sort-based aggregation only the current group (if
 * any) is forwarded.
 *
 * @throws HiveException wrapping any failure while forwarding
 */
@Override
public void flush() throws HiveException {
  try {
    if (hashAggregations != null) {
      if (isLogInfoEnabled) {
        LOG.info("Begin Hash Table flush: size = "
            + hashAggregations.size());
      }
      // Fixed: use a typed iterator (consistent with flushHashTable) instead
      // of the previous raw Iterator plus unchecked cast.
      Iterator<Map.Entry<KeyWrapper, AggregationBuffer[]>> iter =
          hashAggregations.entrySet().iterator();
      while (iter.hasNext()) {
        Map.Entry<KeyWrapper, AggregationBuffer[]> m = iter.next();
        forward(m.getKey().getKeyArray(), m.getValue());
        iter.remove();
      }
      hashAggregations.clear();
    } else if (aggregations != null) {
      // sort-based aggregations
      if (currentKeys != null) {
        forward(currentKeys.getKeyArray(), aggregations);
      }
      currentKeys = null;
    } else {
      // The GroupByOperator is not initialized, which means there is no
      // data
      // (since we initialize the operators when we see the first record).
      // Just do nothing here.
    }
  } catch (Exception e) {
    throw new HiveException(e);
  }
}
/**
 * We need to forward all the aggregations to children.
 *
 * <p>On a clean close with no grouping key and no input rows, a single null
 * row is aggregated so that aggregates such as count() still emit one
 * result row; otherwise any buffered state is flushed.
 */
@Override
public void closeOp(boolean abort) throws HiveException {
  if (!abort) {
    try {
      // If there is no grouping key and no row came to this operator
      if (firstRow && (keyFields.length == 0)) {
        firstRow = false;

        // There is no grouping key - simulate a null row
        // This is based on the assumption that a null row is ignored by
        // aggregation functions
        for (int ai = 0; ai < aggregations.length; ai++) {

          // o is set to NULL in order to distinguish no rows at all
          Object[] o;
          if (aggregationParameterFields[ai].length > 0) {
            o = new Object[aggregationParameterFields[ai].length];
          } else {
            o = null;
          }

          // Calculate the parameters
          // (when o is null the loop below does not execute, so no NPE)
          for (int pi = 0; pi < aggregationParameterFields[ai].length; pi++) {
            o[pi] = null;
          }
          aggregationEvaluators[ai].aggregate(aggregations[ai], o);
        }

        // create dummy keys - size 0
        forward(new Object[0], aggregations);
      } else {
        flush();
      }
    } catch (Exception e) {
      throw new HiveException(e);
    }
  }
  // Release the hash table in all cases (including abort).
  hashAggregations = null;
  super.closeOp(abort);
}
// Group by contains the columns needed - no need to aggregate from children
/**
 * Collects the unique set of column names referenced by this group-by:
 * all grouping-key columns plus every aggregation-parameter column.
 *
 * @param opParseCtx parse-context map (unused, kept for interface
 *          compatibility)
 * @return the merged, duplicate-free list of referenced column names
 */
public List<String> genColLists(
    HashMap<Operator<? extends OperatorDesc>, OpParseContext> opParseCtx) {
  List<String> columns = new ArrayList<String>();

  for (ExprNodeDesc keyExpr : conf.getKeys()) {
    columns = Utilities.mergeUniqElems(columns, keyExpr.getCols());
  }

  for (AggregationDesc aggregation : conf.getAggregators()) {
    for (ExprNodeDesc param : aggregation.getParameters()) {
      columns = Utilities.mergeUniqElems(columns, param.getCols());
    }
  }

  return columns;
}
/**
 * @return the name of the operator
 */
@Override
public String getName() {
  // Delegate to the static accessor so the two name sources cannot diverge.
  return getOperatorName();
}
/** @return the constant operator name, {@code "GBY"}. */
public static String getOperatorName() {
  return "GBY";
}
@Override
public OperatorType getType() {
  // Identifies this operator as a group-by to the planner/explain machinery.
  return OperatorType.GROUPBY;
}
/**
 * A limit can be pushed above a reduce-side GBY because it emits a single
 * row per group; this does not necessarily hold for a map-side GBY, so the
 * pushdown is only accepted in MERGEPARTIAL and COMPLETE modes.
 */
@Override
public boolean acceptLimitPushdown() {
  GroupByDesc.Mode mode = getConf().getMode();
  return mode == GroupByDesc.Mode.MERGEPARTIAL
      || mode == GroupByDesc.Mode.COMPLETE;
}
}
| apache-2.0 |
paulnguyen/cmpe279 | modules/module8/java/signature/VerSig.java | 1713 |
import java.io.*;
import java.security.*;
import java.security.spec.*;
class VerSig {
public static void main(String[] args) {
/* Verify a DSA signature */
if (args.length != 3) {
System.out.println("Usage: VerSig " +
"publickeyfile signaturefile " + "datafile");
}
else try {
// Read in the encoded public key bytes
FileInputStream keyfis = new FileInputStream(args[0]);
byte[] encKey = new byte[keyfis.available()];
keyfis.read(encKey);
keyfis.close();
// Generate Public Key from Spec
X509EncodedKeySpec pubKeySpec = new X509EncodedKeySpec(encKey);
KeyFactory keyFactory = KeyFactory.getInstance("DSA", "SUN");
PublicKey pubKey = keyFactory.generatePublic(pubKeySpec);
// Read in the Signature Bytes
FileInputStream sigfis = new FileInputStream(args[1]);
byte[] sigToVerify = new byte[sigfis.available()];
sigfis.read(sigToVerify);
sigfis.close();
// Initialize the Signature Object for Verification
Signature sig = Signature.getInstance("SHA1withDSA", "SUN");
sig.initVerify(pubKey);
// Supply the Signature Object With the Data to be Verified
FileInputStream datafis = new FileInputStream(args[2]);
BufferedInputStream bufin = new BufferedInputStream(datafis);
byte[] buffer = new byte[1024];
int len;
while (bufin.available() != 0) {
len = bufin.read(buffer);
sig.update(buffer, 0, len);
};
bufin.close();
// Verify the Signature
boolean verifies = sig.verify(sigToVerify);
System.out.println("signature verifies: " + verifies);
} catch (Exception e) {
System.err.println("Caught exception " + e.toString());
}
}
}
| apache-2.0 |
tyazid/Exoplayer_VLC | Studio_Project/ExoPlayerDemo/src/main/java/com/google/android/exoplayer/demo/player/UdpRtpRendererBuilder.java | 3580 | package com.google.android.exoplayer.demo.player;
import android.content.Context;
import android.media.MediaCodec;
import android.net.Uri;
import android.net.wifi.WifiManager;
import android.os.Handler;
import android.util.Log;
import android.widget.TextView;
import com.google.android.exoplayer.MediaCodecAudioTrackRenderer;
import com.google.android.exoplayer.MediaCodecVideoTrackRenderer;
import com.google.android.exoplayer.SampleSource;
import com.google.android.exoplayer.TrackRenderer;
import com.google.android.exoplayer.demo.DemoUtil;
import com.google.android.exoplayer.raw.RawBufferedSource;
import com.google.android.exoplayer.raw.RawSampleSource;
import com.google.android.exoplayer.raw.RtpSampleSource;
import com.google.android.exoplayer.raw.parser.RawExtractor;
import com.google.android.exoplayer.raw.parser.TsExtractor;
import com.google.android.exoplayer.upstream.BufferPool;
import com.google.android.exoplayer.upstream.DataSource;
import com.google.android.exoplayer.upstream.DataSpec;
import com.google.android.exoplayer.upstream.RawHttpDataSource;
import com.google.android.exoplayer.upstream.UdpMulticastDataSource;
import java.io.IOException;
import java.net.InetAddress;
import java.net.MulticastSocket;
/**
* Created by ataldir on 26/02/2015.
*/
public class UdpRtpRendererBuilder implements DemoPlayer.RendererBuilder {
private static final String TAG = "UdpRtpRendererBuilder";
private static final int BUFFER_POOL_LENGTH = 256*1024;
private final Context context;
private final Uri uri;
private final TextView debugTextView;
private final String userAgent;
private final int playerType;
public UdpRtpRendererBuilder(Context context, String userAgent, String uri, TextView debugTextView, int playerType) {
this.context = context;
this.uri = Uri.parse(uri);
this.debugTextView = debugTextView;
this.userAgent = userAgent;
this.playerType = playerType;
}
@Override
public void buildRenderers(DemoPlayer player, DemoPlayer.RendererBuilderCallback callback) {
// Build the video and audio renderers.
Log.d(TAG, "buildRenderers(): uri=" + uri.toString());
Handler mainHandler = player.getMainHandler();
RawExtractor extractor = null;
BufferPool bufferPool = new BufferPool(this.BUFFER_POOL_LENGTH);
extractor = new TsExtractor(false, 0, bufferPool);
DataSource videoDataSource = new UdpMulticastDataSource();
DataSource rawSource = new RtpSampleSource(videoDataSource);
SampleSource sampleSource = new RawSampleSource(rawSource, this.uri, this.context, extractor);
MediaCodecVideoTrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(sampleSource, null, true, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000, null, mainHandler, player, 50);
MediaCodecAudioTrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(sampleSource);
// Build the debug renderer.
TrackRenderer debugRenderer = debugTextView != null
? new DebugTrackRenderer(debugTextView, videoRenderer)
: null;
// Invoke the callback.
TrackRenderer[] renderers = new TrackRenderer[DemoPlayer.RENDERER_COUNT];
renderers[DemoPlayer.TYPE_VIDEO] = videoRenderer;
renderers[DemoPlayer.TYPE_AUDIO] = audioRenderer;
renderers[DemoPlayer.TYPE_DEBUG] = debugRenderer;
callback.onRenderers(null, null, renderers);
}
}
| apache-2.0 |
beer-brew/beer-vending-machine-android | app/src/test/java/beer/brew/vendingmachine/ApplicationTestCase.java | 409 | package beer.brew.vendingmachine;
import org.junit.runner.RunWith;
import org.robolectric.RobolectricTestRunner;
import org.robolectric.annotation.Config;
import beer.brew.vendingmachine.util.DefaultConfig;
/**
 * Shared base class for Robolectric-powered unit tests: applies the common
 * runner and configuration (application package name and emulated SDK level
 * from DefaultConfig) so concrete test classes only declare test methods.
 */
@RunWith(RobolectricTestRunner.class)
@Config(constants = BuildConfig.class, packageName = DefaultConfig.APPLICATION_ID, sdk = DefaultConfig.EMULATE_SDK)
abstract public class ApplicationTestCase {
} | apache-2.0 |
ederign/kie-wb-common | kie-wb-common-forms/kie-wb-common-forms-commons/kie-wb-common-forms-serialization/src/main/java/org/kie/workbench/common/forms/serialization/FormDefinitionSerializer.java | 883 | /*
* Copyright 2016 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kie.workbench.common.forms.serialization;
import org.kie.workbench.common.forms.model.FormDefinition;
/**
 * Converts {@link FormDefinition} instances to and from their serialized
 * String representation.
 */
public interface FormDefinitionSerializer {

    /** Serializes the given form definition into its String representation. */
    String serialize(FormDefinition form);

    /** Reconstructs a form definition from its serialized String representation. */
    FormDefinition deserialize(String serializedForm);
}
| apache-2.0 |
sirthias/parboiled | parboiled-core/src/main/java/org/parboiled/matchers/StringMatcher.java | 1774 | /*
* Copyright (C) 2009-2011 Mathias Doenitz
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.parboiled.matchers;
import static org.parboiled.common.Preconditions.*;
import org.parboiled.MatcherContext;
import org.parboiled.Rule;
/**
 * A {@link SequenceMatcher} specialization for sequences of CharMatchers. Performs fast string matching if the
 * current context has it enabled.
 */
public class StringMatcher extends SequenceMatcher {

    /** The literal characters this matcher recognizes, in order. */
    public final char[] characters;

    public StringMatcher(Rule[] charMatchers, char[] characters) {
        super(checkArgNotNull(charMatchers, "charMatchers"));
        this.characters = characters;
    }

    @Override
    public String getLabel() {
        // Fixed: evaluate super.getLabel() once instead of calling it twice.
        String superLabel = super.getLabel();
        return superLabel != null ? superLabel : '"' + String.valueOf(characters) + '"';
    }

    @Override
    public boolean hasCustomLabel() {
        return true;
    }

    @Override
    public boolean match(MatcherContext context) {
        if (!context.fastStringMatching()) {
            // Fall back to matching the underlying CharMatcher sequence.
            return super.match(context);
        }

        // Fast path: test the whole literal against the input buffer at once.
        if (!context.getInputBuffer().test(context.getCurrentIndex(), characters)) {
            return false;
        }
        context.advanceIndex(characters.length);
        context.createNode();
        return true;
    }
}
| apache-2.0 |
java-prolog-connectivity/jpc | src/main/java/org/jpc/mapping/typesolver/catalog/ListTypeSolver.java | 443 | package org.jpc.mapping.typesolver.catalog;
import java.lang.reflect.Type;
import java.util.List;
import org.jconverter.typesolver.UnrecognizedObjectException;
import org.jconverter.typesolver.TypeSolver;
import org.jpc.term.Compound;
public class ListTypeSolver implements TypeSolver<Compound> {

    /**
     * Maps Prolog list terms to the Java {@link List} type.
     *
     * @param term the compound term to inspect
     * @return {@code List.class} when the term is a list
     * @throws UnrecognizedObjectException when the term is not a list
     */
    @Override
    public Type inferType(Compound term) {
        if (!term.isList()) {
            throw new UnrecognizedObjectException();
        }
        return List.class;
    }
}
| apache-2.0 |
googleapis/java-dialogflow | google-cloud-dialogflow/src/main/java/com/google/cloud/dialogflow/v2/stub/GrpcEntityTypesStub.java | 24902 | /*
* Copyright 2021 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.dialogflow.v2.stub;
import static com.google.cloud.dialogflow.v2.EntityTypesClient.ListEntityTypesPagedResponse;
import com.google.api.gax.core.BackgroundResource;
import com.google.api.gax.core.BackgroundResourceAggregation;
import com.google.api.gax.grpc.GrpcCallSettings;
import com.google.api.gax.grpc.GrpcStubCallableFactory;
import com.google.api.gax.rpc.ClientContext;
import com.google.api.gax.rpc.OperationCallable;
import com.google.api.gax.rpc.UnaryCallable;
import com.google.cloud.dialogflow.v2.BatchCreateEntitiesRequest;
import com.google.cloud.dialogflow.v2.BatchDeleteEntitiesRequest;
import com.google.cloud.dialogflow.v2.BatchDeleteEntityTypesRequest;
import com.google.cloud.dialogflow.v2.BatchUpdateEntitiesRequest;
import com.google.cloud.dialogflow.v2.BatchUpdateEntityTypesRequest;
import com.google.cloud.dialogflow.v2.BatchUpdateEntityTypesResponse;
import com.google.cloud.dialogflow.v2.CreateEntityTypeRequest;
import com.google.cloud.dialogflow.v2.DeleteEntityTypeRequest;
import com.google.cloud.dialogflow.v2.EntityType;
import com.google.cloud.dialogflow.v2.GetEntityTypeRequest;
import com.google.cloud.dialogflow.v2.ListEntityTypesRequest;
import com.google.cloud.dialogflow.v2.ListEntityTypesResponse;
import com.google.cloud.dialogflow.v2.UpdateEntityTypeRequest;
import com.google.common.collect.ImmutableMap;
import com.google.longrunning.Operation;
import com.google.longrunning.stub.GrpcOperationsStub;
import com.google.protobuf.Empty;
import com.google.protobuf.Struct;
import io.grpc.MethodDescriptor;
import io.grpc.protobuf.ProtoUtils;
import java.io.IOException;
import java.util.concurrent.TimeUnit;
import javax.annotation.Generated;
// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
* gRPC stub implementation for the EntityTypes service API.
*
* <p>This class is for advanced usage and reflects the underlying API directly.
*/
@Generated("by gapic-generator-java")
public class GrpcEntityTypesStub extends EntityTypesStub {
private static final MethodDescriptor<ListEntityTypesRequest, ListEntityTypesResponse>
listEntityTypesMethodDescriptor =
MethodDescriptor.<ListEntityTypesRequest, ListEntityTypesResponse>newBuilder()
.setType(MethodDescriptor.MethodType.UNARY)
.setFullMethodName("google.cloud.dialogflow.v2.EntityTypes/ListEntityTypes")
.setRequestMarshaller(
ProtoUtils.marshaller(ListEntityTypesRequest.getDefaultInstance()))
.setResponseMarshaller(
ProtoUtils.marshaller(ListEntityTypesResponse.getDefaultInstance()))
.build();
private static final MethodDescriptor<GetEntityTypeRequest, EntityType>
getEntityTypeMethodDescriptor =
MethodDescriptor.<GetEntityTypeRequest, EntityType>newBuilder()
.setType(MethodDescriptor.MethodType.UNARY)
.setFullMethodName("google.cloud.dialogflow.v2.EntityTypes/GetEntityType")
.setRequestMarshaller(
ProtoUtils.marshaller(GetEntityTypeRequest.getDefaultInstance()))
.setResponseMarshaller(ProtoUtils.marshaller(EntityType.getDefaultInstance()))
.build();
private static final MethodDescriptor<CreateEntityTypeRequest, EntityType>
createEntityTypeMethodDescriptor =
MethodDescriptor.<CreateEntityTypeRequest, EntityType>newBuilder()
.setType(MethodDescriptor.MethodType.UNARY)
.setFullMethodName("google.cloud.dialogflow.v2.EntityTypes/CreateEntityType")
.setRequestMarshaller(
ProtoUtils.marshaller(CreateEntityTypeRequest.getDefaultInstance()))
.setResponseMarshaller(ProtoUtils.marshaller(EntityType.getDefaultInstance()))
.build();
private static final MethodDescriptor<UpdateEntityTypeRequest, EntityType>
updateEntityTypeMethodDescriptor =
MethodDescriptor.<UpdateEntityTypeRequest, EntityType>newBuilder()
.setType(MethodDescriptor.MethodType.UNARY)
.setFullMethodName("google.cloud.dialogflow.v2.EntityTypes/UpdateEntityType")
.setRequestMarshaller(
ProtoUtils.marshaller(UpdateEntityTypeRequest.getDefaultInstance()))
.setResponseMarshaller(ProtoUtils.marshaller(EntityType.getDefaultInstance()))
.build();
private static final MethodDescriptor<DeleteEntityTypeRequest, Empty>
deleteEntityTypeMethodDescriptor =
MethodDescriptor.<DeleteEntityTypeRequest, Empty>newBuilder()
.setType(MethodDescriptor.MethodType.UNARY)
.setFullMethodName("google.cloud.dialogflow.v2.EntityTypes/DeleteEntityType")
.setRequestMarshaller(
ProtoUtils.marshaller(DeleteEntityTypeRequest.getDefaultInstance()))
.setResponseMarshaller(ProtoUtils.marshaller(Empty.getDefaultInstance()))
.build();
private static final MethodDescriptor<BatchUpdateEntityTypesRequest, Operation>
batchUpdateEntityTypesMethodDescriptor =
MethodDescriptor.<BatchUpdateEntityTypesRequest, Operation>newBuilder()
.setType(MethodDescriptor.MethodType.UNARY)
.setFullMethodName("google.cloud.dialogflow.v2.EntityTypes/BatchUpdateEntityTypes")
.setRequestMarshaller(
ProtoUtils.marshaller(BatchUpdateEntityTypesRequest.getDefaultInstance()))
.setResponseMarshaller(ProtoUtils.marshaller(Operation.getDefaultInstance()))
.build();
private static final MethodDescriptor<BatchDeleteEntityTypesRequest, Operation>
batchDeleteEntityTypesMethodDescriptor =
MethodDescriptor.<BatchDeleteEntityTypesRequest, Operation>newBuilder()
.setType(MethodDescriptor.MethodType.UNARY)
.setFullMethodName("google.cloud.dialogflow.v2.EntityTypes/BatchDeleteEntityTypes")
.setRequestMarshaller(
ProtoUtils.marshaller(BatchDeleteEntityTypesRequest.getDefaultInstance()))
.setResponseMarshaller(ProtoUtils.marshaller(Operation.getDefaultInstance()))
.build();
private static final MethodDescriptor<BatchCreateEntitiesRequest, Operation>
batchCreateEntitiesMethodDescriptor =
MethodDescriptor.<BatchCreateEntitiesRequest, Operation>newBuilder()
.setType(MethodDescriptor.MethodType.UNARY)
.setFullMethodName("google.cloud.dialogflow.v2.EntityTypes/BatchCreateEntities")
.setRequestMarshaller(
ProtoUtils.marshaller(BatchCreateEntitiesRequest.getDefaultInstance()))
.setResponseMarshaller(ProtoUtils.marshaller(Operation.getDefaultInstance()))
.build();
private static final MethodDescriptor<BatchUpdateEntitiesRequest, Operation>
batchUpdateEntitiesMethodDescriptor =
MethodDescriptor.<BatchUpdateEntitiesRequest, Operation>newBuilder()
.setType(MethodDescriptor.MethodType.UNARY)
.setFullMethodName("google.cloud.dialogflow.v2.EntityTypes/BatchUpdateEntities")
.setRequestMarshaller(
ProtoUtils.marshaller(BatchUpdateEntitiesRequest.getDefaultInstance()))
.setResponseMarshaller(ProtoUtils.marshaller(Operation.getDefaultInstance()))
.build();
private static final MethodDescriptor<BatchDeleteEntitiesRequest, Operation>
batchDeleteEntitiesMethodDescriptor =
MethodDescriptor.<BatchDeleteEntitiesRequest, Operation>newBuilder()
.setType(MethodDescriptor.MethodType.UNARY)
.setFullMethodName("google.cloud.dialogflow.v2.EntityTypes/BatchDeleteEntities")
.setRequestMarshaller(
ProtoUtils.marshaller(BatchDeleteEntitiesRequest.getDefaultInstance()))
.setResponseMarshaller(ProtoUtils.marshaller(Operation.getDefaultInstance()))
.build();
private final UnaryCallable<ListEntityTypesRequest, ListEntityTypesResponse>
listEntityTypesCallable;
private final UnaryCallable<ListEntityTypesRequest, ListEntityTypesPagedResponse>
listEntityTypesPagedCallable;
private final UnaryCallable<GetEntityTypeRequest, EntityType> getEntityTypeCallable;
private final UnaryCallable<CreateEntityTypeRequest, EntityType> createEntityTypeCallable;
private final UnaryCallable<UpdateEntityTypeRequest, EntityType> updateEntityTypeCallable;
private final UnaryCallable<DeleteEntityTypeRequest, Empty> deleteEntityTypeCallable;
private final UnaryCallable<BatchUpdateEntityTypesRequest, Operation>
batchUpdateEntityTypesCallable;
private final OperationCallable<
BatchUpdateEntityTypesRequest, BatchUpdateEntityTypesResponse, Struct>
batchUpdateEntityTypesOperationCallable;
private final UnaryCallable<BatchDeleteEntityTypesRequest, Operation>
batchDeleteEntityTypesCallable;
private final OperationCallable<BatchDeleteEntityTypesRequest, Empty, Struct>
batchDeleteEntityTypesOperationCallable;
private final UnaryCallable<BatchCreateEntitiesRequest, Operation> batchCreateEntitiesCallable;
private final OperationCallable<BatchCreateEntitiesRequest, Empty, Struct>
batchCreateEntitiesOperationCallable;
private final UnaryCallable<BatchUpdateEntitiesRequest, Operation> batchUpdateEntitiesCallable;
private final OperationCallable<BatchUpdateEntitiesRequest, Empty, Struct>
batchUpdateEntitiesOperationCallable;
private final UnaryCallable<BatchDeleteEntitiesRequest, Operation> batchDeleteEntitiesCallable;
private final OperationCallable<BatchDeleteEntitiesRequest, Empty, Struct>
batchDeleteEntitiesOperationCallable;
private final BackgroundResource backgroundResources;
private final GrpcOperationsStub operationsStub;
private final GrpcStubCallableFactory callableFactory;
public static final GrpcEntityTypesStub create(EntityTypesStubSettings settings)
throws IOException {
return new GrpcEntityTypesStub(settings, ClientContext.create(settings));
}
public static final GrpcEntityTypesStub create(ClientContext clientContext) throws IOException {
return new GrpcEntityTypesStub(EntityTypesStubSettings.newBuilder().build(), clientContext);
}
public static final GrpcEntityTypesStub create(
ClientContext clientContext, GrpcStubCallableFactory callableFactory) throws IOException {
return new GrpcEntityTypesStub(
EntityTypesStubSettings.newBuilder().build(), clientContext, callableFactory);
}
/**
* Constructs an instance of GrpcEntityTypesStub, using the given settings. This is protected so
* that it is easy to make a subclass, but otherwise, the static factory methods should be
* preferred.
*/
protected GrpcEntityTypesStub(EntityTypesStubSettings settings, ClientContext clientContext)
throws IOException {
this(settings, clientContext, new GrpcEntityTypesCallableFactory());
}
/**
* Constructs an instance of GrpcEntityTypesStub, using the given settings. This is protected so
* that it is easy to make a subclass, but otherwise, the static factory methods should be
* preferred.
*/
  protected GrpcEntityTypesStub(
      EntityTypesStubSettings settings,
      ClientContext clientContext,
      GrpcStubCallableFactory callableFactory)
      throws IOException {
    this.callableFactory = callableFactory;
    // Operations stub serves the long-running-operation polling for the batch RPCs below.
    this.operationsStub = GrpcOperationsStub.create(clientContext, callableFactory);
    // Transport settings: bind each RPC to its method descriptor plus a params extractor that
    // populates the request routing header from a request field.
    GrpcCallSettings<ListEntityTypesRequest, ListEntityTypesResponse>
        listEntityTypesTransportSettings =
            GrpcCallSettings.<ListEntityTypesRequest, ListEntityTypesResponse>newBuilder()
                .setMethodDescriptor(listEntityTypesMethodDescriptor)
                .setParamsExtractor(
                    request -> {
                      ImmutableMap.Builder<String, String> params = ImmutableMap.builder();
                      params.put("parent", String.valueOf(request.getParent()));
                      return params.build();
                    })
                .build();
    GrpcCallSettings<GetEntityTypeRequest, EntityType> getEntityTypeTransportSettings =
        GrpcCallSettings.<GetEntityTypeRequest, EntityType>newBuilder()
            .setMethodDescriptor(getEntityTypeMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  ImmutableMap.Builder<String, String> params = ImmutableMap.builder();
                  params.put("name", String.valueOf(request.getName()));
                  return params.build();
                })
            .build();
    GrpcCallSettings<CreateEntityTypeRequest, EntityType> createEntityTypeTransportSettings =
        GrpcCallSettings.<CreateEntityTypeRequest, EntityType>newBuilder()
            .setMethodDescriptor(createEntityTypeMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  ImmutableMap.Builder<String, String> params = ImmutableMap.builder();
                  params.put("parent", String.valueOf(request.getParent()));
                  return params.build();
                })
            .build();
    GrpcCallSettings<UpdateEntityTypeRequest, EntityType> updateEntityTypeTransportSettings =
        GrpcCallSettings.<UpdateEntityTypeRequest, EntityType>newBuilder()
            .setMethodDescriptor(updateEntityTypeMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  ImmutableMap.Builder<String, String> params = ImmutableMap.builder();
                  // Update routes on the nested resource name rather than a top-level field.
                  params.put("entity_type.name", String.valueOf(request.getEntityType().getName()));
                  return params.build();
                })
            .build();
    GrpcCallSettings<DeleteEntityTypeRequest, Empty> deleteEntityTypeTransportSettings =
        GrpcCallSettings.<DeleteEntityTypeRequest, Empty>newBuilder()
            .setMethodDescriptor(deleteEntityTypeMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  ImmutableMap.Builder<String, String> params = ImmutableMap.builder();
                  params.put("name", String.valueOf(request.getName()));
                  return params.build();
                })
            .build();
    GrpcCallSettings<BatchUpdateEntityTypesRequest, Operation>
        batchUpdateEntityTypesTransportSettings =
            GrpcCallSettings.<BatchUpdateEntityTypesRequest, Operation>newBuilder()
                .setMethodDescriptor(batchUpdateEntityTypesMethodDescriptor)
                .setParamsExtractor(
                    request -> {
                      ImmutableMap.Builder<String, String> params = ImmutableMap.builder();
                      params.put("parent", String.valueOf(request.getParent()));
                      return params.build();
                    })
                .build();
    GrpcCallSettings<BatchDeleteEntityTypesRequest, Operation>
        batchDeleteEntityTypesTransportSettings =
            GrpcCallSettings.<BatchDeleteEntityTypesRequest, Operation>newBuilder()
                .setMethodDescriptor(batchDeleteEntityTypesMethodDescriptor)
                .setParamsExtractor(
                    request -> {
                      ImmutableMap.Builder<String, String> params = ImmutableMap.builder();
                      params.put("parent", String.valueOf(request.getParent()));
                      return params.build();
                    })
                .build();
    GrpcCallSettings<BatchCreateEntitiesRequest, Operation> batchCreateEntitiesTransportSettings =
        GrpcCallSettings.<BatchCreateEntitiesRequest, Operation>newBuilder()
            .setMethodDescriptor(batchCreateEntitiesMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  ImmutableMap.Builder<String, String> params = ImmutableMap.builder();
                  params.put("parent", String.valueOf(request.getParent()));
                  return params.build();
                })
            .build();
    GrpcCallSettings<BatchUpdateEntitiesRequest, Operation> batchUpdateEntitiesTransportSettings =
        GrpcCallSettings.<BatchUpdateEntitiesRequest, Operation>newBuilder()
            .setMethodDescriptor(batchUpdateEntitiesMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  ImmutableMap.Builder<String, String> params = ImmutableMap.builder();
                  params.put("parent", String.valueOf(request.getParent()));
                  return params.build();
                })
            .build();
    GrpcCallSettings<BatchDeleteEntitiesRequest, Operation> batchDeleteEntitiesTransportSettings =
        GrpcCallSettings.<BatchDeleteEntitiesRequest, Operation>newBuilder()
            .setMethodDescriptor(batchDeleteEntitiesMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  ImmutableMap.Builder<String, String> params = ImmutableMap.builder();
                  params.put("parent", String.valueOf(request.getParent()));
                  return params.build();
                })
            .build();
    // Combine each transport setting with the per-RPC policy settings (retry/timeout etc.)
    // carried by the stub settings; batch RPCs additionally get operation callables that poll
    // through the operations stub created above.
    this.listEntityTypesCallable =
        callableFactory.createUnaryCallable(
            listEntityTypesTransportSettings, settings.listEntityTypesSettings(), clientContext);
    this.listEntityTypesPagedCallable =
        callableFactory.createPagedCallable(
            listEntityTypesTransportSettings, settings.listEntityTypesSettings(), clientContext);
    this.getEntityTypeCallable =
        callableFactory.createUnaryCallable(
            getEntityTypeTransportSettings, settings.getEntityTypeSettings(), clientContext);
    this.createEntityTypeCallable =
        callableFactory.createUnaryCallable(
            createEntityTypeTransportSettings, settings.createEntityTypeSettings(), clientContext);
    this.updateEntityTypeCallable =
        callableFactory.createUnaryCallable(
            updateEntityTypeTransportSettings, settings.updateEntityTypeSettings(), clientContext);
    this.deleteEntityTypeCallable =
        callableFactory.createUnaryCallable(
            deleteEntityTypeTransportSettings, settings.deleteEntityTypeSettings(), clientContext);
    this.batchUpdateEntityTypesCallable =
        callableFactory.createUnaryCallable(
            batchUpdateEntityTypesTransportSettings,
            settings.batchUpdateEntityTypesSettings(),
            clientContext);
    this.batchUpdateEntityTypesOperationCallable =
        callableFactory.createOperationCallable(
            batchUpdateEntityTypesTransportSettings,
            settings.batchUpdateEntityTypesOperationSettings(),
            clientContext,
            operationsStub);
    this.batchDeleteEntityTypesCallable =
        callableFactory.createUnaryCallable(
            batchDeleteEntityTypesTransportSettings,
            settings.batchDeleteEntityTypesSettings(),
            clientContext);
    this.batchDeleteEntityTypesOperationCallable =
        callableFactory.createOperationCallable(
            batchDeleteEntityTypesTransportSettings,
            settings.batchDeleteEntityTypesOperationSettings(),
            clientContext,
            operationsStub);
    this.batchCreateEntitiesCallable =
        callableFactory.createUnaryCallable(
            batchCreateEntitiesTransportSettings,
            settings.batchCreateEntitiesSettings(),
            clientContext);
    this.batchCreateEntitiesOperationCallable =
        callableFactory.createOperationCallable(
            batchCreateEntitiesTransportSettings,
            settings.batchCreateEntitiesOperationSettings(),
            clientContext,
            operationsStub);
    this.batchUpdateEntitiesCallable =
        callableFactory.createUnaryCallable(
            batchUpdateEntitiesTransportSettings,
            settings.batchUpdateEntitiesSettings(),
            clientContext);
    this.batchUpdateEntitiesOperationCallable =
        callableFactory.createOperationCallable(
            batchUpdateEntitiesTransportSettings,
            settings.batchUpdateEntitiesOperationSettings(),
            clientContext,
            operationsStub);
    this.batchDeleteEntitiesCallable =
        callableFactory.createUnaryCallable(
            batchDeleteEntitiesTransportSettings,
            settings.batchDeleteEntitiesSettings(),
            clientContext);
    this.batchDeleteEntitiesOperationCallable =
        callableFactory.createOperationCallable(
            batchDeleteEntitiesTransportSettings,
            settings.batchDeleteEntitiesOperationSettings(),
            clientContext,
            operationsStub);
    // Aggregate everything that must be shut down when this stub is closed.
    this.backgroundResources =
        new BackgroundResourceAggregation(clientContext.getBackgroundResources());
  }
  /** Returns the stub used to poll long-running operations started by the batch RPCs. */
  public GrpcOperationsStub getOperationsStub() {
    return operationsStub;
  }
  /** Returns the callable that invokes the ListEntityTypes RPC. */
  @Override
  public UnaryCallable<ListEntityTypesRequest, ListEntityTypesResponse> listEntityTypesCallable() {
    return listEntityTypesCallable;
  }
  /** Returns the callable that invokes ListEntityTypes with automatic page handling. */
  @Override
  public UnaryCallable<ListEntityTypesRequest, ListEntityTypesPagedResponse>
      listEntityTypesPagedCallable() {
    return listEntityTypesPagedCallable;
  }
  /** Returns the callable that invokes the GetEntityType RPC. */
  @Override
  public UnaryCallable<GetEntityTypeRequest, EntityType> getEntityTypeCallable() {
    return getEntityTypeCallable;
  }
  /** Returns the callable that invokes the CreateEntityType RPC. */
  @Override
  public UnaryCallable<CreateEntityTypeRequest, EntityType> createEntityTypeCallable() {
    return createEntityTypeCallable;
  }
  /** Returns the callable that invokes the UpdateEntityType RPC. */
  @Override
  public UnaryCallable<UpdateEntityTypeRequest, EntityType> updateEntityTypeCallable() {
    return updateEntityTypeCallable;
  }
  /** Returns the callable that invokes the DeleteEntityType RPC. */
  @Override
  public UnaryCallable<DeleteEntityTypeRequest, Empty> deleteEntityTypeCallable() {
    return deleteEntityTypeCallable;
  }
  /** Returns the callable that starts the BatchUpdateEntityTypes long-running operation. */
  @Override
  public UnaryCallable<BatchUpdateEntityTypesRequest, Operation> batchUpdateEntityTypesCallable() {
    return batchUpdateEntityTypesCallable;
  }
  /** Returns the operation callable that polls BatchUpdateEntityTypes to completion. */
  @Override
  public OperationCallable<BatchUpdateEntityTypesRequest, BatchUpdateEntityTypesResponse, Struct>
      batchUpdateEntityTypesOperationCallable() {
    return batchUpdateEntityTypesOperationCallable;
  }
  /** Returns the callable that starts the BatchDeleteEntityTypes long-running operation. */
  @Override
  public UnaryCallable<BatchDeleteEntityTypesRequest, Operation> batchDeleteEntityTypesCallable() {
    return batchDeleteEntityTypesCallable;
  }
  /** Returns the operation callable that polls BatchDeleteEntityTypes to completion. */
  @Override
  public OperationCallable<BatchDeleteEntityTypesRequest, Empty, Struct>
      batchDeleteEntityTypesOperationCallable() {
    return batchDeleteEntityTypesOperationCallable;
  }
  /** Returns the callable that starts the BatchCreateEntities long-running operation. */
  @Override
  public UnaryCallable<BatchCreateEntitiesRequest, Operation> batchCreateEntitiesCallable() {
    return batchCreateEntitiesCallable;
  }
  /** Returns the operation callable that polls BatchCreateEntities to completion. */
  @Override
  public OperationCallable<BatchCreateEntitiesRequest, Empty, Struct>
      batchCreateEntitiesOperationCallable() {
    return batchCreateEntitiesOperationCallable;
  }
  /** Returns the callable that starts the BatchUpdateEntities long-running operation. */
  @Override
  public UnaryCallable<BatchUpdateEntitiesRequest, Operation> batchUpdateEntitiesCallable() {
    return batchUpdateEntitiesCallable;
  }
  /** Returns the operation callable that polls BatchUpdateEntities to completion. */
  @Override
  public OperationCallable<BatchUpdateEntitiesRequest, Empty, Struct>
      batchUpdateEntitiesOperationCallable() {
    return batchUpdateEntitiesOperationCallable;
  }
  /** Returns the callable that starts the BatchDeleteEntities long-running operation. */
  @Override
  public UnaryCallable<BatchDeleteEntitiesRequest, Operation> batchDeleteEntitiesCallable() {
    return batchDeleteEntitiesCallable;
  }
  /** Returns the operation callable that polls BatchDeleteEntities to completion. */
  @Override
  public OperationCallable<BatchDeleteEntitiesRequest, Empty, Struct>
      batchDeleteEntitiesOperationCallable() {
    return batchDeleteEntitiesOperationCallable;
  }
  /** Releases all background resources held by this stub. */
  @Override
  public final void close() {
    try {
      backgroundResources.close();
    } catch (RuntimeException e) {
      // Unchecked exceptions propagate unchanged.
      throw e;
    } catch (Exception e) {
      // close() declares no checked exceptions, so wrap anything checked.
      throw new IllegalStateException("Failed to close resource", e);
    }
  }
  /** Initiates an orderly shutdown of the underlying background resources. */
  @Override
  public void shutdown() {
    backgroundResources.shutdown();
  }
  /** Returns whether shutdown has been initiated on the background resources. */
  @Override
  public boolean isShutdown() {
    return backgroundResources.isShutdown();
  }
  /** Returns whether the background resources have fully terminated. */
  @Override
  public boolean isTerminated() {
    return backgroundResources.isTerminated();
  }
  /** Forces an immediate shutdown of the underlying background resources. */
  @Override
  public void shutdownNow() {
    backgroundResources.shutdownNow();
  }
  /** Blocks until the background resources terminate or the timeout elapses. */
  @Override
  public boolean awaitTermination(long duration, TimeUnit unit) throws InterruptedException {
    return backgroundResources.awaitTermination(duration, unit);
  }
}
| apache-2.0 |
dagnir/aws-sdk-java | aws-java-sdk-emr/src/main/java/com/amazonaws/services/elasticmapreduce/model/transform/DeleteSecurityConfigurationResultJsonUnmarshaller.java | 1773 | /*
* Copyright 2012-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.elasticmapreduce.model.transform;
import java.math.*;
import javax.annotation.Generated;
import com.amazonaws.services.elasticmapreduce.model.*;
import com.amazonaws.transform.SimpleTypeJsonUnmarshallers.*;
import com.amazonaws.transform.*;
import static com.fasterxml.jackson.core.JsonToken.*;
/**
 * DeleteSecurityConfigurationResult JSON Unmarshaller.
 *
 * <p>The result type carries no fields, so unmarshalling simply produces a fresh, empty result
 * without reading any tokens from the context.
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class DeleteSecurityConfigurationResultJsonUnmarshaller implements Unmarshaller<DeleteSecurityConfigurationResult, JsonUnmarshallerContext> {

    // Lazily created shared instance; the unmarshaller is stateless so reuse is safe.
    private static DeleteSecurityConfigurationResultJsonUnmarshaller instance;

    public DeleteSecurityConfigurationResult unmarshall(JsonUnmarshallerContext context) throws Exception {
        return new DeleteSecurityConfigurationResult();
    }

    public static DeleteSecurityConfigurationResultJsonUnmarshaller getInstance() {
        if (instance == null) {
            instance = new DeleteSecurityConfigurationResultJsonUnmarshaller();
        }
        return instance;
    }
}
| apache-2.0 |
olamy/archiva | archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/repository/RepositoryContent.java | 1875 | package org.apache.archiva.repository;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.archiva.model.ArtifactReference;
import org.apache.archiva.model.VersionedReference;
/**
 * Common aspects of content provider interfaces
 */
public interface RepositoryContent
{
    /**
     * Given a repository relative path to a filename, return the {@link ArtifactReference} object
     * suitable for the path.
     *
     * @param path the path relative to the repository base dir for the artifact.
     * @return the {@link ArtifactReference} representing the path. (or null if path cannot be converted to
     *         a {@link ArtifactReference})
     * @throws LayoutException if there was a problem converting the path to an artifact.
     */
    ArtifactReference toArtifactReference( String path )
        throws LayoutException;

    /**
     * Given an {@link ArtifactReference}, return the relative path to the artifact.
     *
     * @param reference the artifact reference to use.
     * @return the relative path to the artifact.
     */
    String toPath( ArtifactReference reference );
}
| apache-2.0 |
Kukanani/G-Force | src/com/gamebrewers/speedgame09/Constants.java | 347 | package com.gamebrewers.speedgame09;
import java.util.Random;
/**
 * Shared global state and small utility helpers for the game.
 *
 * <p>Holds screen geometry values that other classes read and write directly, plus a single
 * shared {@link Random} instance. Not thread-safe; intended for single-threaded game-loop use.
 */
public class Constants {
    // Screen offset and size in pixels; assigned elsewhere during setup.
    public static int offsetX = 0, offsetY = 0, screenSizeX = 0, screenSizeY = 0;
    // One shared RNG so all game code draws from the same sequence.
    public static Random random = new Random();

    /** Non-instantiable utility class. */
    private Constants() {}

    /**
     * Clamps a color component into the valid 8-bit range.
     *
     * @param c the candidate color component
     * @return {@code c} clamped to the inclusive range [0, 255]
     */
    public static int assertColor(int c) {
        // Math.min/Math.max express the clamp more directly than the original if-chain.
        return Math.max(0, Math.min(255, c));
    }
}
| apache-2.0 |
spinaki/android-camera | library/src/main/java/xyz/pinaki/android/camera/preview/TextureViewPreview.java | 4823 | package xyz.pinaki.android.camera.preview;
import android.content.Context;
import android.graphics.Matrix;
import android.graphics.SurfaceTexture;
import android.util.Log;
import android.view.Surface;
import android.view.TextureView;
import android.view.View;
import android.view.ViewGroup;
/**
 * A camera viewfinder preview backed by a {@link TextureView}.
 *
 * <p>The view is created in {@link #start()}, inserted as the first child of the parent view
 * group, and removed again in {@link #stop()}. Surface lifecycle events from the
 * {@link TextureView.SurfaceTextureListener} are forwarded through the dispatch methods
 * inherited from {@link ViewFinderPreview}.
 */
public final class TextureViewPreview extends ViewFinderPreview {
    private static final String TAG = TextureViewPreview.class.getName();
    // The actual preview view; non-null only between start() and stop().
    private TextureView textureView;
    ViewGroup parentView;
    Context context;
    // Display rotation in degrees; read only by configureTransform(), whose call sites are
    // currently commented out.
    private int displayOrientation;
    public TextureViewPreview(Context c, ViewGroup parent, Callback callback) {
        super(callback);
        parentView = parent;
        context = c;
    }
    /** Returns a Surface wrapping the current SurfaceTexture; only valid after start(). */
    @Override
    public Surface getSurface() {
        return new Surface(textureView.getSurfaceTexture());
    }
    @Override
    public SurfaceTexture getSurfaceTexture() {
        return textureView.getSurfaceTexture();
    }
    @Override
    public View getView() {
        return textureView;
    }
    /**
     * Creates the TextureView, attaches it to the parent, and wires surface lifecycle events
     * to the preview callbacks.
     */
    @Override
    public void start() {
        textureView = new TextureView(context);
        parentView.addView(textureView, 0);
        textureView.setSurfaceTextureListener(new TextureView.SurfaceTextureListener() {
            @Override
            public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
                Log.i(TAG, "TextureViewPreview onSurfaceTextureAvailable");
                setSize(width, height);
                // configureTransform(); // TODO: necessary ?
                // dispatchSurfaceCreated();
                dispatchSurfaceChanged(); // TODO: check ?
            }
            @Override
            public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
                Log.i(TAG, "TextureViewPreview onSurfaceTextureSizeChanged");
                setSize(width, height);
                // configureTransform();
                // changes is already triggered from onSurfaceTextureAvailable -- so might not be needed here.
                // TODO verify this
                // dispatchSurfaceChanged();
            }
            @Override
            public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
                // Returning true tells the TextureView to release the SurfaceTexture itself.
                setSize(0, 0);
                dispatchSurfaceDestroyed();
                return true;
            }
            @Override
            public void onSurfaceTextureUpdated(SurfaceTexture surface) {
                // do nothing ??
            }
        });
    }
    // only from camera2 ??
    // @Override
    // void setDisplayOrientation(int d) {
    //     displayOrientation = d;
    //     configureTransform();
    // }
    // does nothing for camera 1 since displayOrientation is not set
    // Applies a rotation/flip matrix to the TextureView so landscape and upside-down displays
    // render upright. Currently unreachable: nothing assigns displayOrientation (see above).
    private void configureTransform() {
        Matrix matrix = new Matrix();
        if (displayOrientation % 180 == 90) {
            final int width = getWidth();
            final int height = getHeight();
            // Rotate the camera preview when the screen is landscape.
            matrix.setPolyToPoly(
                    new float[]{
                            0.f, 0.f, // top left
                            width, 0.f, // top right
                            0.f, height, // bottom left
                            width, height, // bottom right
                    }, 0,
                    displayOrientation == 90 ?
                            // Clockwise
                            new float[]{
                                    0.f, height, // top left
                                    0.f, 0.f, // top right
                                    width, height, // bottom left
                                    width, 0.f, // bottom right
                            } : // displayOrientation == 270
                            // Counter-clockwise
                            new float[]{
                                    width, 0.f, // top left
                                    width, height, // top right
                                    0.f, 0.f, // bottom left
                                    0.f, height, // bottom right
                            }, 0,
                    4);
        } else if (displayOrientation == 180) {
            matrix.postRotate(180, getWidth() / 2, getHeight() / 2);
        }
        textureView.setTransform(matrix);
    }
    /** Detaches and drops the TextureView; the preview is unusable until start() is called again. */
    @Override
    public void stop() {
        parentView.removeView(textureView);
        textureView = null;
    }
    @Override
    public Class gePreviewType() {
        return SurfaceTexture.class;
    }
    // This method is called only from Camera2.
    @Override
    public void setBufferSize(int width, int height) {
        textureView.getSurfaceTexture().setDefaultBufferSize(width, height);
    }
}
| apache-2.0 |
variac/bazel | src/main/java/com/google/devtools/build/lib/packages/AggregatingAttributeMapper.java | 25221 | // Copyright 2014 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.packages;
import com.google.common.base.Verify;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.devtools.build.lib.cmdline.Label;
import com.google.devtools.build.lib.collect.CollectionUtils;
import com.google.devtools.build.lib.packages.Attribute.ComputationLimiter;
import com.google.devtools.build.lib.packages.BuildType.Selector;
import com.google.devtools.build.lib.packages.BuildType.SelectorList;
import com.google.devtools.build.lib.syntax.Type;
import com.google.devtools.build.lib.syntax.Type.LabelVisitor;
import com.google.devtools.build.lib.syntax.Type.ListType;
import com.google.devtools.build.lib.util.Preconditions;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;
import javax.annotation.Nullable;
/**
* {@link AttributeMap} implementation that provides the ability to retrieve <i>all possible</i>
* values an attribute might take.
*/
public class AggregatingAttributeMapper extends AbstractAttributeMapper {
private final Rule rule;
  private AggregatingAttributeMapper(Rule rule) {
    // The base class gets the package/rule-class/label/attribute-container; the rule itself is
    // also retained for computed-default evaluation and the visibility special case.
    super(rule.getPackage(), rule.getRuleClassObject(), rule.getLabel(),
        rule.getAttributeContainer());
    this.rule = rule;
  }
  /** Returns an aggregating attribute mapper for the given rule. */
  public static AggregatingAttributeMapper of(Rule rule) {
    return new AggregatingAttributeMapper(rule);
  }
/**
* Returns all of this rule's attributes that are non-configurable. These are unconditionally
* available to computed defaults no matter what dependencies they've declared.
*/
  private List<String> getNonConfigurableAttributes() {
    // Non-configurable attributes cannot contain select()s, so computed defaults may always
    // read them directly.
    return rule.getRuleClassObject().getNonConfigurableAttributes();
  }
/**
* Override that also visits the rule's configurable attribute keys (which are themselves labels).
*
* <p>Note that we directly parse the selectors rather than just calling {@link #visitAttribute}
* to iterate over all possible values. That's because {@link #visitAttribute} can grow
* exponentially with respect to the number of selects (e.g. if an attribute uses three selects
* with three conditions each, it can take nine possible values). So we want to avoid that code
* path whenever actual value iteration isn't specifically needed.
*/
  @Override
  protected void visitLabels(Attribute attribute, Type.LabelVisitor<Attribute> visitor)
      throws InterruptedException {
    // Delegate to the internal variant, including the select() keys themselves as labels.
    visitLabels(attribute, true, visitor);
  }
  /**
   * Visits every label appearing in the given attribute's value.
   *
   * <p>Selector lists are walked branch-by-branch (keys optionally included) instead of expanding
   * every possible attribute value, avoiding the exponential cost of {@link #visitAttribute}.
   * Computed defaults are the exception: their values must be computed before inspection.
   *
   * @param attribute the attribute whose labels are visited
   * @param includeSelectKeys whether select() branch keys are themselves visited as labels
   * @param visitor callback invoked for each label found
   */
  private void visitLabels(
      Attribute attribute, boolean includeSelectKeys, Type.LabelVisitor<Attribute> visitor)
      throws InterruptedException {
    Type<?> type = attribute.getType();
    SelectorList<?> selectorList = getSelectorList(attribute.getName(), type);
    if (selectorList == null) {
      if (getComputedDefault(attribute.getName(), attribute.getType()) != null) {
        // Computed defaults are a special pain: we have no choice but to iterate through their
        // (computed) values and look for labels.
        for (Object value : visitAttribute(attribute.getName(), attribute.getType())) {
          if (value != null) {
            type.visitLabels(visitor, value, attribute);
          }
        }
      } else {
        super.visitLabels(attribute, visitor);
      }
    } else {
      for (Selector<?> selector : selectorList.getSelectors()) {
        for (Map.Entry<Label, ?> selectorEntry : selector.getEntries().entrySet()) {
          // Branch keys are labels too, except reserved ones, which are skipped.
          if (includeSelectKeys && !BuildType.Selector.isReservedLabel(selectorEntry.getKey())) {
            visitor.visit(selectorEntry.getKey(), attribute);
          }
          // A branch with no explicit value falls back to the attribute's default.
          Object value = selector.isValueSet(selectorEntry.getKey())
              ? selectorEntry.getValue()
              : attribute.getDefaultValue(null);
          type.visitLabels(visitor, value, attribute);
        }
      }
    }
  }
/**
* Returns all labels reachable via the given attribute, with duplicate instances removed.
*
* @param includeSelectKeys whether to include config_setting keys for configurable attributes
*/
  public Set<Label> getReachableLabels(String attributeName, boolean includeSelectKeys)
      throws InterruptedException {
    // ImmutableSet deduplicates while preserving first-seen order.
    final ImmutableSet.Builder<Label> builder = ImmutableSet.<Label>builder();
    visitLabels(
        getAttributeDefinition(attributeName),
        includeSelectKeys,
        new LabelVisitor<Attribute>() {
          @Override
          public void visit(Label label, Attribute attribute) {
            builder.add(label);
          }
        });
    return builder.build();
  }
/**
* Returns the labels that might appear multiple times in the same attribute value.
*/
  public Set<Label> checkForDuplicateLabels(Attribute attribute) {
    String attrName = attribute.getName();
    Type<?> attrType = attribute.getType();
    ImmutableSet.Builder<Label> duplicates = ImmutableSet.builder();
    SelectorList<?> selectorList = getSelectorList(attribute.getName(), attrType);
    if (selectorList == null || selectorList.getSelectors().size() == 1) {
      // Three possible scenarios:
      // 1) Plain old attribute (no selects). Without selects, visitAttribute runs efficiently.
      // 2) Computed default, possibly depending on other attributes using select. In this case,
      //    visitAttribute might be inefficient. But we have no choice but to iterate over all
      //    possible values (since we have to compute them), so we take the efficiency hit.
      // 3) "attr = select({...})". With just a single select, visitAttribute runs efficiently.
      for (Object value : visitAttribute(attrName, attrType)) {
        if (value != null) {
          // TODO(bazel-team): Calculate duplicates directly using attrType.visitLabels in order to
          // avoid intermediate collections here.
          duplicates.addAll(CollectionUtils.duplicatedElementsOf(extractLabels(attrType, value)));
        }
      }
    } else {
      // Multiple selects concatenated together. It's expensive to iterate over every possible
      // value, so instead collect all labels across all the selects and check for duplicates.
      // This is overly strict, since this counts duplicates across values. We can presumably
      // relax this if necessary, but doing so would incur the value iteration expense this
      // code path avoids.
      List<Label> combinedLabels = new LinkedList<>(); // Labels that appear across all selectors.
      for (Selector<?> selector : selectorList.getSelectors()) {
        // Labels within a single selector. It's okay for there to be duplicates as long as
        // they're in different selector paths (since only one path can actually get chosen).
        Set<Label> selectorLabels = new LinkedHashSet<>();
        for (Object selectorValue : selector.getEntries().values()) {
          List<Label> labelsInSelectorValue = extractLabels(attrType, selectorValue);
          // Duplicates within a single path are not okay.
          duplicates.addAll(CollectionUtils.duplicatedElementsOf(labelsInSelectorValue));
          Iterables.addAll(selectorLabels, labelsInSelectorValue);
        }
        combinedLabels.addAll(selectorLabels);
      }
      duplicates.addAll(CollectionUtils.duplicatedElementsOf(combinedLabels));
    }
    // All duplicates found across the attribute's possible values.
    return duplicates.build();
  }
/**
* Returns a list of the possible values of the specified attribute in the specified rule.
*
* <p>If the attribute's value is a simple value, then this returns a singleton list of that
* value.
*
* <p>If the attribute's value is an expression containing one or many {@code select(...)}
* expressions, then this returns a list of all values that expression may evaluate to.
*
* <p>If the attribute does not have an explicit value for this rule, and the rule provides a
* computed default, the computed default function is evaluated given the rule's other attribute
* values as inputs and the output is returned in a singleton list.
*
* <p>If the attribute does not have an explicit value for this rule, and the rule provides a
* computed default, and the computed default function depends on other attributes whose values
* contain {@code select(...)} expressions, then the computed default function is evaluated for
* every possible combination of input values, and the list of outputs is returned.
*/
  public Iterable<Object> getPossibleAttributeValues(Rule rule, Attribute attr) {
    // Values may be null, so use normal collections rather than immutable collections.
    // This special case for the visibility attribute is needed because its value is replaced
    // with an empty list during package loading if it is public or private in order not to visit
    // the package called 'visibility'.
    if (attr.getName().equals("visibility")) {
      List<Object> result = new ArrayList<>(1);
      result.add(rule.getVisibility().getDeclaredLabels());
      return result;
    }
    // General case: expand every possible value (can be expensive with many selects).
    return Lists.<Object>newArrayList(visitAttribute(attr.getName(), attr.getType()));
  }
/**
* If the attribute is a selector list of list type, then this method returns a list with number
* of elements equal to the number of select statements in the selector list. Each element of this
* list is equal to concatenating every possible attribute value in a single select statement.
* The conditions themselves in the select statements are completely ignored. Returns {@code null}
* if the attribute isn't of the desired format.
*
* As an example, if we have select({a: ["a"], b: ["a", "b"]}) + select({a: ["c", "d"], c: ["e"])
* The output will be [["a", "a", "b"], ["c", "d", "e"]]. The idea behind this structure is that
* at least some of the structure in the original selector list is preserved and we know any
* possible attribute value is the result of concatenating some sublist of each element.
*/
  @Nullable
  public <T> Iterable<T> getConcatenatedSelectorListsOfListType(
      String attributeName, Type<T> type) {
    SelectorList<T> selectorList = getSelectorList(attributeName, type);
    if (selectorList != null && type instanceof ListType) {
      List<T> selectList = new ArrayList<>();
      // One element per select(): the concatenation of all of that select's branch values,
      // ignoring the conditions themselves.
      for (Selector<T> selector : selectorList.getSelectors()) {
        selectList.add(type.concat(selector.getEntries().values()));
      }
      return ImmutableList.copyOf(selectList);
    }
    // Not a selector list of list type.
    return null;
  }
/**
* Returns a list of all possible values an attribute can take for this rule.
*
* <p>Note that when an attribute uses multiple selects, or is a {@link Attribute.ComputedDefault}
* that depends on configurable attributes, it can potentially take on many values. So be cautious
* about unnecessarily relying on this method.
*/
  public <T> Iterable<T> visitAttribute(String attributeName, Type<T> type) {
    // If this attribute value is configurable, visit all possible values. Note this can be
    // exponential in the number of select()s in the attribute (see visitConfigurableAttribute).
    SelectorList<T> selectorList = getSelectorList(attributeName, type);
    if (selectorList != null) {
      ImmutableList.Builder<T> builder = ImmutableList.builder();
      visitConfigurableAttribute(selectorList.getSelectors(), new BoundSelectorPaths(), type,
          null, builder);
      return builder.build();
    }
    // If this attribute is a computed default, feed it all possible value combinations of
    // its declared dependencies and return all computed results. For example, if this default
    // uses attributes x and y, x can configurably be x1 or x2, and y can configurably be y1
    // or y1, then compute default values for the (x1,y1), (x1,y2), (x2,y1), and (x2,y2) cases.
    Attribute.ComputedDefault computedDefault = getComputedDefault(attributeName, type);
    if (computedDefault != null) {
      return computedDefault.getPossibleValues(type, rule);
    }
    // For any other attribute, just return its direct value.
    T value = get(attributeName, type);
    return value == null ? ImmutableList.<T>of() : ImmutableList.of(value);
  }
/**
* Determines all possible values a configurable attribute can take. Do not call this method
* unless really necessary (see TODO comment inside).
*
* @param selectors the selectors that make up this attribute assignment (in order)
* @param boundSelectorPaths paths that have already been chosen from previous selectors in an
* earlier recursive call of this method. For example, given
* <pre>cmd = select({':a': 'w', ':b': 'x'}) + select({':a': 'y', ':b': 'z'})</pre>
* the only possible values for <code>cmd</code> are <code>"wy"</code> and <code>"xz"</code>.
* This is because the selects have the same conditions, so whatever matches the first also
* matches the second. Note that this doesn't work for selects with overlapping but
* <i>different</i> key sets. That's because of key specialization (see
* {@link com.google.devtools.build.lib.analysis.ConfiguredAttributeMapper} - if the
* second select also included a condition <code>':c'</code> that includes both the flags
* in <code>':a'</code> and <code>':b'</code>, <code>':c'</code> would be chosen over
* them both.
* @param type the type of this attribute
* @param currentValueSoFar the partial value produced so far from earlier calls to this method
* @param valuesBuilder output container for full values this attribute can take
*/
  private <T> void visitConfigurableAttribute(List<Selector<T>> selectors,
      BoundSelectorPaths boundSelectorPaths, Type<T> type, T currentValueSoFar,
      ImmutableList.Builder<T> valuesBuilder) {
    // TODO(bazel-team): minimize or eliminate uses of this interface. It necessarily grows
    // exponentially with the number of selects in the attribute. Is that always necessary?
    // For example, dependency resolution just needs to know every possible label an attribute
    // might reference, but it doesn't need to know the exact combination of labels that make
    // up a value. This may be even less important for non-label values (e.g. strings), which
    // have no impact on the dependency structure.
    // Base case: all selectors consumed; emit the value accumulated along this path.
    if (selectors.isEmpty()) {
      if (currentValueSoFar != null) {
        // Null values arise when a None is used as the value of a Selector for a type without a
        // default value.
        // TODO(gregce): visitAttribute should probably convey that an unset attribute is possible.
        // Therefore we need to actually handle null values here.
        valuesBuilder.add(currentValueSoFar);
      }
    } else {
      Selector<T> firstSelector = selectors.get(0);
      List<Selector<T>> remainingSelectors = selectors.subList(1, selectors.size());
      Map<Label, T> firstSelectorEntries = firstSelector.getEntries();
      Label boundKey = boundSelectorPaths.getChosenKey(firstSelectorEntries.keySet());
      if (boundKey != null) {
        // If we've already followed some path from a previous selector with the same exact
        // conditions as this one, we only need to visit that path (since the same key will
        // match both selectors).
        T boundValue = firstSelectorEntries.get(boundKey);
        visitConfigurableAttribute(remainingSelectors, boundSelectorPaths, type,
            currentValueSoFar == null
                ? boundValue
                : type.concat(ImmutableList.of(currentValueSoFar, boundValue)),
            valuesBuilder);
      } else {
        // Otherwise, we need to iterate over all possible paths.
        for (Map.Entry<Label, T> selectorBranch : firstSelectorEntries.entrySet()) {
          // Bind this particular path for later selectors using the same conditions.
          boundSelectorPaths.bind(firstSelectorEntries.keySet(), selectorBranch.getKey());
          visitConfigurableAttribute(remainingSelectors, boundSelectorPaths, type,
              currentValueSoFar == null
                  ? selectorBranch.getValue()
                  : type.concat(ImmutableList.of(currentValueSoFar, selectorBranch.getValue())),
              valuesBuilder);
          // Unbind the path (so when we pop back up the recursive stack we can rebind it to new
          // values if we visit this selector again).
          boundSelectorPaths.unbind(firstSelectorEntries.keySet());
        }
      }
    }
  }
/**
* Helper class for {@link #visitConfigurableAttribute}. See that method's comments for more
* details.
*/
private static class BoundSelectorPaths {
private final Map<Set<Label>, Label> bindings = new HashMap<>();
/**
* Binds the given config key set to the specified path. There should be no previous binding
* for this key set.
*/
public void bind(Set<Label> allKeys, Label chosenKey) {
Preconditions.checkState(allKeys.contains(chosenKey));
Verify.verify(bindings.put(allKeys, chosenKey) == null);
}
/**
* Unbinds the given config key set.
*/
public void unbind(Set<Label> allKeys) {
Verify.verifyNotNull(bindings.remove(allKeys));
}
/**
* Returns the key this config key set is bound to or null if no binding.
*/
public Label getChosenKey(Set<Label> allKeys) {
return bindings.get(allKeys);
}
}
/**
* Given a list of attributes, creates an {attrName -> attrValue} map for every possible
* combination of those attributes' values and returns a list of all the maps.
*
* <p>For example, given attributes x and y, which respectively have possible values x1, x2 and
* y1, y2, this returns:
*
* <pre>
* [
* {x: x1, y: y1},
* {x: x1, y: y2},
* {x: x2, y: y1},
* {x: x2, y: y2}
* ]
* </pre>
*
* <p>The work done by this method may be limited by providing a {@link ComputationLimiter} that
* throws if too much work is attempted.
*/
<TException extends Exception> List<Map<String, Object>> visitAttributes(
List<String> attributes, ComputationLimiter<TException> limiter) throws TException {
List<Map<String, Object>> depMaps = new LinkedList<>();
AtomicInteger combinationsSoFar = new AtomicInteger(0);
visitAttributesInner(
attributes,
depMaps,
new HashMap<String, Object>(attributes.size()),
combinationsSoFar,
limiter);
return depMaps;
}
/**
* A recursive function used in the implementation of {@link #visitAttributes}.
*
* @param attributes a list of attributes that are yet to be visited.
* @param mappings a mutable list of {attrName --> attrValue} maps collected so far. This method
* will add newly discovered maps to the list.
* @param currentMap {attrName --> attrValue} assignments accumulated so far, not including those
* in {@code attributes}. This map may be mutated and as such must be copied if we wish to
* preserve its state, such as in the base case.
* @param combinationsSoFar a counter for all previously processed combinations of possible
* values.
* @param limiter a strategy to limit the work done by invocations of this method.
*/
private <TException extends Exception> void visitAttributesInner(
List<String> attributes,
List<Map<String, Object>> mappings,
Map<String, Object> currentMap,
AtomicInteger combinationsSoFar,
ComputationLimiter<TException> limiter)
throws TException {
if (attributes.isEmpty()) {
// Because this method uses exponential time/space on the number of inputs, we may limit
// the total number of method calls.
limiter.onComputationCount(combinationsSoFar.incrementAndGet());
// Recursive base case: snapshot and store whatever's already been populated in currentMap.
mappings.add(new HashMap<>(currentMap));
return;
}
// Take the first attribute in the dependency list and iterate over all its values. For each
// value x, update currentMap with the additional entry { firstAttrName: x }, then feed
// this recursively into a subcall over all remaining dependencies. This recursively
// continues until we run out of values.
String currentAttribute = attributes.get(0);
Iterable<?> firstAttributePossibleValues =
visitAttribute(currentAttribute, getAttributeType(currentAttribute));
List<String> restOfAttrs = attributes.subList(1, attributes.size());
for (Object value : firstAttributePossibleValues) {
// Overwrite each time.
currentMap.put(currentAttribute, value);
visitAttributesInner(restOfAttrs, mappings, currentMap, combinationsSoFar, limiter);
}
}
  /**
   * Returns an {@link AttributeMap} that delegates to {@code AggregatingAttributeMapper.this}
   * except for {@link #get} calls for attributes that are configurable. In that case, the {@link
   * AttributeMap} looks up an attribute's value in {@code directMap}. Any attempt to {@link #get} a
   * configurable attribute that's not in {@code directMap} causes an {@link
   * IllegalArgumentException} to be thrown.
   */
  AttributeMap createMapBackedAttributeMap(final Map<String, Object> directMap) {
    final AggregatingAttributeMapper owner = AggregatingAttributeMapper.this;
    return new AttributeMap() {
      @Override
      public <T> T get(String attributeName, Type<T> type) {
        owner.checkType(attributeName, type);
        // Non-configurable attributes always resolve through the owning mapper.
        if (getNonConfigurableAttributes().contains(attributeName)) {
          return owner.get(attributeName, type);
        }
        // Configurable attributes must have been pre-resolved into directMap by the caller.
        if (!directMap.containsKey(attributeName)) {
          throw new IllegalArgumentException(
              "attribute \""
                  + attributeName
                  + "\" isn't available in this computed default context");
        }
        return type.cast(directMap.get(attributeName));
      }
      // Everything below is straight delegation to the owning mapper, except
      // getAttributeNames(), which merges directMap's keys with the non-configurables.
      @Override
      public boolean isConfigurable(String attributeName) {
        return owner.isConfigurable(attributeName);
      }
      @Override
      public String getName() {
        return owner.getName();
      }
      @Override
      public Label getLabel() {
        return owner.getLabel();
      }
      @Override
      public Iterable<String> getAttributeNames() {
        return ImmutableList.<String>builder()
            .addAll(directMap.keySet())
            .addAll(getNonConfigurableAttributes())
            .build();
      }
      @Override
      public void visitLabels(AcceptsLabelAttribute observer) throws InterruptedException {
        owner.visitLabels(observer);
      }
      @Override
      public String getPackageDefaultHdrsCheck() {
        return owner.getPackageDefaultHdrsCheck();
      }
      @Override
      public Boolean getPackageDefaultTestOnly() {
        return owner.getPackageDefaultTestOnly();
      }
      @Override
      public String getPackageDefaultDeprecation() {
        return owner.getPackageDefaultDeprecation();
      }
      @Override
      public ImmutableList<String> getPackageDefaultCopts() {
        return owner.getPackageDefaultCopts();
      }
      @Nullable
      @Override
      public Type<?> getAttributeType(String attrName) {
        return owner.getAttributeType(attrName);
      }
      @Nullable
      @Override
      public Attribute getAttributeDefinition(String attrName) {
        return owner.getAttributeDefinition(attrName);
      }
      @Override
      public boolean isAttributeValueExplicitlySpecified(String attributeName) {
        return owner.isAttributeValueExplicitlySpecified(attributeName);
      }
      @Override
      public boolean has(String attrName) {
        return owner.has(attrName);
      }
      @Override
      public <T> boolean has(String attrName, Type<T> type) {
        return owner.has(attrName, type);
      }
    };
  }
private static ImmutableList<Label> extractLabels(Type<?> type, Object value) {
try {
final ImmutableList.Builder<Label> result = ImmutableList.builder();
type.visitLabels(
new Type.LabelVisitor<Object>() {
@Override
public void visit(@Nullable Label label, Object dummy) {
if (label != null) {
result.add(label);
}
}
},
value,
/*context=*/ null);
return result.build();
} catch (InterruptedException e) {
throw new IllegalStateException("Unexpected InterruptedException", e);
}
}
}
| apache-2.0 |
WangXijue/zstack | core/src/main/java/org/zstack/core/Platform.java | 28898 | package org.zstack.core;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang.LocaleUtils;
import org.apache.commons.lang.StringUtils;
import org.reflections.Reflections;
import org.reflections.scanners.*;
import org.reflections.util.ClasspathHelper;
import org.springframework.beans.factory.BeanFactory;
import org.springframework.context.MessageSource;
import org.springframework.context.NoSuchMessageException;
import org.springframework.web.context.WebApplicationContext;
import org.zstack.core.cloudbus.CloudBus;
import org.zstack.core.componentloader.ComponentLoader;
import org.zstack.core.componentloader.ComponentLoaderImpl;
import org.zstack.core.config.GlobalConfigFacade;
import org.zstack.core.db.DatabaseGlobalProperty;
import org.zstack.core.errorcode.ErrorFacade;
import org.zstack.core.statemachine.StateMachine;
import org.zstack.core.statemachine.StateMachineImpl;
import org.zstack.header.Component;
import org.zstack.header.core.encrypt.ENCRYPT;
import org.zstack.header.errorcode.ErrorCode;
import org.zstack.header.errorcode.SysErrors;
import org.zstack.header.exception.CloudRuntimeException;
import org.zstack.utils.*;
import org.zstack.utils.data.StringTemplate;
import org.zstack.utils.logging.CLogger;
import org.zstack.utils.logging.CLoggerImpl;
import org.zstack.utils.network.NetworkUtils;
import org.zstack.utils.path.PathUtil;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.lang.management.ManagementFactory;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.net.InetAddress;
import java.net.NetworkInterface;
import java.net.SocketException;
import java.util.*;
import java.util.concurrent.TimeUnit;
import static org.zstack.utils.CollectionDSL.e;
import static org.zstack.utils.CollectionDSL.map;
import static org.zstack.utils.StringDSL.ln;
public class Platform {
    // Logger for the whole platform bootstrap; created eagerly and used in the static block.
    private static final CLogger logger = CLoggerImpl.getLogger(Platform.class);
    // Lazily-created singleton component loader (see getComponentLoader()).
    private static ComponentLoader loader;
    // Unique id of this management-server process, generated once in the static block.
    private static String msId;
    // Code version string; currently hard-coded in the static block (see TODO there).
    private static String codeVersion;
    // Cached results of getManagementServerIp() / getManagementCidr().
    private static String managementServerIp;
    private static String managementCidr;
    // Spring bean factory doubling as the i18n message source (set by initMessageSource()).
    private static MessageSource messageSource;
    public static final String COMPONENT_CLASSPATH_HOME = "componentsHome";
    public static final String FAKE_UUID = "THIS_IS_IS_A_FAKE_UUID";
    // Snapshot of every linked @GlobalProperty value, keyed by property name.
    private static final Map<String, String> globalProperties = new HashMap<String, String>();
    // Locale used for i18n message translation (parsed from CoreGlobalProperty.LOCALE).
    private static Locale locale;
    // Liveness flag; volatile for cross-thread visibility. Not written within this file.
    public static volatile boolean IS_RUNNING = true;
    // Classpath scanner over the org.zstack packages; built once in the static block.
    private static Reflections reflections;
    /** Returns the shared classpath {@link Reflections} scanner for org.zstack packages. */
    public static Reflections getReflections() {
        return reflections;
    }
    // All methods annotated with @ENCRYPT, discovered at boot (see getAllEncryptPassword()).
    public static Set<Method> encryptedMethodsMap;
private static Map<String, String> linkGlobalPropertyMap(String prefix) {
Map<String, String> ret = new HashMap<String, String>();
Map<String, String> map = getGlobalPropertiesStartWith(prefix);
if (map.isEmpty()) {
return ret;
}
for (Map.Entry<String, String> e : map.entrySet()) {
String key = StringDSL.stripStart(e.getKey(), prefix).trim();
ret.put(key, e.getValue().trim());
}
return ret;
}
    /**
     * Injects configuration values into every static field of {@code clz} annotated with
     * {@code @GlobalProperty}. Map- and List-typed fields are assembled from property-name
     * prefixes; scalar fields are parsed from the single matching property (falling back to the
     * annotation's default), with ${...} placeholders substituted from {@code propertiesMap}.
     *
     * @throws CloudRuntimeException if an annotated field is not static or has an unsupported type
     * @throws IllegalArgumentException if a required property is absent
     */
    private static void linkGlobalProperty(Class clz, Map<String, String> propertiesMap) {
        for (Field f : clz.getDeclaredFields()) {
            GlobalProperty at = f.getAnnotation(GlobalProperty.class);
            if (at == null) {
                continue;
            }
            // Only static fields can be assigned without an instance.
            if (!Modifier.isStatic(f.getModifiers())) {
                throw new CloudRuntimeException(String.format("%s.%s is annotated by @GlobalProperty but it's not defined with static modifier", clz.getName(), f.getName()));
            }
            Object valueToSet = null;
            String name = at.name();
            if (Map.class.isAssignableFrom(f.getType())) {
                // Map field: gather all "name*" properties, keys stripped of the prefix.
                Map ret = linkGlobalPropertyMap(name);
                if (ret.isEmpty() && at.required()) {
                    throw new IllegalArgumentException(String.format("A required global property[%s] missing in zstack.properties", name));
                }
                valueToSet = ret;
            } else if (List.class.isAssignableFrom(f.getType())) {
                // List field: gather "name<index>" properties ordered by key.
                List ret = linkGlobalPropertyList(name);
                if (ret.isEmpty() && at.required()) {
                    throw new IllegalArgumentException(String.format("A required global property[%s] missing in zstack.properties", name));
                }
                valueToSet = ret;
            } else {
                // Scalar field: single property value, annotation default as fallback.
                String value = getGlobalProperty(name);
                if (value == null && at.defaultValue().equals(GlobalProperty.DEFAULT_NULL_STRING) && at.required()) {
                    throw new IllegalArgumentException(String.format("A required global property[%s] missing in zstack.properties", name));
                }
                if (value == null) {
                    value = at.defaultValue();
                }
                // The sentinel default string maps back to a real null.
                if (GlobalProperty.DEFAULT_NULL_STRING.equals(value)) {
                    value = null;
                }
                // Substitute ${property} placeholders against the full property map.
                if (value != null) {
                    value = StringTemplate.substitute(value, propertiesMap);
                }
                // Convert the raw string to the field's declared (boxed or primitive) type.
                if (Integer.class.isAssignableFrom(f.getType()) || Integer.TYPE.isAssignableFrom(f.getType())) {
                    valueToSet = TypeUtils.stringToValue(value, Integer.class, 0);
                } else if (Long.class.isAssignableFrom(f.getType()) || Long.TYPE.isAssignableFrom(f.getType())) {
                    valueToSet = TypeUtils.stringToValue(value, Long.class, 0L);
                } else if (Float.class.isAssignableFrom(f.getType()) || Float.TYPE.isAssignableFrom(f.getType())) {
                    valueToSet = TypeUtils.stringToValue(value, Float.class, 0F);
                } else if (Double.class.isAssignableFrom(f.getType()) || Double.TYPE.isAssignableFrom(f.getType())) {
                    valueToSet = TypeUtils.stringToValue(value, Double.class, 0D);
                } else if (String.class.isAssignableFrom(f.getType())) {
                    valueToSet = value;
                } else if (Boolean.class.isAssignableFrom(f.getType()) || Boolean.TYPE.isAssignableFrom(f.getType())) {
                    valueToSet = TypeUtils.stringToValue(value, Boolean.class);
                } else {
                    throw new CloudRuntimeException(String.format("%s.%s of type[%s] is unsupported by global property. try use Platform.getGlobalProperty() and parse by yourself",
                            clz.getName(), f.getName(), f.getType().getName()));
                }
            }
            f.setAccessible(true);
            try {
                f.set(null, valueToSet);
                // Record the final value for diagnostics / getGlobalProperties().
                globalProperties.put(name, valueToSet == null ? "null" : valueToSet.toString());
                logger.debug(String.format("linked global property[%s.%s], value: %s", clz.getName(), f.getName(), valueToSet));
            } catch (IllegalAccessException e) {
                throw new CloudRuntimeException(String.format("unable to link global property[%s.%s]", clz.getName(), f.getName()), e);
            }
        }
    }
    /**
     * Returns the map of all linked global property values (name -> string form).
     * NOTE(review): this exposes the internal mutable map directly, so callers could mutate
     * platform state; consider returning an unmodifiable view — confirm no caller writes to it.
     */
    public static Map<String, String> getGlobalProperties() {
        return globalProperties;
    }
    /**
     * Builds the value for a List-typed {@code @GlobalProperty} field: collects every system
     * property "name&lt;index&gt;" (the suffix must be numeric), ordered by key, and returns
     * their values.
     *
     * @throws IllegalArgumentException if any matching key's suffix is not a number
     */
    private static List linkGlobalPropertyList(String name) {
        Map<String, String> map = getGlobalPropertiesStartWith(name);
        List<String> ret = new ArrayList<String>(map.size());
        if (map.isEmpty()) {
            return ret;
        }
        List<String> orderedKeys = new ArrayList<String>();
        orderedKeys.addAll(map.keySet());
        // NOTE(review): sorting is lexical, so "name10" orders before "name2" even though the
        // suffix is validated as numeric below — confirm this ordering is intended.
        Collections.sort(orderedKeys);
        for (String key : orderedKeys) {
            String index = StringDSL.stripStart(key, name).trim();
            try {
                // Validation only; the parsed number itself is unused.
                Long.valueOf(index);
            } catch (NumberFormatException e) {
                throw new IllegalArgumentException(String.format("[Illegal List Definition] %s is an invalid list key" +
                        " definition, the last character must be a number, for example %s1. %s is not a number", key, key, index));
            }
            ret.add(map.get(key));
        }
        return ret;
    }
private static void linkGlobalProperty() {
Set<Class<?>> clzs = reflections.getTypesAnnotatedWith(GlobalPropertyDefinition.class);
boolean noTrim = System.getProperty("DoNotTrimPropertyFile") != null;
List<String> lst = new ArrayList<String>();
Map<String, String> propertiesMap = new HashMap<String, String>();
for (final String name: System.getProperties().stringPropertyNames()) {
String value = System.getProperty(name);
if (!noTrim) {
value = value.trim();
}
propertiesMap.put(name, value);
lst.add(String.format("%s=%s", name, value));
}
logger.debug(String.format("system properties:\n%s", StringUtils.join(lst, ",")));
for (Class clz : clzs) {
linkGlobalProperty(clz, propertiesMap);
}
}
    /**
     * Writes this JVM's pid to {@code CoreGlobalProperty.PID_FILE_PATH}, refusing to start if an
     * existing pid file points at a live process. Skipped entirely under unit tests.
     * NOTE(review): the /proc lookup makes the liveness check Linux-only, and
     * FileUtils.readFileToString/writeStringToFile are used without an explicit charset
     * (platform default) — harmless for numeric pids, but worth confirming.
     *
     * @throws IOException on pid-file read/write failure
     * @throws CloudRuntimeException if the recorded process is still alive
     */
    private static void writePidFile() throws IOException {
        if (CoreGlobalProperty.UNIT_TEST_ON) {
            return;
        }
        File pidFile = new File(CoreGlobalProperty.PID_FILE_PATH);
        if (pidFile.exists()) {
            String pidStr = FileUtils.readFileToString(pidFile);
            try {
                long pid = Long.valueOf(pidStr);
                String processProcDir = String.format("/proc/%s", pid);
                File processProcDirFile = new File(processProcDir);
                if (processProcDirFile.exists()) {
                    throw new CloudRuntimeException(String.format("pid file[%s] exists and the process[pid:%s] that the pid file points to is still running", CoreGlobalProperty.PID_FILE_PATH, pidStr));
                }
            } catch (NumberFormatException e) {
                // A corrupt pid file is treated as stale rather than fatal.
                logger.warn(String.format("pid file[%s] includes an invalid pid[%s] that is not a long number, ignore it",
                        CoreGlobalProperty.PID_FILE_PATH, pidStr));
            }
            logger.info(String.format("stale pid file[%s], ignore it", CoreGlobalProperty.PID_FILE_PATH));
        }
        // Remove the pid file on normal JVM exit.
        pidFile.deleteOnExit();
        // RuntimeMXBean name has the form "<pid>@<hostname>".
        String pid = ManagementFactory.getRuntimeMXBean().getName().split("@")[0];
        FileUtils.writeStringToFile(pidFile, pid);
    }
private static void prepareDefaultDbProperties() {
if (DatabaseGlobalProperty.DbUrl != null) {
String dbUrl = DatabaseGlobalProperty.DbUrl;
if (dbUrl.endsWith("/")) {
dbUrl = dbUrl.substring(0, dbUrl.length()-1);
}
if (getGlobalProperty("DbFacadeDataSource.jdbcUrl") == null) {
String url;
if (dbUrl.contains("{database}")) {
url = ln(dbUrl).formatByMap(
map(e("database", "zstack"))
);
} else {
url = String.format("%s/zstack", dbUrl);
}
System.setProperty("DbFacadeDataSource.jdbcUrl", url);
logger.debug(String.format("default DbFacadeDataSource.jdbcUrl to DB.url [%s]", url));
}
if (getGlobalProperty("RESTApiDataSource.jdbcUrl") == null) {
String url;
if (dbUrl.contains("{database}")) {
url = ln(dbUrl).formatByMap(
map(e("database", "zstack_rest"))
);
} else {
url = String.format("%s/zstack_rest", dbUrl);
}
System.setProperty("RESTApiDataSource.jdbcUrl", url);
logger.debug(String.format("default RESTApiDataSource.jdbcUrl to DB.url [%s]", url));
}
}
if (DatabaseGlobalProperty.DbUser != null) {
if (getGlobalProperty("DbFacadeDataSource.user") == null) {
System.setProperty("DbFacadeDataSource.user", DatabaseGlobalProperty.DbUser);
logger.debug(String.format("default RESTApiDataSource.user to DB.user [%s]", DatabaseGlobalProperty.DbUser));
}
if (getGlobalProperty("RESTApiDataSource.user") == null) {
System.setProperty("RESTApiDataSource.user", DatabaseGlobalProperty.DbUser);
logger.debug(String.format("default RESTApiDataSource.user to DB.user [%s]", DatabaseGlobalProperty.DbUser));
}
}
if (DatabaseGlobalProperty.DbPassword != null) {
if (getGlobalProperty("DbFacadeDataSource.password") == null) {
System.setProperty("DbFacadeDataSource.password", DatabaseGlobalProperty.DbPassword);
logger.debug(String.format("default DbFacadeDataSource.password to DB.password [%s]", DatabaseGlobalProperty.DbPassword));
}
if (getGlobalProperty("RESTApiDataSource.password") == null) {
System.setProperty("RESTApiDataSource.password", DatabaseGlobalProperty.DbPassword);
logger.debug(String.format("default RESTApiDataSource.password to DB.password [%s]", DatabaseGlobalProperty.DbPassword));
}
}
if (DatabaseGlobalProperty.DbMaxIdleTime != null) {
if (getGlobalProperty("DbFacadeDataSource.maxIdleTime") == null) {
System.setProperty("DbFacadeDataSource.maxIdleTime", DatabaseGlobalProperty.DbMaxIdleTime);
logger.debug(String.format("default DbFacadeDataSource.maxIdleTime to DB.maxIdleTime [%s]", DatabaseGlobalProperty.DbMaxIdleTime));
}
if (getGlobalProperty("ExtraDataSource.maxIdleTime") == null) {
System.setProperty("ExtraDataSource.maxIdleTime", DatabaseGlobalProperty.DbMaxIdleTime);
logger.debug(String.format("default ExtraDataSource.maxIdleTime to DB.maxIdleTime [%s]", DatabaseGlobalProperty.DbMaxIdleTime));
}
if (getGlobalProperty("RESTApiDataSource.maxIdleTime") == null) {
System.setProperty("RESTApiDataSource.maxIdleTime", DatabaseGlobalProperty.DbMaxIdleTime);
logger.debug(String.format("default RESTApiDataSource.maxIdleTime to DB.maxIdleTime [%s]", DatabaseGlobalProperty.DbMaxIdleTime));
}
}
if (DatabaseGlobalProperty.DbIdleConnectionTestPeriod != null) {
if (getGlobalProperty("DbFacadeDataSource.idleConnectionTestPeriod") == null) {
System.setProperty("DbFacadeDataSource.idleConnectionTestPeriod", DatabaseGlobalProperty.DbIdleConnectionTestPeriod);
logger.debug(String.format("default DbFacadeDataSource.idleConnectionTestPeriod to DB.idleConnectionTestPeriod [%s]", DatabaseGlobalProperty.DbIdleConnectionTestPeriod));
}
if (getGlobalProperty("ExtraDataSource.idleConnectionTestPeriod") == null) {
System.setProperty("ExtraDataSource.idleConnectionTestPeriod", DatabaseGlobalProperty.DbIdleConnectionTestPeriod);
logger.debug(String.format("default ExtraDataSource.idleConnectionTestPeriod to DB.idleConnectionTestPeriod [%s]", DatabaseGlobalProperty.DbIdleConnectionTestPeriod));
}
if (getGlobalProperty("RESTApiDataSource.idleConnectionTestPeriod") == null) {
System.setProperty("RESTApiDataSource.idleConnectionTestPeriod", DatabaseGlobalProperty.DbIdleConnectionTestPeriod);
logger.debug(String.format("default RESTApiDataSource.idleConnectionTestPeriod to DB.idleConnectionTestPeriod [%s]", DatabaseGlobalProperty.DbIdleConnectionTestPeriod));
}
}
}
    // One-time process bootstrap: generate the management-server id, build the classpath
    // scanner, load zstack.properties into the system properties, link all @GlobalProperty
    // fields, default DB datasource settings, run @StaticInit methods, discover @ENCRYPT
    // methods and write the pid file. Any failure here is fatal (exit or rethrow).
    static {
        try {
            msId = getUuid();
            reflections = new Reflections(ClasspathHelper.forPackage("org.zstack"),
                    new SubTypesScanner(), new MethodAnnotationsScanner(), new FieldAnnotationsScanner(),
                    new MemberUsageScanner(), new MethodParameterNamesScanner(), new ResourcesScanner(),
                    new TypeAnnotationsScanner(), new TypeElementsScanner(), new MethodParameterScanner());
            // TODO: get code version from MANIFEST file
            codeVersion = "0.1.0";
            File globalPropertiesFile = PathUtil.findFileOnClassPath("zstack.properties", true);
            FileInputStream in = new FileInputStream(globalPropertiesFile);
            // NOTE(review): this FileInputStream is never closed — a small one-time leak.
            System.getProperties().load(in);
            linkGlobalProperty();
            prepareDefaultDbProperties();
            callStaticInitMethods();
            encryptedMethodsMap = getAllEncryptPassword();
            writePidFile();
        } catch (Throwable e) {
            logger.warn(String.format("unhandled exception when in Platform's static block, %s", e.getMessage()), e);
            new BootErrorLog().write(e.getMessage());
            if (CoreGlobalProperty.EXIT_JVM_ON_BOOT_FAILURE) {
                System.exit(1);
            } else {
                throw new RuntimeException(e);
            }
        }
    }
private static Set<Method> getAllEncryptPassword() {
Set<Method> encrypteds = reflections.getMethodsAnnotatedWith(ENCRYPT.class);
for (Method encrypted: encrypteds) {
logger.debug(String.format("found encrypted method[%s:%s]", encrypted.getDeclaringClass(), encrypted.getName()));
}
return encrypteds;
}
private static void callStaticInitMethods() throws InvocationTargetException, IllegalAccessException {
Set<Method> inits = reflections.getMethodsAnnotatedWith(StaticInit.class);
for (Method init : inits) {
if (!Modifier.isStatic(init.getModifiers())) {
throw new CloudRuntimeException(String.format("the method[%s:%s] annotated by @StaticInit is not a static method", init.getDeclaringClass(), init.getName()));
}
logger.debug(String.format("calling static init method[%s:%s]", init.getDeclaringClass(), init.getName()));
init.setAccessible(true);
init.invoke(null);
}
}
    /**
     * Initializes the i18n machinery: parses the configured locale and adopts the Spring bean
     * factory as the {@link MessageSource}. Must run after the component loader is created;
     * calling it earlier fails fast with a CloudRuntimeException.
     */
    private static void initMessageSource() {
        locale = LocaleUtils.toLocale(CoreGlobalProperty.LOCALE);
        logger.debug(String.format("using locale[%s] for i18n logging messages", locale.toString()));
        if (loader == null) {
            throw new CloudRuntimeException("ComponentLoader is null. i18n has not been initialized, you call it too early");
        }
        BeanFactory beanFactory = loader.getSpringIoc();
        if (beanFactory == null) {
            throw new CloudRuntimeException("BeanFactory is null. i18n has not been initialized, you call it too early");
        }
        // Spring's ApplicationContext implements MessageSource; any other BeanFactory cannot
        // serve translations.
        if (!(beanFactory instanceof MessageSource)) {
            throw new CloudRuntimeException("BeanFactory is not a spring MessageSource. i18n cannot be used");
        }
        messageSource = (MessageSource)beanFactory;
    }
private static CloudBus bus;
{
Runtime.getRuntime().addShutdownHook(new Thread(new Runnable() {
@Override
public void run() {
if (bus != null) {
bus.stop();
}
}
}));
}
    /** Returns the raw string value of a global (system) property, or null if unset. */
    public static String getGlobalProperty(String name) {
        return System.getProperty(name);
    }
    /**
     * Returns the string value of a global property.
     *
     * @throws IllegalArgumentException if the property is not set
     */
    public static String getGlobalPropertyExceptionOnNull(String name) {
        String ret = System.getProperty(name);
        if (ret == null) {
            throw new IllegalArgumentException(String.format("unable to find global properties[%s], check global.properties", name));
        }
        return ret;
    }
    /** Returns every global property whose name starts with {@code prefix}, keyed by full name. */
    public static Map<String, String> getGlobalPropertiesStartWith(String prefix) {
        Properties props = System.getProperties();
        Enumeration e = props.propertyNames();
        Map<String, String> ret = new HashMap<String, String>();
        while (e.hasMoreElements()) {
            String key = (String) e.nextElement();
            if (key.startsWith(prefix)) {
                ret.put(key, System.getProperty(key));
            }
        }
        return ret;
    }
    /** Returns a global property converted to {@code clazz} via TypeUtils, or null if unset. */
    public static <T> T getGlobalProperty(String name, Class<T> clazz) {
        String ret = System.getProperty(name);
        return TypeUtils.stringToValue(ret, clazz);
    }
    /** Returns a global property converted to {@code clazz}, or {@code defaultValue} if unset. */
    public static <T> T getGlobalProperty(String name, Class<T> clazz, T defaultValue) {
        String ret = System.getProperty(name);
        if (ret == null) {
            return defaultValue;
        } else {
            return TypeUtils.stringToValue(ret, clazz);
        }
    }
    /**
     * Returns a global property converted to {@code clazz}.
     *
     * @throws IllegalArgumentException if the property is not set
     */
    public static <T> T getGlobalPropertyExceptionOnNull(String name, Class<T> clazz) {
        T ret = getGlobalProperty(name, clazz);
        if (ret == null) {
            throw new IllegalArgumentException(String.format("unable to find global properties[%s], check global.properties", name));
        }
        return ret;
    }
    /**
     * Creates the singleton {@link ComponentLoader}, optionally wrapping an existing Spring
     * {@link WebApplicationContext}, then starts the global config facade and the CloudBus and
     * initializes i18n. Intended to be called exactly once.
     * NOTE(review): the single-creation guard is a plain {@code assert}, which is a no-op
     * unless the JVM runs with -ea — confirm double invocation cannot happen in production.
     */
    public static ComponentLoader createComponentLoaderFromWebApplicationContext(WebApplicationContext webAppCtx) {
        assert loader == null;
        try {
            if (webAppCtx != null) {
                loader = new ComponentLoaderImpl(webAppCtx);
            } else {
                loader = new ComponentLoaderImpl();
            }
        } catch (Exception e) {
            String err = "unable to create ComponentLoader";
            logger.warn(e.getMessage(), e);
            throw new CloudRuntimeException(err);
        }
        // Force plugin discovery before any component starts.
        loader.getPluginRegistry();
        GlobalConfigFacade gcf = loader.getComponent(GlobalConfigFacade.class);
        if (gcf != null) {
            ((Component)gcf).start();
        }
        bus = loader.getComponentNoExceptionWhenNotExisting(CloudBus.class);
        if (bus != null) {
            bus.start();
        }
        initMessageSource();
        return loader;
    }
    public static ComponentLoader getComponentLoader() {
        /*
         * This part cannot be moved to static block at the beginning.
         * Because component code loaded by Spring may call other functions in Platform which
         * causes the static block to be executed, which results in cycle initialization of ComponentLoaderImpl.
         */
        // NOTE(review): lazy init is not synchronized; concurrent first calls could race.
        if (loader == null) {
            loader = createComponentLoaderFromWebApplicationContext(null);
        }
        return loader;
    }
    /** Returns this process's unique management-server id (generated at class load). */
    public static String getManagementServerId() {
        return msId;
    }
    /** Creates a fresh, empty state machine instance. */
    public static <K extends Enum<K>, T extends Enum<T>> StateMachine<K, T> createStateMachine() {
        return new StateMachineImpl<K, T>();
    }
    /** Returns the code version string (currently hard-coded; see the static block TODO). */
    public static String getCodeVersion() {
        return codeVersion;
    }
    /** Returns a random 32-character hex UUID (standard UUID with dashes removed). */
    public static String getUuid() {
        return UUID.randomUUID().toString().replace("-", "");
    }
    /**
     * Returns (and caches) the CIDR of the NIC carrying the management IP, resolved by shelling
     * out to "ip addr". Fails fast if the parsed value is not a valid CIDR.
     * NOTE(review): the cache writes to a static field without synchronization — harmless if
     * the value is stable, but concurrent first calls may each run the shell command.
     */
    public static String getManagementCidr() {
        if (managementCidr != null) {
            return managementCidr;
        }
        String mgmtIp = getManagementServerIp();
        managementCidr = ShellUtils.run(String.format("ip addr | grep -w %s | awk '{print $2}'", mgmtIp));
        managementCidr = StringDSL.stripEnd(managementCidr, "\n");
        if (!NetworkUtils.isCidr(managementCidr)) {
            throw new CloudRuntimeException(String.format("got an invalid management CIDR[%s]", managementCidr));
        }
        return managementCidr;
    }
    /**
     * Returns (and caches) this management server's IP, resolved in priority order:
     * 1) the "management.server.ip" Java property, 2) the ZSTACK_MANAGEMENT_SERVER_IP
     * environment variable, 3) the first address of the NIC named in the default route
     * printed by "/sbin/ip route". Throws if all three fail.
     */
    public static String getManagementServerIp() {
        if (managementServerIp != null) {
            return managementServerIp;
        }
        String ip = System.getProperty("management.server.ip");
        if (ip != null) {
            logger.info(String.format("get management IP[%s] from Java property[management.server.ip]", ip));
            return ip;
        }
        ip = System.getenv("ZSTACK_MANAGEMENT_SERVER_IP");
        if (ip != null) {
            logger.info(String.format("get management IP[%s] from environment variable[ZSTACK_MANAGEMENT_SERVER_IP]", ip));
            return ip;
        }
        // Fall back to the NIC of the default route.
        Linux.ShellResult ret = Linux.shell("/sbin/ip route");
        String defaultLine = null;
        for (String s : ret.getStdout().split("\n")) {
            if (s.contains("default via")) {
                defaultLine = s;
                break;
            }
        }
        String err = "cannot get management server ip of this machine. there are three ways to get the ip.\n1) search for 'management.server.ip' java property\n2) search for 'ZSTACK_MANAGEMENT_SERVER_IP' environment variable\n3) search for default route printed out by '/sbin/ip route'\nhowever, all above methods failed";
        if (defaultLine == null) {
            throw new CloudRuntimeException(err);
        }
        try {
            Enumeration<NetworkInterface> nets = NetworkInterface.getNetworkInterfaces();
            for (NetworkInterface iface : Collections.list(nets)) {
                String name = iface.getName();
                // Match the interface mentioned in the default-route line and take its
                // first address. NOTE(review): for a dual-stack NIC this may pick an
                // IPv6 address — confirm expected address family.
                if (defaultLine.contains(name)) {
                    InetAddress ia = iface.getInetAddresses().nextElement();
                    ip = ia.getHostAddress();
                    break;
                }
            }
        } catch (SocketException e) {
            throw new CloudRuntimeException(e);
        }
        if (ip == null) {
            throw new CloudRuntimeException(err);
        }
        logger.info(String.format("get management IP[%s] from default route[/sbin/ip route]", ip));
        managementServerIp = ip;
        return managementServerIp;
    }
    /** Translates {@code code} via the message source using the default locale. */
    public static String toI18nString(String code, Object... args) {
        return toI18nString(code, null, args);
    }
    /** List-argument convenience overload of {@link #toI18nString(String, Locale, Object...)}. */
    public static String toI18nString(String code, Locale l, List args) {
        return toI18nString(code, l, args.toArray(new Object[args.size()]));
    }
    /**
     * Translates {@code code} via the Spring message source for the given locale (default
     * locale when {@code l} is null). If no translation exists — or the translation is empty —
     * {@code code} itself is treated as a format string and formatted with {@code args}.
     */
    public static String toI18nString(String code, Locale l, Object...args) {
        l = l == null ? locale : l;
        try {
            String ret;
            if (args.length > 0) {
                ret = messageSource.getMessage(code, args, l);
            } else {
                ret = messageSource.getMessage(code, null, l);
            }
            // if the result is an empty string which means the string is not translated in the locale,
            // return the original string so users won't get a confusing, empty string
            return ret.isEmpty() ? String.format(code, args) : ret;
        } catch (NoSuchMessageException e) {
            return String.format(code, args);
        }
    }
    /**
     * Formats {@code str} with {@code args} (no message-source lookup).
     * NOTE(review): {@code args} is only null if a caller passes an explicit null array; a
     * bare call like i18n(str) yields an empty array, so '%' characters in str still need
     * escaping.
     */
    public static String i18n(String str, Object...args) {
        if (args != null) {
            return String.format(str, args);
        } else {
            return str;
        }
    }
    /**
     * Kills a process with a 15-second grace period.
     * NOTE(review): this default (15) differs from the 30 used when null is passed to the
     * two-argument overload — confirm which default is intended.
     */
    public static boolean killProcess(int pid) {
        return killProcess(pid, 15);
    }
    /**
     * Sends SIGTERM repeatedly for up to {@code timeout} seconds (30 if null); if the process
     * survives, escalates to SIGKILL and waits up to 5 more seconds.
     *
     * @return true if the process is gone, false if even SIGKILL failed
     */
    public static boolean killProcess(int pid, Integer timeout) {
        timeout = timeout == null ? 30 : timeout;
        if (!TimeUtils.loopExecuteUntilTimeoutIgnoreExceptionAndReturn(timeout, 1, TimeUnit.SECONDS, () -> {
            ShellUtils.runAndReturn(String.format("kill %s", pid));
            return !new ProcessFinder().processExists(pid);
        })) {
            logger.warn(String.format("cannot kill the process[PID:%s] after %s seconds, kill -9 it", pid, timeout));
            ShellUtils.runAndReturn(String.format("kill -9 %s", pid));
        }
        if (!TimeUtils.loopExecuteUntilTimeoutIgnoreExceptionAndReturn(5, 1, TimeUnit.SECONDS, () -> !new ProcessFinder().processExists(pid))) {
            logger.warn(String.format("FAILED TO KILL -9 THE PROCESS[PID:%s], THE KERNEL MUST HAVE SOMETHING RUN", pid));
            return false;
        } else {
            return true;
        }
    }
public static ErrorCode err(Enum errCode, String fmt, Object...args) {
ErrorFacade errf = getComponentLoader().getComponent(ErrorFacade.class);
if (SysErrors.INTERNAL == errCode) {
return errf.instantiateErrorCode(errCode, String.format(fmt, args));
} else {
return errf.instantiateErrorCode(errCode, i18n(fmt, args));
}
}
public static ErrorCode inerr(String fmt, Object...args) {
return err(SysErrors.INTERNAL, fmt, args);
}
public static ErrorCode operr(String fmt, Object...args) {
return err(SysErrors.OPERATION_ERROR, fmt, args);
}
public static ErrorCode argerr(String fmt, Object...args) {
return err(SysErrors.INVALID_ARGUMENT_ERROR, fmt, args);
}
/** Shortcut for {@code err(SysErrors.IO_ERROR, fmt, args)} — I/O error. */
public static ErrorCode ioerr(String fmt, Object...args) {
    return err(SysErrors.IO_ERROR, fmt, args);
}
/** Shortcut for {@code err(SysErrors.HTTP_ERROR, fmt, args)} — HTTP error. */
public static ErrorCode httperr(String fmt, Object...args) {
    return err(SysErrors.HTTP_ERROR, fmt, args);
}
}
| apache-2.0 |
maheshika/carbon-analytics | components/bam-data-agents/org.wso2.carbon.bam.jmx.agent/src/main/java/org/wso2/carbon/bam/jmx/agent/JmxConstant.java | 950 | /*
* Copyright (c) WSO2 Inc. (http://wso2.com) All Rights Reserved.
WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.wso2.carbon.bam.jmx.agent;
/**
 * String constants shared by the JMX agent's task and profile plumbing.
 */
public class JmxConstant {

    /** Key under which JMX connection credentials are supplied. */
    public static final String JMX_REMOTE_CREDENTIALS_STR = "jmx.remote.credentials";

    /** Task type name registered for the JMX service task. */
    public static final String JMX_SERVICE_TASK_TYPE = "JMX_SERVICE_TASK";

    /**
     * Property key naming the JMX profile.
     * Fix: declared {@code final} for consistency with the other constants
     * (it was previously a mutable public static field).
     */
    public static final String JMX_PROFILE_NAME = "JMX_PROFILE_NAME";

    /** Constants holder; not meant to be instantiated. */
    private JmxConstant() {
    }
}
| apache-2.0 |
play1-maven-plugin/play1-maven-test-projects | external-modules/siena/app/models/TransactionAccountToModel.java | 2036 | /*
* Copyright 2011 Pascal <pascal.voitot@mandubian.org>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package models;
import siena.Generator;
import siena.Id;
import siena.Model;
import siena.Query;
import siena.Table;
import siena.core.batch.Batch;
/**
 * Siena persistence model for the "transaction_to" table: the receiving side
 * of a transfer.
 *
 * Note: equality and hashing are deliberately based on {@code amount} only —
 * the auto-generated {@code id} is ignored — matching the original semantics
 * and keeping equals/hashCode consistent with each other.
 */
@Table("transaction_to")
public class TransactionAccountToModel extends Model {

    @Id(Generator.AUTO_INCREMENT)
    public Long id;

    public Long amount;

    public TransactionAccountToModel() {
    }

    public TransactionAccountToModel(Long amount) {
        this.amount = amount;
    }

    /** @return a Siena query over all rows of this model. */
    public Query<TransactionAccountToModel> all() {
        return Model.all(TransactionAccountToModel.class);
    }

    /**
     * @return a batch handle — NOTE(review): this targets
     *     {@code TransactionAccountFromModel}, not this class; kept as-is for
     *     compatibility, but it looks like a copy/paste oversight to confirm.
     */
    public Batch<TransactionAccountFromModel> batch() {
        return Model.batch(TransactionAccountFromModel.class);
    }

    @Override
    public String toString() {
        return "id: "+id+", amount: "+amount;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result + ((amount == null) ? 0 : amount.hashCode());
        return result;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        TransactionAccountToModel other = (TransactionAccountToModel) obj;
        if (amount == null) {
            // Fix: the original null-check block fell through to
            // amount.equals(...) below and threw a NullPointerException
            // whenever this.amount was null.
            return other.amount == null;
        }
        return amount.equals(other.amount);
    }

    /** @return true when the auto-generated id is set but no amount is. */
    public boolean isOnlyIdFilled() {
        return this.id != null && this.amount == null;
    }
}
| apache-2.0 |
456838/usefulCode | YHamburgGit/app/src/main/java/com/salton123/hamb/model/bean/CriteriaSearchRet.java | 2798 | package com.salton123.hamb.model.bean;
/**
* User: 巫金生(newSalton@outlook.com)
* Date: 2017/2/4 23:54
* Time: 23:54
* Description:
*/
/**
 * Plain DTO for one entry of a criteria-search response.
 *
 * Fields mirror the server payload, e.g. id "27615", a title/category/author
 * triple, a cover image URL, a digest (summary text), createdat/updatedat
 * timestamps, a length (units not specified here — presumably duration;
 * confirm against the API), and male/female role counts.
 */
public class CriteriaSearchRet {

    private String id;
    private String title;
    private String category;
    private String author;
    private String cover;
    private String digest;
    private String createdat;
    private String updatedat;
    private int length;
    private int maleRoleCount;
    private int femaleRoleCount;

    public String getId() { return this.id; }

    public void setId(String id) { this.id = id; }

    public String getTitle() { return this.title; }

    public void setTitle(String title) { this.title = title; }

    public String getCategory() { return this.category; }

    public void setCategory(String category) { this.category = category; }

    public String getAuthor() { return this.author; }

    public void setAuthor(String author) { this.author = author; }

    public String getCover() { return this.cover; }

    public void setCover(String cover) { this.cover = cover; }

    public String getDigest() { return this.digest; }

    public void setDigest(String digest) { this.digest = digest; }

    public String getCreatedat() { return this.createdat; }

    public void setCreatedat(String createdat) { this.createdat = createdat; }

    public String getUpdatedat() { return this.updatedat; }

    public void setUpdatedat(String updatedat) { this.updatedat = updatedat; }

    public int getLength() { return this.length; }

    public void setLength(int length) { this.length = length; }

    public int getMaleRoleCount() { return this.maleRoleCount; }

    public void setMaleRoleCount(int maleRoleCount) { this.maleRoleCount = maleRoleCount; }

    public int getFemaleRoleCount() { return this.femaleRoleCount; }

    public void setFemaleRoleCount(int femaleRoleCount) { this.femaleRoleCount = femaleRoleCount; }
}
| apache-2.0 |
breakpoint-au/Hedron | hedron-core/src/main/java/au/com/breakpoint/hedron/core/context/ITransactionScopedService.java | 1632 | // __________________________________
// ______| Copyright 2008-2015 |______
// \ | Breakpoint Pty Limited | /
// \ | http://www.breakpoint.com.au | /
// / |__________________________________| \
// /_________/ \_________\
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
// except in compliance with the License. You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software distributed under the
// License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing
// permissions and limitations under the License.
//
package au.com.breakpoint.hedron.core.context;
/**
* Basic signature of business services executed inside of a transaction scope.
*
* @param <TOutput>
* Type of the output data returned by the business service. By convention, use
* Void if no output data is required, and return null.
*/
public interface ITransactionScopedService<TOutput>
{
/**
* The business service method.
*
* @param scope
* The active transaction scope.
* @return Output data returned by the business service. By convention, if no output
* data is required, return null.
*/
TOutput execute (final ITransactionScope scope);
}
| apache-2.0 |
class Solution {

    /**
     * Mutable width/height pair used while sorting envelopes.
     * Kept public for backward compatibility with existing callers.
     */
    public static class Dot {
        public int w;
        public int h;

        public Dot(int weight, int hight) {
            w = weight;
            h = hight;
        }
    }

    /**
     * Orders by ascending width; equal widths are ordered by descending height
     * so envelopes of the same width can never chain in the LIS step.
     */
    public static class DotComparator implements Comparator<Dot> {
        @Override
        public int compare(Dot arg0, Dot arg1) {
            // Fix: Integer.compare avoids the int-overflow hazard of the
            // original subtraction-based comparison (arg0.w - arg1.w).
            if (arg0.w != arg1.w) {
                return Integer.compare(arg0.w, arg1.w);
            }
            return Integer.compare(arg1.h, arg0.h);
        }
    }

    /**
     * Russian-doll envelopes (LeetCode 354): the maximum number of envelopes
     * that nest strictly in both dimensions.
     *
     * O(n log n): sort by (width asc, height desc), then compute the longest
     * strictly increasing subsequence of heights via patience sorting.
     *
     * @param envelopes array of {width, height} pairs; returns 0 for null or
     *     empty input (only the first row's shape is validated, as before)
     * @return the size of the largest nesting chain
     */
    public int maxEnvelopes(int[][] envelopes) {
        if (envelopes == null || envelopes.length == 0 || envelopes[0] == null || envelopes[0].length != 2) {
            return 0;
        }
        Dot[] dots = new Dot[envelopes.length];
        for (int i = 0; i < envelopes.length; i++) {
            dots[i] = new Dot(envelopes[i][0], envelopes[i][1]);
        }
        Arrays.sort(dots, new DotComparator());

        // ends[k] = smallest tail height of any increasing chain of length k+1;
        // ends[0..size) is strictly increasing, so binarySearch is applicable.
        int[] ends = new int[dots.length];
        ends[0] = dots[0].h;
        int size = 1;
        for (int i = 1; i < dots.length; i++) {
            int pos = Arrays.binarySearch(ends, 0, size, dots[i].h);
            if (pos < 0) {
                pos = -(pos + 1); // insertion point: first entry >= h
            }
            ends[pos] = dots[i].h;
            if (pos == size) {
                size++; // extended the longest chain
            }
        }
        return size;
    }
}
| apache-2.0 |
GwtMaterialDesign/gwt-material-demo | src/main/java/gwt/material/design/demo/client/application/style/themes/ThemesView.java | 1139 | package gwt.material.design.demo.client.application.style.themes;
/*
* #%L
* GwtMaterial
* %%
* Copyright (C) 2015 - 2016 GwtMaterialDesign
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import com.google.gwt.uibinder.client.UiBinder;
import com.google.gwt.user.client.ui.Widget;
import com.gwtplatform.mvp.client.ViewImpl;
import javax.inject.Inject;
/**
 * GWTP view for the "Themes" style page. It carries no logic of its own: the
 * widget tree comes entirely from a UiBinder template (presumably
 * ThemesView.ui.xml next to this class — confirm in the resources).
 */
public class ThemesView extends ViewImpl implements ThemesPresenter.MyView {

    // UiBinder contract tying this view class to its XML template.
    interface Binder extends UiBinder<Widget, ThemesView> {
    }

    @Inject
    ThemesView(Binder uiBinder) {
        // Instantiate the template and install it as this view's root widget.
        initWidget(uiBinder.createAndBindUi(this));
    }
}
| apache-2.0 |
xukun0217/wayMQ | waymq-tools/src/ananas/waymq/tools/counter/DirFinder.java | 2615 | package ananas.waymq.tools.counter;
import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.Properties;
public class DirFinder {

    /** Callback deciding whether a parsed ".waymq" properties file matches. */
    interface PropertiesAcceptor {
        boolean accept(Properties properties);
    }

    // Parent directory of the accepted "events" marker; set by find().
    private File _result_dir;

    /**
     * Locates the "events" node directory of a waymq installation.
     *
     * Walks upward from this class's code-source location until any ".waymq"
     * marker file is found, then searches downward (depth limit 10) from the
     * marker's parent for a ".waymq" file whose "node" property equals
     * "events". The result is exposed via {@link #getResultDir()}.
     *
     * @throws IllegalStateException when either search finds nothing
     *     (the original code dereferenced the null result and crashed with a
     *     bare NullPointerException instead)
     */
    public void find() {
        File base = this.getBase();
        System.out.println("find from base " + base);
        File root = this.findUp(".waymq", base, new PropertiesAcceptor() {
            @Override
            public boolean accept(Properties properties) {
                return true;
            }
        });
        if (root == null) {
            // Fix: report a meaningful error instead of NPE-ing on
            // root.getParentFile() below.
            throw new IllegalStateException("no '.waymq' marker found above " + base);
        }
        File events = this.findDown(".waymq", root.getParentFile(),
                new PropertiesAcceptor() {
                    @Override
                    public boolean accept(Properties properties) {
                        String node = properties.getProperty("node", "default");
                        return node.equals("events");
                    }
                });
        if (events == null) {
            // Fix: same NPE hazard as above when no "events" node exists.
            throw new IllegalStateException(
                    "no '.waymq' file with node=events found under " + root.getParentFile());
        }
        System.out.println("find events in dir " + events);
        this._result_dir = events.getParentFile();
    }

    /**
     * Depth-first search below {@code dir} for a file named {@code target}
     * whose properties satisfy {@code acc}.
     *
     * @return the matching file, or null if none within {@code depthLimit}
     */
    private File findDown(String target, File dir, PropertiesAcceptor acc,
            int depthLimit) {
        if (depthLimit < 0)
            return null;
        // Fix: guard against a null dir (getParentFile() of a filesystem root).
        if (dir == null || !dir.isDirectory())
            return null;
        File file = new File(dir, target);
        if (file.exists()) {
            Properties pro = this.loadProperties(file);
            if (acc.accept(pro))
                return file;
        }
        // Recurse into subdirectories.
        File[] list = dir.listFiles();
        if (list != null) { // Fix: listFiles() returns null on I/O error
            for (File ch : list) {
                if (ch.isDirectory()) {
                    File rlt = this.findDown(target, ch, acc, depthLimit - 1);
                    if (rlt != null)
                        return rlt;
                }
            }
        }
        return null;
    }

    /** Three-argument overload applying the default depth limit of 10. */
    private File findDown(String target, File root, PropertiesAcceptor acc) {
        return this.findDown(target, root, acc, 10);
    }

    /**
     * Walks from {@code dir} up through its ancestors looking for a regular
     * file named {@code target} accepted by {@code acc}.
     *
     * @return the matching file, or null when the filesystem root is passed
     *     (also when {@code dir} is null)
     */
    private File findUp(String target, File dir, PropertiesAcceptor acc) {
        for (; dir != null; dir = dir.getParentFile()) {
            File file = new File(dir, target);
            if (file.exists())
                if (file.isFile()) {
                    Properties pro = this.loadProperties(file);
                    if (acc.accept(pro)) {
                        return file;
                    }
                }
        }
        return null;
    }

    /**
     * Parses {@code file} as a java.util.Properties file. Errors are logged
     * and yield an empty Properties object, preserving the original
     * best-effort behavior (broad catch kept on purpose: load() can also
     * throw runtime exceptions on malformed content).
     */
    private Properties loadProperties(File file) {
        Properties pro = new Properties();
        System.out.println("load properties " + file);
        // Fix: try-with-resources closes the stream even when load() throws;
        // the original skipped in.close() on any exception and leaked it.
        try (InputStream in = new FileInputStream(file)) {
            pro.load(in);
        } catch (Exception e) {
            e.printStackTrace();
        }
        return pro;
    }

    /**
     * @return the directory/jar location this class was loaded from, or null
     *     when the code-source URI cannot be converted
     */
    private File getBase() {
        try {
            URI uri = this.getClass().getProtectionDomain().getCodeSource()
                    .getLocation().toURI();
            return new File(uri);
        } catch (URISyntaxException e) {
            e.printStackTrace();
        }
        return null;
    }

    /** @return the directory computed by {@link #find()}, or null before it ran. */
    public File getResultDir() {
        return this._result_dir;
    }
}
| apache-2.0 |
openwide-java/owsi-core-parent | owsi-core/owsi-core-examples/wicket-showcase/wicket-showcase-core/src/main/java/fr/openwide/core/showcase/core/util/binding/Bindings.java | 615 | package fr.openwide.core.showcase.core.util.binding;
import fr.openwide.core.showcase.core.business.task.model.ShowcaseBatchReportBeanBinding;
import fr.openwide.core.showcase.core.business.user.model.UserBinding;
/**
 * Static holder of the application's singleton *Binding helper instances,
 * accessed through the static factory-style methods below.
 */
public final class Bindings {

    // Utility class: not instantiable.
    private Bindings() { }

    private static final UserBinding USER = new UserBinding();

    private static final ShowcaseBatchReportBeanBinding SHOWCASE_BATCH_REPORT = new ShowcaseBatchReportBeanBinding();

    /** @return the shared binding for User properties. */
    public static UserBinding user() {
        return USER;
    }

    /** @return the shared binding for showcase batch report beans. */
    public static ShowcaseBatchReportBeanBinding showcaseBatchReport() {
        return SHOWCASE_BATCH_REPORT;
    }
}
| apache-2.0 |
qtproject/qtqa-gerrit | javatests/com/google/gerrit/acceptance/server/change/ConsistencyCheckerIT.java | 32802 | // Copyright (C) 2014 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.acceptance.server.change;
import static com.google.common.truth.Truth.assertThat;
import static com.google.gerrit.extensions.common.ProblemInfo.Status.FIXED;
import static com.google.gerrit.extensions.common.ProblemInfo.Status.FIX_FAILED;
import static com.google.gerrit.testing.TestChanges.newPatchSet;
import static java.util.Objects.requireNonNull;
import com.google.gerrit.acceptance.AbstractDaemonTest;
import com.google.gerrit.acceptance.NoHttpd;
import com.google.gerrit.acceptance.TestAccount;
import com.google.gerrit.common.FooterConstants;
import com.google.gerrit.common.Nullable;
import com.google.gerrit.extensions.api.changes.FixInput;
import com.google.gerrit.extensions.client.ChangeStatus;
import com.google.gerrit.extensions.common.ChangeInfo;
import com.google.gerrit.extensions.common.ProblemInfo;
import com.google.gerrit.reviewdb.client.Account;
import com.google.gerrit.reviewdb.client.Change;
import com.google.gerrit.reviewdb.client.PatchSet;
import com.google.gerrit.reviewdb.client.RefNames;
import com.google.gerrit.server.ChangeUtil;
import com.google.gerrit.server.IdentifiedUser;
import com.google.gerrit.server.change.ChangeInserter;
import com.google.gerrit.server.change.ConsistencyChecker;
import com.google.gerrit.server.change.NotifyResolver;
import com.google.gerrit.server.change.PatchSetInserter;
import com.google.gerrit.server.notedb.ChangeNoteUtil;
import com.google.gerrit.server.notedb.ChangeNotes;
import com.google.gerrit.server.notedb.Sequences;
import com.google.gerrit.server.update.BatchUpdate;
import com.google.gerrit.server.update.BatchUpdateOp;
import com.google.gerrit.server.update.ChangeContext;
import com.google.gerrit.server.update.RepoContext;
import com.google.gerrit.server.util.time.TimeUtil;
import com.google.gerrit.testing.TestChanges;
import com.google.inject.Inject;
import com.google.inject.Provider;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.eclipse.jgit.internal.storage.dfs.InMemoryRepository;
import org.eclipse.jgit.junit.TestRepository;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.PersonIdent;
import org.eclipse.jgit.lib.Ref;
import org.eclipse.jgit.lib.RefUpdate;
import org.eclipse.jgit.lib.RefUpdate.Result;
import org.eclipse.jgit.lib.Repository;
import org.eclipse.jgit.revwalk.RevCommit;
import org.junit.Before;
import org.junit.Test;
@NoHttpd
public class ConsistencyCheckerIT extends AbstractDaemonTest {
@Inject private ChangeNotes.Factory changeNotesFactory;
@Inject private Provider<ConsistencyChecker> checkerProvider;
@Inject private IdentifiedUser.GenericFactory userFactory;
@Inject private ChangeInserter.Factory changeInserterFactory;
@Inject private PatchSetInserter.Factory patchSetInserterFactory;
@Inject private ChangeNoteUtil noteUtil;
@Inject private Sequences sequences;
private RevCommit tip;
private Account.Id adminId;
private ConsistencyChecker checker;
private TestRepository<InMemoryRepository> serverSideTestRepo;
/** Opens a server-side handle on the test repo and captures HEAD, the admin id, and the checker. */
@Before
public void setUp() throws Exception {
    serverSideTestRepo =
        new TestRepository<>((InMemoryRepository) repoManager.openRepository(project));
    tip =
        serverSideTestRepo
            .getRevWalk()
            .parseCommit(serverSideTestRepo.getRepository().exactRef("HEAD").getObjectId());
    adminId = admin.id();
    checker = checkerProvider.get();
}
/** A freshly inserted change has no consistency problems. */
@Test
public void validNewChange() throws Exception {
    assertNoProblems(insertChange(), null);
}
/** A change with two patch sets that was properly merged has no problems. */
@Test
public void validMergedChange() throws Exception {
    ChangeNotes notes = mergeChange(incrementPatchSet(insertChange()));
    assertNoProblems(notes, null);
}
/** Deleting the owner's user branch makes the checker report a missing change owner. */
@Test
public void missingOwner() throws Exception {
    TestAccount owner = accountCreator.create("missing");
    ChangeNotes notes = insertChange(owner);
    deleteUserBranch(owner.id());
    assertProblems(notes, null, problem("Missing change owner: " + owner.id()));
}
// No test for ref existing but object missing; InMemoryRepository won't let
// us do such a thing.
/** A patch set whose ref and commit object both do not exist yields two problems (no fix requested). */
@Test
public void patchSetObjectAndRefMissing() throws Exception {
    String rev = "deadbeefdeadbeefdeadbeefdeadbeefdeadbeef";
    ChangeNotes notes = insertChange();
    PatchSet ps = insertMissingPatchSet(notes, rev);
    notes = reload(notes);
    assertProblems(
        notes,
        null,
        problem("Ref missing: " + ps.getId().toRefName()),
        problem("Object missing: patch set 2: deadbeefdeadbeefdeadbeefdeadbeefdeadbeef"));
}
/** With a default FixInput, a missing object/ref is still only reported — no repair is applied. */
@Test
public void patchSetObjectAndRefMissingWithFix() throws Exception {
    String rev = "deadbeefdeadbeefdeadbeefdeadbeefdeadbeef";
    ChangeNotes notes = insertChange();
    PatchSet ps = insertMissingPatchSet(notes, rev);
    notes = reload(notes);
    String refName = ps.getId().toRefName();
    assertProblems(
        notes,
        new FixInput(),
        problem("Ref missing: " + refName),
        problem("Object missing: patch set 2: " + rev));
}
/** Reports a missing patch set ref when the commit object still exists (kept alive via another ref). */
@Test
public void patchSetRefMissing() throws Exception {
    ChangeNotes notes = insertChange();
    serverSideTestRepo.update(
        "refs/other/foo", ObjectId.fromString(psUtil.current(notes).getRevision().get()));
    String refName = notes.getChange().currentPatchSetId().toRefName();
    deleteRef(refName);
    assertProblems(notes, null, problem("Ref missing: " + refName));
}
/** With fix enabled, a missing patch set ref is recreated to point at the still-existing commit. */
@Test
public void patchSetRefMissingWithFix() throws Exception {
    ChangeNotes notes = insertChange();
    String rev = psUtil.current(notes).getRevision().get();
    serverSideTestRepo.update("refs/other/foo", ObjectId.fromString(rev));
    String refName = notes.getChange().currentPatchSetId().toRefName();
    deleteRef(refName);
    assertProblems(
        notes, new FixInput(), problem("Ref missing: " + refName, FIXED, "Repaired patch set ref"));
    // The repaired ref must point back at the original commit.
    assertThat(serverSideTestRepo.getRepository().exactRef(refName).getObjectId().name())
        .isEqualTo(rev);
}
/** deletePatchSetIfCommitMissing removes the broken patch set and falls back to the previous one. */
@Test
public void patchSetObjectAndRefMissingWithDeletingPatchSet() throws Exception {
    ChangeNotes notes = insertChange();
    PatchSet ps1 = psUtil.current(notes);
    String rev2 = "deadbeefdeadbeefdeadbeefdeadbeefdeadbeef";
    PatchSet ps2 = insertMissingPatchSet(notes, rev2);
    notes = reload(notes);
    FixInput fix = new FixInput();
    fix.deletePatchSetIfCommitMissing = true;
    assertProblems(
        notes,
        fix,
        problem("Ref missing: " + ps2.getId().toRefName()),
        problem("Object missing: patch set 2: " + rev2, FIXED, "Deleted patch set"));
    notes = reload(notes);
    // Patch set 1 survives; the broken patch set 2 is gone.
    assertThat(notes.getChange().currentPatchSetId().get()).isEqualTo(1);
    assertThat(psUtil.get(notes, ps1.getId())).isNotNull();
    assertThat(psUtil.get(notes, ps2.getId())).isNull();
}
/** Several broken patch sets (2 and 4) are deleted in one pass; current falls back to intact ps3. */
@Test
public void patchSetMultipleObjectsMissingWithDeletingPatchSets() throws Exception {
    ChangeNotes notes = insertChange();
    PatchSet ps1 = psUtil.current(notes);
    String rev2 = "deadbeefdeadbeefdeadbeefdeadbeefdeadbeef";
    PatchSet ps2 = insertMissingPatchSet(notes, rev2);
    notes = incrementPatchSet(reload(notes));
    PatchSet ps3 = psUtil.current(notes);
    String rev4 = "c0ffeeeec0ffeeeec0ffeeeec0ffeeeec0ffeeee";
    PatchSet ps4 = insertMissingPatchSet(notes, rev4);
    notes = reload(notes);
    FixInput fix = new FixInput();
    fix.deletePatchSetIfCommitMissing = true;
    assertProblems(
        notes,
        fix,
        problem("Ref missing: " + ps2.getId().toRefName()),
        problem("Object missing: patch set 2: " + rev2, FIXED, "Deleted patch set"),
        problem("Ref missing: " + ps4.getId().toRefName()),
        problem("Object missing: patch set 4: " + rev4, FIXED, "Deleted patch set"));
    notes = reload(notes);
    assertThat(notes.getChange().currentPatchSetId().get()).isEqualTo(3);
    assertThat(psUtil.get(notes, ps1.getId())).isNotNull();
    assertThat(psUtil.get(notes, ps2.getId())).isNull();
    assertThat(psUtil.get(notes, ps3.getId())).isNotNull();
    assertThat(psUtil.get(notes, ps4.getId())).isNull();
}
/** The fixer refuses to delete the only patch set of a change (FIX_FAILED), leaving ps1 in place. */
@Test
public void onlyPatchSetObjectMissingWithFix() throws Exception {
    // Build the change's NoteDb state by hand so its single patch set points
    // at a commit that does not exist in the repository.
    Change c = TestChanges.newChange(project, admin.id(), sequences.nextChangeId());
    PatchSet.Id psId = c.currentPatchSetId();
    String rev = "deadbeefdeadbeefdeadbeefdeadbeefdeadbeef";
    PatchSet ps = newPatchSet(psId, rev, adminId);
    addNoteDbCommit(
        c.getId(),
        "Create change\n"
            + "\n"
            + "Patch-set: 1\n"
            + "Branch: "
            + c.getDest().get()
            + "\n"
            + "Change-id: "
            + c.getKey().get()
            + "\n"
            + "Subject: Bogus subject\n"
            + "Commit: "
            + rev
            + "\n"
            + "Groups: "
            + rev
            + "\n");
    indexer.index(c.getProject(), c.getId());
    ChangeNotes notes = changeNotesFactory.create(c.getProject(), c.getId());
    FixInput fix = new FixInput();
    fix.deletePatchSetIfCommitMissing = true;
    assertProblems(
        notes,
        fix,
        problem("Ref missing: " + ps.getId().toRefName()),
        problem(
            "Object missing: patch set 1: " + rev,
            FIX_FAILED,
            "Cannot delete patch set; no patch sets would remain"));
    notes = reload(notes);
    assertThat(notes.getChange().currentPatchSetId().get()).isEqualTo(1);
    assertThat(psUtil.current(notes)).isNotNull();
}
/** Two patch sets pointing at the same commit are reported as duplicates. */
@Test
public void duplicatePatchSetRevisions() throws Exception {
    ChangeNotes notes = insertChange();
    PatchSet ps1 = psUtil.current(notes);
    String rev = ps1.getRevision().get();
    notes =
        incrementPatchSet(
            notes, serverSideTestRepo.getRevWalk().parseCommit(ObjectId.fromString(rev)));
    assertProblems(notes, null, problem("Multiple patch sets pointing to " + rev + ": [1, 2]"));
}
/** Deleting the destination branch makes the checker flag the missing dest ref. */
@Test
public void missingDestRef() throws Exception {
    ChangeNotes notes = insertChange();
    String ref = "refs/heads/master";
    // Detach head so we're allowed to delete ref.
    serverSideTestRepo.reset(serverSideTestRepo.getRepository().exactRef(ref).getObjectId());
    RefUpdate ru = serverSideTestRepo.getRepository().updateRef(ref);
    ru.setForceUpdate(true);
    assertThat(ru.delete()).isEqualTo(RefUpdate.Result.FORCED);
    assertProblems(notes, null, problem("Destination ref not found (may be new branch): " + ref));
}
/** A change forced to MERGED status whose commit is not on the dest branch is flagged. */
@Test
public void mergedChangeIsNotMerged() throws Exception {
    ChangeNotes notes = insertChange();
    // Flip the change status to MERGED without actually merging the commit.
    try (BatchUpdate bu = newUpdate(adminId)) {
        bu.addOp(
            notes.getChangeId(),
            new BatchUpdateOp() {
                @Override
                public boolean updateChange(ChangeContext ctx) {
                    ctx.getChange().setStatus(Change.Status.MERGED);
                    ctx.getUpdate(ctx.getChange().currentPatchSetId()).fixStatus(Change.Status.MERGED);
                    return true;
                }
            });
        bu.execute();
    }
    notes = reload(notes);
    String rev = psUtil.current(notes).getRevision().get();
    ObjectId tip = getDestRef(notes);
    assertProblems(
        notes,
        null,
        problem(
            "Patch set 1 ("
                + rev
                + ") is not merged into destination ref"
                + " refs/heads/master ("
                + tip.name()
                + "), but change status is MERGED"));
}
/** A NEW change whose commit already sits on the destination branch is flagged (no fix). */
@Test
public void newChangeIsMerged() throws Exception {
    ChangeNotes notes = insertChange();
    String rev = psUtil.current(notes).getRevision().get();
    serverSideTestRepo
        .branch(notes.getChange().getDest().get())
        .update(serverSideTestRepo.getRevWalk().parseCommit(ObjectId.fromString(rev)));
    assertProblems(
        notes,
        null,
        problem(
            "Patch set 1 ("
                + rev
                + ") is merged into destination ref"
                + " refs/heads/master ("
                + rev
                + "), but change status is NEW"));
}
/** Same as newChangeIsMerged, but with FixInput the change is marked merged and becomes clean. */
@Test
public void newChangeIsMergedWithFix() throws Exception {
    ChangeNotes notes = insertChange();
    String rev = psUtil.current(notes).getRevision().get();
    serverSideTestRepo
        .branch(notes.getChange().getDest().get())
        .update(serverSideTestRepo.getRevWalk().parseCommit(ObjectId.fromString(rev)));
    assertProblems(
        notes,
        new FixInput(),
        problem(
            "Patch set 1 ("
                + rev
                + ") is merged into destination ref"
                + " refs/heads/master ("
                + rev
                + "), but change status is NEW",
            FIXED,
            "Marked change as merged"));
    notes = reload(notes);
    assertThat(notes.getChange().isMerged()).isTrue();
    assertNoProblems(notes, null);
}
/** The REST extension API reflects the MERGED status immediately after check-with-fix. */
@Test
public void extensionApiReturnsUpdatedValueAfterFix() throws Exception {
    ChangeNotes notes = insertChange();
    String rev = psUtil.current(notes).getRevision().get();
    serverSideTestRepo
        .branch(notes.getChange().getDest().get())
        .update(serverSideTestRepo.getRevWalk().parseCommit(ObjectId.fromString(rev)));
    ChangeInfo info = gApi.changes().id(notes.getChangeId().get()).info();
    assertThat(info.status).isEqualTo(ChangeStatus.NEW);
    info = gApi.changes().id(notes.getChangeId().get()).check(new FixInput());
    assertThat(info.status).isEqualTo(ChangeStatus.MERGED);
}
/** expectMergedAs pointing at the current patch set's commit marks the change merged. */
@Test
public void expectedMergedCommitIsLatestPatchSet() throws Exception {
    ChangeNotes notes = insertChange();
    String rev = psUtil.current(notes).getRevision().get();
    serverSideTestRepo
        .branch(notes.getChange().getDest().get())
        .update(serverSideTestRepo.getRevWalk().parseCommit(ObjectId.fromString(rev)));
    FixInput fix = new FixInput();
    fix.expectMergedAs = rev;
    assertProblems(
        notes,
        fix,
        problem(
            "Patch set 1 ("
                + rev
                + ") is merged into destination ref"
                + " refs/heads/master ("
                + rev
                + "), but change status is NEW",
            FIXED,
            "Marked change as merged"));
    notes = reload(notes);
    assertThat(notes.getChange().isMerged()).isTrue();
    assertNoProblems(notes, null);
}
/** expectMergedAs naming a commit NOT reachable from the dest branch is reported as a problem. */
@Test
public void expectedMergedCommitNotMergedIntoDestination() throws Exception {
    ChangeNotes notes = insertChange();
    String rev = psUtil.current(notes).getRevision().get();
    RevCommit commit = serverSideTestRepo.getRevWalk().parseCommit(ObjectId.fromString(rev));
    serverSideTestRepo.branch(notes.getChange().getDest().get()).update(commit);
    FixInput fix = new FixInput();
    // A dangling commit with the same message, never merged anywhere.
    RevCommit other = serverSideTestRepo.commit().message(commit.getFullMessage()).create();
    fix.expectMergedAs = other.name();
    assertProblems(
        notes,
        fix,
        problem(
            "Expected merged commit "
                + other.name()
                + " is not merged into destination ref refs/heads/master"
                + " ("
                + commit.name()
                + ")"));
}
/** A merged commit without a Change-Id footer is attached to the change as a new patch set. */
@Test
public void createNewPatchSetForExpectedMergeCommitWithNoChangeId() throws Exception {
    ChangeNotes notes = insertChange();
    String dest = notes.getChange().getDest().get();
    String rev = psUtil.current(notes).getRevision().get();
    RevCommit commit = serverSideTestRepo.getRevWalk().parseCommit(ObjectId.fromString(rev));
    // Commit with the same subject but no Change-Id footer, merged to dest.
    RevCommit mergedAs =
        serverSideTestRepo
            .commit()
            .parent(commit.getParent(0))
            .message(commit.getShortMessage())
            .create();
    serverSideTestRepo.getRevWalk().parseBody(mergedAs);
    assertThat(mergedAs.getFooterLines(FooterConstants.CHANGE_ID)).isEmpty();
    serverSideTestRepo.update(dest, mergedAs);
    assertNoProblems(notes, null);
    FixInput fix = new FixInput();
    fix.expectMergedAs = mergedAs.name();
    assertProblems(
        notes,
        fix,
        problem(
            "No patch set found for merged commit " + mergedAs.name(),
            FIXED,
            "Marked change as merged"),
        problem(
            "Expected merged commit " + mergedAs.name() + " has no associated patch set",
            FIXED,
            "Inserted as patch set 2"));
    notes = reload(notes);
    PatchSet.Id psId2 = new PatchSet.Id(notes.getChangeId(), 2);
    assertThat(notes.getChange().currentPatchSetId()).isEqualTo(psId2);
    assertThat(psUtil.get(notes, psId2).getRevision().get()).isEqualTo(mergedAs.name());
    assertNoProblems(notes, null);
}
/** Same as the no-Change-Id variant, but the merged commit carries this change's Change-Id footer. */
@Test
public void createNewPatchSetForExpectedMergeCommitWithChangeId() throws Exception {
    ChangeNotes notes = insertChange();
    String dest = notes.getChange().getDest().get();
    String rev = psUtil.current(notes).getRevision().get();
    RevCommit commit = serverSideTestRepo.getRevWalk().parseCommit(ObjectId.fromString(rev));
    RevCommit mergedAs =
        serverSideTestRepo
            .commit()
            .parent(commit.getParent(0))
            .message(
                commit.getShortMessage()
                    + "\n"
                    + "\n"
                    + "Change-Id: "
                    + notes.getChange().getKey().get()
                    + "\n")
            .create();
    serverSideTestRepo.getRevWalk().parseBody(mergedAs);
    assertThat(mergedAs.getFooterLines(FooterConstants.CHANGE_ID))
        .containsExactly(notes.getChange().getKey().get());
    serverSideTestRepo.update(dest, mergedAs);
    assertNoProblems(notes, null);
    FixInput fix = new FixInput();
    fix.expectMergedAs = mergedAs.name();
    assertProblems(
        notes,
        fix,
        problem(
            "No patch set found for merged commit " + mergedAs.name(),
            FIXED,
            "Marked change as merged"),
        problem(
            "Expected merged commit " + mergedAs.name() + " has no associated patch set",
            FIXED,
            "Inserted as patch set 2"));
    notes = reload(notes);
    PatchSet.Id psId2 = new PatchSet.Id(notes.getChangeId(), 2);
    assertThat(notes.getChange().currentPatchSetId()).isEqualTo(psId2);
    assertThat(psUtil.get(notes, psId2).getRevision().get()).isEqualTo(mergedAs.name());
    assertNoProblems(notes, null);
}
/** When ps1's commit is what got merged, the fixer deletes ps1 and re-inserts its commit as ps3. */
@Test
public void expectedMergedCommitIsOldPatchSetOfSameChange() throws Exception {
    ChangeNotes notes = insertChange();
    PatchSet ps1 = psUtil.current(notes);
    String rev1 = ps1.getRevision().get();
    notes = incrementPatchSet(notes);
    PatchSet ps2 = psUtil.current(notes);
    // Merge the OLD patch set's commit into the destination branch.
    serverSideTestRepo
        .branch(notes.getChange().getDest().get())
        .update(serverSideTestRepo.getRevWalk().parseCommit(ObjectId.fromString(rev1)));
    FixInput fix = new FixInput();
    fix.expectMergedAs = rev1;
    assertProblems(
        notes,
        fix,
        problem("No patch set found for merged commit " + rev1, FIXED, "Marked change as merged"),
        problem(
            "Expected merge commit "
                + rev1
                + " corresponds to patch set 1,"
                + " not the current patch set 2",
            FIXED,
            "Deleted patch set"),
        problem(
            "Expected merge commit "
                + rev1
                + " corresponds to patch set 1,"
                + " not the current patch set 2",
            FIXED,
            "Inserted as patch set 3"));
    notes = reload(notes);
    PatchSet.Id psId3 = new PatchSet.Id(notes.getChangeId(), 3);
    assertThat(notes.getChange().currentPatchSetId()).isEqualTo(psId3);
    assertThat(notes.getChange().isMerged()).isTrue();
    // ps1 was deleted; ps2 remains; ps3 now carries the merged commit.
    assertThat(psUtil.byChangeAsMap(notes).keySet()).containsExactly(ps2.getId(), psId3);
    assertThat(psUtil.get(notes, psId3).getRevision().get()).isEqualTo(rev1);
}
/** A dangling ps2 ref (no patch set entity) whose commit was merged gets re-inserted as ps4. */
@Test
public void expectedMergedCommitIsDanglingPatchSetOlderThanCurrent() throws Exception {
    ChangeNotes notes = insertChange();
    PatchSet ps1 = psUtil.current(notes);

    // Create dangling ref so next ID in the database becomes 3.
    PatchSet.Id psId2 = new PatchSet.Id(notes.getChangeId(), 2);
    RevCommit commit2 = patchSetCommit(psId2);
    String rev2 = commit2.name();
    serverSideTestRepo.branch(psId2.toRefName()).update(commit2);

    notes = incrementPatchSet(notes);
    PatchSet ps3 = psUtil.current(notes);
    assertThat(ps3.getId().get()).isEqualTo(3);

    // Merge the dangling ps2 commit into the destination branch.
    serverSideTestRepo
        .branch(notes.getChange().getDest().get())
        .update(serverSideTestRepo.getRevWalk().parseCommit(ObjectId.fromString(rev2)));
    FixInput fix = new FixInput();
    fix.expectMergedAs = rev2;
    assertProblems(
        notes,
        fix,
        problem("No patch set found for merged commit " + rev2, FIXED, "Marked change as merged"),
        problem(
            "Expected merge commit "
                + rev2
                + " corresponds to patch set 2,"
                + " not the current patch set 3",
            FIXED,
            "Deleted patch set"),
        problem(
            "Expected merge commit "
                + rev2
                + " corresponds to patch set 2,"
                + " not the current patch set 3",
            FIXED,
            "Inserted as patch set 4"));
    notes = reload(notes);
    PatchSet.Id psId4 = new PatchSet.Id(notes.getChangeId(), 4);
    assertThat(notes.getChange().currentPatchSetId()).isEqualTo(psId4);
    assertThat(notes.getChange().isMerged()).isTrue();
    assertThat(psUtil.byChangeAsMap(notes).keySet())
        .containsExactly(ps1.getId(), ps3.getId(), psId4);
    assertThat(psUtil.get(notes, psId4).getRevision().get()).isEqualTo(rev2);
}
/** A dangling ps2 ref newer than the current ps1 is adopted as the real patch set 2. */
@Test
public void expectedMergedCommitIsDanglingPatchSetNewerThanCurrent() throws Exception {
    ChangeNotes notes = insertChange();
    PatchSet ps1 = psUtil.current(notes);

    // Create dangling ref with no patch set.
    PatchSet.Id psId2 = new PatchSet.Id(notes.getChangeId(), 2);
    RevCommit commit2 = patchSetCommit(psId2);
    String rev2 = commit2.name();
    serverSideTestRepo.branch(psId2.toRefName()).update(commit2);

    serverSideTestRepo
        .branch(notes.getChange().getDest().get())
        .update(serverSideTestRepo.getRevWalk().parseCommit(ObjectId.fromString(rev2)));
    FixInput fix = new FixInput();
    fix.expectMergedAs = rev2;
    assertProblems(
        notes,
        fix,
        problem("No patch set found for merged commit " + rev2, FIXED, "Marked change as merged"),
        problem(
            "Expected merge commit "
                + rev2
                + " corresponds to patch set 2,"
                + " not the current patch set 1",
            FIXED,
            "Inserted as patch set 2"));
    notes = reload(notes);
    assertThat(notes.getChange().currentPatchSetId()).isEqualTo(psId2);
    assertThat(notes.getChange().isMerged()).isTrue();
    assertThat(psUtil.byChangeAsMap(notes).keySet()).containsExactly(ps1.getId(), psId2);
    assertThat(psUtil.get(notes, psId2).getRevision().get()).isEqualTo(rev2);
}
@Test
public void expectedMergedCommitWithMismatchedChangeId() throws Exception {
  // The commit named in expectMergedAs carries a Change-Id footer that does
  // not match the change's key; the checker must report this as a problem
  // without any FIXED status/outcome.
  ChangeNotes notes = insertChange();
  String dest = notes.getChange().getDest().get();
  RevCommit parent = serverSideTestRepo.branch(dest).commit().message("parent").create();
  String rev = psUtil.current(notes).getRevision().get();
  RevCommit commit = serverSideTestRepo.getRevWalk().parseCommit(ObjectId.fromString(rev));
  serverSideTestRepo.branch(dest).update(commit);

  // Craft a commit on top of "parent" whose Change-Id footer is all zeros.
  String badId = "I0000000000000000000000000000000000000000";
  RevCommit mergedAs =
      serverSideTestRepo
          .commit()
          .parent(parent)
          .message(commit.getShortMessage() + "\n\nChange-Id: " + badId + "\n")
          .create();
  serverSideTestRepo.getRevWalk().parseBody(mergedAs);
  assertThat(mergedAs.getFooterLines(FooterConstants.CHANGE_ID)).containsExactly(badId);
  serverSideTestRepo.update(dest, mergedAs);

  // Without a fix request the change is reported as consistent.
  assertNoProblems(notes, null);

  FixInput fix = new FixInput();
  fix.expectMergedAs = mergedAs.name();
  assertProblems(
      notes,
      fix,
      problem(
          "Expected merged commit "
              + mergedAs.name()
              + " has Change-Id: "
              + badId
              + ", but expected "
              + notes.getChange().getKey().get()));
}
@Test
public void expectedMergedCommitMatchesMultiplePatchSets() throws Exception {
  // Three changes end up sharing the same commit as a patch set, making
  // expectMergedAs ambiguous; the checker must list all matching patch sets
  // instead of applying a fix.
  ChangeNotes notes1 = insertChange();
  PatchSet.Id psId1 = psUtil.current(notes1).getId();
  String dest = notes1.getChange().getDest().get();
  String rev = psUtil.current(notes1).getRevision().get();
  RevCommit commit = serverSideTestRepo.getRevWalk().parseCommit(ObjectId.fromString(rev));
  serverSideTestRepo.branch(dest).update(commit);

  // Two more changes whose second patch set reuses the same commit.
  ChangeNotes notes2 = insertChange();
  notes2 = incrementPatchSet(notes2, commit);
  PatchSet.Id psId2 = psUtil.current(notes2).getId();

  ChangeNotes notes3 = insertChange();
  notes3 = incrementPatchSet(notes3, commit);
  PatchSet.Id psId3 = psUtil.current(notes3).getId();

  FixInput fix = new FixInput();
  fix.expectMergedAs = commit.name();
  assertProblems(
      notes1,
      fix,
      problem(
          "Multiple patch sets for expected merged commit "
              + commit.name()
              + ": ["
              + psId1
              + ", "
              + psId2
              + ", "
              + psId3
              + "]"));
}
/** Opens a BatchUpdate on the test project, acting as {@code owner}, stamped with the current time. */
private BatchUpdate newUpdate(Account.Id owner) {
  return batchUpdateFactory.create(project, userFactory.create(owner), TimeUtil.nowTs());
}
/** Inserts a new change owned by the admin test account. */
private ChangeNotes insertChange() throws Exception {
  return insertChange(admin);
}
/** Inserts a new change owned by {@code owner}, targeting refs/heads/master. */
private ChangeNotes insertChange(TestAccount owner) throws Exception {
  return insertChange(owner, "refs/heads/master");
}
/**
 * Inserts a new change with a single patch set.
 *
 * <p>Validation, revision-created events and e-mail are disabled so tests
 * can construct deliberately inconsistent states without interference.
 *
 * @param owner account that will own the change.
 * @param dest destination ref, e.g. {@code refs/heads/master}.
 * @return freshly loaded notes for the new change.
 */
private ChangeNotes insertChange(TestAccount owner, String dest) throws Exception {
  Change.Id id = new Change.Id(sequences.nextChangeId());
  ChangeInserter ins;
  try (BatchUpdate bu = newUpdate(owner.id())) {
    RevCommit commit = patchSetCommit(new PatchSet.Id(id, 1));
    bu.setNotify(NotifyResolver.Result.none());
    ins =
        changeInserterFactory
            .create(id, commit, dest)
            .setValidate(false)
            .setFireRevisionCreated(false)
            .setSendMail(false);
    bu.insertChange(ins).execute();
  }
  return changeNotesFactory.create(project, ins.getChange().getId());
}
/** Computes the next unused patch-set ID, also considering refs already present in the repo. */
private PatchSet.Id nextPatchSetId(ChangeNotes notes) throws Exception {
  return ChangeUtil.nextPatchSetId(
      serverSideTestRepo.getRepository(), notes.getChange().currentPatchSetId());
}
/** Adds a new patch set backed by a freshly created commit. */
private ChangeNotes incrementPatchSet(ChangeNotes notes) throws Exception {
  return incrementPatchSet(notes, patchSetCommit(nextPatchSetId(notes)));
}
/**
 * Adds {@code commit} as the change's next patch set (validation and events
 * disabled) and returns reloaded notes.
 */
private ChangeNotes incrementPatchSet(ChangeNotes notes, RevCommit commit) throws Exception {
  PatchSetInserter ins;
  try (BatchUpdate bu = newUpdate(notes.getChange().getOwner())) {
    bu.setNotify(NotifyResolver.Result.none());
    ins =
        patchSetInserterFactory
            .create(notes, nextPatchSetId(notes), commit)
            .setValidate(false)
            .setFireRevisionCreated(false);
    bu.addOp(notes.getChangeId(), ins).execute();
  }
  return reload(notes);
}
/** Re-reads the change's notes from storage, discarding any stale in-memory state. */
private ChangeNotes reload(ChangeNotes notes) throws Exception {
  return changeNotesFactory.create(notes.getChange().getProject(), notes.getChangeId());
}
/** Creates (and parses the body of) a new commit on top of {@code tip} for the given patch-set ID. */
private RevCommit patchSetCommit(PatchSet.Id psId) throws Exception {
  RevCommit c = serverSideTestRepo.commit().parent(tip).message("Change " + psId).create();
  return serverSideTestRepo.parseBody(c);
}
/**
 * Records a patch set in NoteDb pointing at {@code rev} without creating the
 * corresponding patch-set ref, producing a "missing commit" inconsistency
 * for the checker to detect.
 *
 * <p>The meta ref is written by hand (footer format parsed by ChangeNotes)
 * so the usual consistency guarantees are bypassed.
 */
private PatchSet insertMissingPatchSet(ChangeNotes notes, String rev) throws Exception {
  // Don't use BatchUpdate since we're manually updating the meta ref rather
  // than using ChangeUpdate.
  String subject = "Subject for missing commit";
  Change c = new Change(notes.getChange());
  PatchSet.Id psId = nextPatchSetId(notes);
  c.setCurrentPatchSet(psId, subject, c.getOriginalSubject());
  PatchSet ps = newPatchSet(psId, rev, adminId);

  // Hand-rolled NoteDb commit message with Patch-set/Commit/Subject footers.
  addNoteDbCommit(
      c.getId(),
      "Update patch set "
          + psId.get()
          + "\n"
          + "\n"
          + "Patch-set: "
          + psId.get()
          + "\n"
          + "Commit: "
          + rev
          + "\n"
          + "Subject: "
          + subject
          + "\n");
  indexer.index(c.getProject(), c.getId());

  return ps;
}
/** Force-deletes {@code refName} from the server-side repo, asserting that the deletion happened. */
private void deleteRef(String refName) throws Exception {
  RefUpdate refUpdate = serverSideTestRepo.getRepository().updateRef(refName, true);
  refUpdate.setForceUpdate(true);
  RefUpdate.Result result = refUpdate.delete();
  assertThat(result).isEqualTo(RefUpdate.Result.FORCED);
}
/**
 * Appends a commit with the given message directly to the change's NoteDb
 * meta ref, authored as the admin account (with the committer's timestamp)
 * and committed by the server identity.
 */
private void addNoteDbCommit(Change.Id id, String commitMessage) throws Exception {
  PersonIdent committer = serverIdent.get();
  PersonIdent author = noteUtil.newIdent(getAccount(admin.id()), committer.getWhen(), committer);
  serverSideTestRepo
      .branch(RefNames.changeMetaRef(id))
      .commit()
      .author(author)
      .committer(committer)
      .message(commitMessage)
      .create();
}
/** Returns the object currently pointed to by the change's destination ref. */
private ObjectId getDestRef(ChangeNotes notes) throws Exception {
  String destRefName = notes.getChange().getDest().get();
  Ref destRef = serverSideTestRepo.getRepository().exactRef(destRefName);
  return destRef.getObjectId();
}
/**
 * Marks the change merged: fast-forwards the destination branch to the
 * current patch set's commit and flips the change status to MERGED within
 * the same batch update.
 */
private ChangeNotes mergeChange(ChangeNotes notes) throws Exception {
  final ObjectId oldId = getDestRef(notes);
  final ObjectId newId = ObjectId.fromString(psUtil.current(notes).getRevision().get());
  final String dest = notes.getChange().getDest().get();

  try (BatchUpdate bu = newUpdate(adminId)) {
    bu.addOp(
        notes.getChangeId(),
        new BatchUpdateOp() {
          @Override
          public void updateRepo(RepoContext ctx) throws IOException {
            // Move the destination branch from its old tip to the patch
            // set's commit.
            ctx.addRefUpdate(oldId, newId, dest);
          }

          @Override
          public boolean updateChange(ChangeContext ctx) {
            ctx.getChange().setStatus(Change.Status.MERGED);
            ctx.getUpdate(ctx.getChange().currentPatchSetId()).fixStatus(Change.Status.MERGED);
            return true;
          }
        });
    bu.execute();
  }
  return reload(notes);
}
/** Builds a ProblemInfo carrying only a message (no status or outcome). */
private static ProblemInfo problem(String message) {
  ProblemInfo info = new ProblemInfo();
  info.message = message;
  return info;
}
/** Builds a ProblemInfo with a message plus mandatory (non-null) status and outcome. */
private static ProblemInfo problem(String message, ProblemInfo.Status status, String outcome) {
  ProblemInfo info = problem(message);
  info.status = requireNonNull(status);
  info.outcome = requireNonNull(outcome);
  return info;
}
/**
 * Runs the checker (optionally applying {@code fix}) and asserts that
 * exactly the given problems are reported, in order.
 */
private void assertProblems(
    ChangeNotes notes, @Nullable FixInput fix, ProblemInfo first, ProblemInfo... rest)
    throws Exception {
  List<ProblemInfo> expectedProblems = new ArrayList<>(Arrays.asList(rest));
  expectedProblems.add(0, first);
  assertThat(checker.check(notes, fix).problems())
      .containsExactlyElementsIn(expectedProblems)
      .inOrder();
}
/** Runs the checker (optionally applying {@code fix}) and asserts it reports no problems. */
private void assertNoProblems(ChangeNotes notes, @Nullable FixInput fix) throws Exception {
  assertThat(checker.check(notes, fix).problems()).isEmpty();
}
/**
 * Deletes the user branch for {@code accountId} from the All-Users repo, if
 * it exists.
 *
 * @throws IOException if the ref exists but could not be force-deleted.
 */
private void deleteUserBranch(Account.Id accountId) throws IOException {
  String refName = RefNames.refsUsers(accountId);
  try (Repository repo = repoManager.openRepository(allUsers)) {
    Ref userRef = repo.exactRef(refName);
    if (userRef == null) {
      return; // Nothing to delete.
    }
    RefUpdate refUpdate = repo.updateRef(refName);
    refUpdate.setExpectedOldObjectId(userRef.getObjectId());
    refUpdate.setNewObjectId(ObjectId.zeroId());
    refUpdate.setForceUpdate(true);
    Result result = refUpdate.delete();
    if (result != Result.FORCED) {
      throw new IOException(String.format("Failed to delete ref %s: %s", refName, result.name()));
    }
  }
}
}
| apache-2.0 |
yatatsu/conference-app-2017 | app/src/main/java/io/github/droidkaigi/confsched2017/repository/sessions/SessionsRepository.java | 3467 | package io.github.droidkaigi.confsched2017.repository.sessions;
import android.support.annotation.VisibleForTesting;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import javax.inject.Inject;
import javax.inject.Singleton;
import io.github.droidkaigi.confsched2017.model.Session;
import io.reactivex.Maybe;
import io.reactivex.Single;
/**
 * Repository serving {@link Session}s from a three-level hierarchy: an
 * in-memory cache, a local (persisted) source and a remote (network) source.
 *
 * <p>A "dirty" flag forces the next read to bypass the cache and local store
 * and fetch from the remote source; successful fetches refresh the cache and
 * clear the flag.
 */
@Singleton
public class SessionsRepository implements SessionsDataSource {

    private final SessionsLocalDataSource localDataSource;

    private final SessionsRemoteDataSource remoteDataSource;

    // In-memory cache keyed by session id; LinkedHashMap preserves the
    // order in which sessions were cached.
    @VisibleForTesting Map<Integer, Session> cachedSessions;

    // When true, the next read skips the cache and local store.
    private boolean isDirty;

    @Inject
    public SessionsRepository(SessionsLocalDataSource localDataSource, SessionsRemoteDataSource remoteDataSource) {
        this.localDataSource = localDataSource;
        this.remoteDataSource = remoteDataSource;
        this.cachedSessions = new LinkedHashMap<>();
        this.isDirty = true;
    }

    /**
     * Emits all sessions: from the in-memory cache when populated and clean,
     * otherwise from the remote source (if dirty) or the local store.
     */
    @Override
    public Single<List<Session>> findAll(String languageId) {
        if (hasCacheSessions()) {
            // Deferred: the snapshot of cached values is taken at subscribe
            // time, not at call time.
            return Single.create(emitter -> {
                emitter.onSuccess(new ArrayList<>(cachedSessions.values()));
            });
        }

        if (isDirty) {
            return findAllFromRemote(languageId);
        } else {
            return findAllFromLocal(languageId);
        }
    }

    /**
     * Emits the session with the given id if present: from the cache when
     * clean, otherwise from the remote source (if dirty) or the local store.
     */
    @Override
    public Maybe<Session> find(int sessionId, String languageId) {
        if (hasCacheSession(sessionId)) {
            return Maybe.create(emitter -> {
                emitter.onSuccess(cachedSessions.get(sessionId));
            });
        }

        if (isDirty) {
            return remoteDataSource.find(sessionId, languageId);
        } else {
            return localDataSource.find(sessionId, languageId);
        }
    }

    /** Persists the given sessions asynchronously into the local store. */
    @Override
    public void updateAllAsync(List<Session> sessions) {
        localDataSource.updateAllAsync(sessions);
    }

    // Local path; falls back to the remote source when the local store is
    // empty, otherwise refreshes the cache with the local data.
    private Single<List<Session>> findAllFromLocal(String languageId) {
        return localDataSource.findAll(languageId)
                .flatMap(sessions -> {
                    if (sessions.isEmpty()) {
                        return findAllFromRemote(languageId);
                    } else {
                        refreshCache(sessions);
                        return Single.create(emitter -> emitter.onSuccess(sessions));
                    }
                });
    }

    // Remote path; refreshes the cache and writes through to the local store.
    private Single<List<Session>> findAllFromRemote(String languageId) {
        return remoteDataSource.findAll(languageId)
                .map(sessions -> {
                    refreshCache(sessions);
                    updateAllAsync(sessions);
                    return sessions;
                });
    }

    // Replaces the cache contents with the given sessions and clears the
    // dirty flag. Null-tolerant because the cache field is test-visible.
    private void refreshCache(List<Session> sessions) {
        if (cachedSessions == null) {
            cachedSessions = new LinkedHashMap<>();
        }
        cachedSessions.clear();
        for (Session session : sessions) {
            cachedSessions.put(session.id, session);
        }
        isDirty = false;
    }

    /**
     * Marks the repository dirty (or clean). While dirty, the next read
     * bypasses the cache and local store and fetches from the remote source.
     */
    public void setIsDirty(boolean isDirty) {
        this.isDirty = isDirty;
    }

    /**
     * @deprecated Misspelled alias of {@link #setIsDirty(boolean)} ("Id" was
     *             a typo for "Is"); kept for backward compatibility.
     */
    @Deprecated
    public void setIdDirty(boolean isDirty) {
        setIsDirty(isDirty);
    }

    // True when the cache holds at least one session and is not stale.
    boolean hasCacheSessions() {
        return cachedSessions != null && !cachedSessions.isEmpty() && !isDirty;
    }

    // True when the cache holds this particular session and is not stale.
    boolean hasCacheSession(int sessionId) {
        return cachedSessions != null && cachedSessions.containsKey(sessionId) && !isDirty;
    }
}
| apache-2.0 |
cipriancraciun/mosaic-java-benchmarks | benchmarks-core/src/main/java/eu/mosaic_cloud/benchmarks/tools/GsonEncoder.java | 200 |
package eu.mosaic_cloud.benchmarks.tools;
import com.google.gson.JsonObject;
/**
 * Strategy interface for converting an object into its Gson
 * {@link JsonObject} representation.
 *
 * @param <_Object_> the type of object this encoder can serialize.
 */
public interface GsonEncoder<_Object_ extends Object>
{
	// Returns the JSON representation of the given object.
	public abstract JsonObject encode (final _Object_ object);
}
| apache-2.0 |
leancloud/zeppelin | spark/src/main/java/com/nflabs/zeppelin/spark/SparkInterpreter.java | 20845 | package com.nflabs.zeppelin.spark;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.PrintStream;
import java.io.PrintWriter;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import org.apache.spark.SparkConf;
import org.apache.spark.SparkContext;
import org.apache.spark.SparkEnv;
import org.apache.spark.repl.SparkCommandLine;
import org.apache.spark.repl.SparkILoop;
import org.apache.spark.repl.SparkIMain;
import org.apache.spark.repl.SparkJLineCompletion;
import org.apache.spark.scheduler.ActiveJob;
import org.apache.spark.scheduler.DAGScheduler;
import org.apache.spark.scheduler.Pool;
import org.apache.spark.scheduler.Stage;
import org.apache.spark.sql.SQLContext;
import org.apache.spark.sql.hive.HiveContext;
import org.apache.spark.ui.jobs.JobProgressListener;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import scala.Console;
import scala.Enumeration.Value;
import scala.None;
import scala.Some;
import scala.Tuple2;
import scala.collection.Iterator;
import scala.collection.JavaConversions;
import scala.collection.JavaConverters;
import scala.collection.mutable.HashMap;
import scala.collection.mutable.HashSet;
import scala.tools.nsc.Settings;
import scala.tools.nsc.interpreter.Completion.Candidates;
import scala.tools.nsc.interpreter.Completion.ScalaCompleter;
import scala.tools.nsc.settings.MutableSettings.BooleanSetting;
import scala.tools.nsc.settings.MutableSettings.PathSetting;
import com.nflabs.zeppelin.interpreter.Interpreter;
import com.nflabs.zeppelin.interpreter.InterpreterContext;
import com.nflabs.zeppelin.interpreter.InterpreterGroup;
import com.nflabs.zeppelin.interpreter.InterpreterPropertyBuilder;
import com.nflabs.zeppelin.interpreter.InterpreterResult;
import com.nflabs.zeppelin.interpreter.InterpreterResult.Code;
import com.nflabs.zeppelin.interpreter.WrappedInterpreter;
import com.nflabs.zeppelin.notebook.form.Setting;
import com.nflabs.zeppelin.scheduler.Scheduler;
import com.nflabs.zeppelin.scheduler.SchedulerFactory;
import com.nflabs.zeppelin.spark.dep.DependencyContext;
import com.nflabs.zeppelin.spark.dep.DependencyResolver;
/**
 * Spark interpreter for Zeppelin.
 *
 * <p>Hosts an embedded Scala REPL ({@link SparkILoop}/{@link SparkIMain})
 * wired to a shared {@link SparkContext}, exposes SQL/Hive contexts and a
 * {@link ZeppelinContext} to notebook code, and derives paragraph progress
 * from Spark's {@link JobProgressListener}.
 */
public class SparkInterpreter extends Interpreter {
  Logger logger = LoggerFactory.getLogger(SparkInterpreter.class);

  static {
    // Register this interpreter under group/name "spark" together with its
    // user-configurable properties and their defaults.
    Interpreter.register(
        "spark",
        "spark",
        SparkInterpreter.class.getName(),
        new InterpreterPropertyBuilder()
            .add("spark.app.name", "Zeppelin", "The name of spark application.")
            .add("master",
                getSystemDefault("MASTER", "spark.master", "local[*]"),
                "Spark master uri. ex) spark://masterhost:7077")
            .add("spark.executor.memory",
                getSystemDefault(null, "spark.executor.memory", "512m"),
                "Executor memory per worker instance. ex) 512m, 32g")
            .add("spark.cores.max",
                getSystemDefault(null, "spark.cores.max", ""),
                "Total number of cores to use. Empty value uses all available core.")
            .add("args", "", "spark commandline args").build());
  }

  // Helper object exposed to notebook code as "z".
  private ZeppelinContext z;
  // The REPL shell and its underlying Scala interpreter.
  private SparkILoop interpreter;
  private SparkIMain intp;
  private SparkContext sc;
  // Captures REPL output so it can be returned as the paragraph result.
  private ByteArrayOutputStream out;
  private SQLContext sqlc;
  private HiveContext hiveContext;
  private DependencyResolver dep;
  private SparkJLineCompletion completor;
  // Spark event listener used to compute paragraph progress.
  private JobProgressListener sparkListener;
  // Objects shared with the REPL through the generated "_binder" map.
  private Map<String, Object> binder;
  // SparkEnv captured when the context was created; restored before runs.
  private SparkEnv env;

  public SparkInterpreter(Properties property) {
    super(property);
    out = new ByteArrayOutputStream();
  }

  // Constructor used when an already-created SparkContext is supplied.
  public SparkInterpreter(Properties property, SparkContext sc) {
    this(property);

    this.sc = sc;
    env = SparkEnv.get();
    sparkListener = setupListeners(this.sc);
  }

  /** Lazily creates (at most once) and returns the shared SparkContext. */
  public synchronized SparkContext getSparkContext() {
    if (sc == null) {
      sc = createSparkContext();
      env = SparkEnv.get();
      sparkListener = setupListeners(sc);
    }
    return sc;
  }

  public boolean isSparkContextInitialized() {
    return sc != null;
  }

  // Attaches a progress listener to the context's listener bus.
  private static JobProgressListener setupListeners(SparkContext context) {
    JobProgressListener pl = new JobProgressListener(context.getConf());
    context.listenerBus().addListener(pl);
    return pl;
  }

  /** Lazily creates the SQLContext bound to the shared SparkContext. */
  public SQLContext getSQLContext() {
    if (sqlc == null) {
      sqlc = new SQLContext(getSparkContext());
    }
    return sqlc;
  }

  /** Lazily creates the HiveContext bound to the shared SparkContext. */
  public HiveContext getHiveContext() {
    if (hiveContext == null) {
      hiveContext = new HiveContext(getSparkContext());
    }
    return hiveContext;
  }

  /** Lazily creates the dependency resolver (requires intp and sc to exist). */
  public DependencyResolver getDependencyResolver() {
    if (dep == null) {
      dep = new DependencyResolver(intp, sc);
    }
    return dep;
  }

  // Finds the DepInterpreter in the same interpreter group, unwrapping any
  // WrappedInterpreter layers; returns null when none is configured.
  private DepInterpreter getDepInterpreter() {
    InterpreterGroup intpGroup = getInterpreterGroup();
    if (intpGroup == null) return null;
    for (Interpreter intp : intpGroup) {
      if (intp.getClassName().equals(DepInterpreter.class.getName())) {
        Interpreter p = intp;
        while (p instanceof WrappedInterpreter) {
          p = ((WrappedInterpreter) p).getInnerInterpreter();
        }
        return (DepInterpreter) p;
      }
    }
    return null;
  }

  /**
   * Builds a new SparkContext from the interpreter properties. The REPL's
   * class server URI is registered so executors can load classes compiled
   * from notebook code; every "spark.*" property with a non-blank String
   * value is forwarded to the SparkConf.
   */
  public SparkContext createSparkContext() {
    System.err.println("------ Create new SparkContext " + getProperty("master") + " -------");

    String execUri = System.getenv("SPARK_EXECUTOR_URI");
    String[] jars = SparkILoop.getAddedJars();
    SparkConf conf =
        new SparkConf()
            .setMaster(getProperty("master"))
            .setAppName(getProperty("spark.app.name"))
            .setJars(jars)
            .set("spark.repl.class.uri", interpreter.intp().classServer().uri());
    if (execUri != null) {
      conf.set("spark.executor.uri", execUri);
    }
    if (System.getenv("SPARK_HOME") != null) {
      conf.setSparkHome(System.getenv("SPARK_HOME"));
    }
    // FAIR scheduling lets concurrently running paragraphs share the context.
    conf.set("spark.scheduler.mode", "FAIR");

    Properties intpProperty = getProperty();

    for (Object k : intpProperty.keySet()) {
      String key = (String) k;
      if (key.startsWith("spark.")) {
        Object value = intpProperty.get(key);
        if (value != null
            && value instanceof String
            && !((String) value).trim().isEmpty()) {
          conf.set(key, (String) value);
        }
      }
    }

    SparkContext sparkContext = new SparkContext(conf);
    return sparkContext;
  }

  // Resolves a configuration value by precedence: environment variable,
  // then JVM system property, then the supplied default.
  private static String getSystemDefault(
      String envName,
      String propertyName,
      String defaultValue) {

    if (envName != null && !envName.isEmpty()) {
      String envValue = System.getenv().get(envName);
      if (envValue != null) {
        return envValue;
      }
    }

    if (propertyName != null && !propertyName.isEmpty()) {
      String propValue = System.getProperty(propertyName);
      if (propValue != null) {
        return propValue;
      }
    }
    return defaultValue;
  }

  /**
   * Initializes the embedded Scala REPL: builds compiler settings (classpath
   * and classloader), creates the SparkContext and helper contexts, and
   * injects sc/sqlc/hiveContext/z into the REPL session.
   */
  @Override
  public void open() {
    URL[] urls = getClassloaderUrls();

    // Very nice discussion about how scala compiler handle classpath
    // https://groups.google.com/forum/#!topic/scala-user/MlVwo2xCCI0

    /*
     * > val env = new nsc.Settings(errLogger) > env.usejavacp.value = true > val p = new
     * Interpreter(env) > p.setContextClassLoader > Alternatively you can set the class path through
     * nsc.Settings.classpath.
     *
     * >> val settings = new Settings() >> settings.usejavacp.value = true >>
     * settings.classpath.value += File.pathSeparator + >> System.getProperty("java.class.path") >>
     * val in = new Interpreter(settings) { >> override protected def parentClassLoader =
     * getClass.getClassLoader >> } >> in.setContextClassLoader()
     */
    Settings settings = new Settings();
    if (getProperty("args") != null) {
      // Feed user-supplied command-line args through Spark's own parser so
      // they become compiler settings.
      String[] argsArray = getProperty("args").split(" ");
      LinkedList<String> argList = new LinkedList<String>();
      for (String arg : argsArray) {
        argList.add(arg);
      }

      SparkCommandLine command =
          new SparkCommandLine(scala.collection.JavaConversions.asScalaBuffer(
              argList).toList());
      settings = command.settings();
    }

    // set classpath for scala compiler: current classloader chain + webapp
    // classloader URLs + DepInterpreter-resolved artifacts.
    PathSetting pathSettings = settings.classpath();
    String classpath = "";
    List<File> paths = currentClassPath();
    for (File f : paths) {
      if (classpath.length() > 0) {
        classpath += File.pathSeparator;
      }
      classpath += f.getAbsolutePath();
    }

    if (urls != null) {
      for (URL u : urls) {
        if (classpath.length() > 0) {
          classpath += File.pathSeparator;
        }
        classpath += u.getFile();
      }
    }

    // add dependency from DepInterpreter
    DepInterpreter depInterpreter = getDepInterpreter();
    if (depInterpreter != null) {
      DependencyContext depc = depInterpreter.getDependencyContext();
      if (depc != null) {
        List<File> files = depc.getFiles();
        if (files != null) {
          for (File f : files) {
            if (classpath.length() > 0) {
              classpath += File.pathSeparator;
            }
            classpath += f.getAbsolutePath();
          }
        }
      }
    }

    pathSettings.v_$eq(classpath);
    settings.scala$tools$nsc$settings$ScalaSettings$_setter_$classpath_$eq(pathSettings);

    // set classloader for scala compiler
    settings.explicitParentLoader_$eq(new Some<ClassLoader>(Thread.currentThread()
        .getContextClassLoader()));
    BooleanSetting b = (BooleanSetting) settings.usejavacp();
    b.v_$eq(true);
    settings.scala$tools$nsc$settings$StandardScalaSettings$_setter_$usejavacp_$eq(b);

    PrintStream printStream = new PrintStream(out);

    /* spark interpreter */
    this.interpreter = new SparkILoop(null, new PrintWriter(out));
    interpreter.settings_$eq(settings);

    interpreter.createInterpreter();

    intp = interpreter.intp();
    intp.setContextClassLoader();
    intp.initializeSynchronous();

    completor = new SparkJLineCompletion(intp);

    sc = getSparkContext();
    if (sc.getPoolForName("fair").isEmpty()) {
      // Create the "fair" scheduler pool used for paragraph jobs.
      Value schedulingMode = org.apache.spark.scheduler.SchedulingMode.FAIR();
      int minimumShare = 0;
      int weight = 1;
      Pool pool = new Pool("fair", schedulingMode, minimumShare, weight);
      sc.taskScheduler().rootPool().addSchedulable(pool);
    }

    sqlc = getSQLContext();

    dep = getDependencyResolver();

    z = new ZeppelinContext(sc, sqlc, getHiveContext(), null, dep, printStream);

    this.interpreter.loadFiles(settings);

    // Publish shared objects into the REPL through a generated "_binder"
    // map, then alias them inside the REPL as typed @transient vals.
    intp.interpret("@transient var _binder = new java.util.HashMap[String, Object]()");
    binder = (Map<String, Object>) getValue("_binder");
    binder.put("sc", sc);
    binder.put("sqlc", sqlc);
    binder.put("hiveContext", getHiveContext());
    binder.put("z", z);
    binder.put("out", printStream);

    intp.interpret("@transient val z = "
        + "_binder.get(\"z\").asInstanceOf[com.nflabs.zeppelin.spark.ZeppelinContext]");
    intp.interpret("@transient val sc = "
        + "_binder.get(\"sc\").asInstanceOf[org.apache.spark.SparkContext]");
    intp.interpret("@transient val sqlc = "
        + "_binder.get(\"sqlc\").asInstanceOf[org.apache.spark.sql.SQLContext]");
    intp.interpret("@transient val hiveContext = "
        + "_binder.get(\"hiveContext\").asInstanceOf[org.apache.spark.sql.hive.HiveContext]");
    intp.interpret("import org.apache.spark.SparkContext._");
    intp.interpret("import sqlc._");

    // add jar
    if (depInterpreter != null) {
      DependencyContext depc = depInterpreter.getDependencyContext();
      if (depc != null) {
        List<File> files = depc.getFilesDist();
        if (files != null) {
          for (File f : files) {
            // Jars are distributed via addJar (available on executor
            // classpath); other files via addFile.
            if (f.getName().toLowerCase().endsWith(".jar")) {
              sc.addJar(f.getAbsolutePath());
              logger.info("sc.addJar(" + f.getAbsolutePath() + ")");
            } else {
              sc.addFile(f.getAbsolutePath());
              logger.info("sc.addFile(" + f.getAbsolutePath() + ")");
            }
          }
        }
      }
    }
  }

  // Classpath of the current thread's classloader plus java.class.path.
  private List<File> currentClassPath() {
    List<File> paths = classPath(Thread.currentThread().getContextClassLoader());
    String[] cps = System.getProperty("java.class.path").split(File.pathSeparator);
    if (cps != null) {
      for (String cp : cps) {
        paths.add(new File(cp));
      }
    }
    return paths;
  }

  // URLs of a URLClassLoader as files; empty list for other loader types.
  private List<File> classPath(ClassLoader cl) {
    List<File> paths = new LinkedList<File>();
    if (cl == null) {
      return paths;
    }

    if (cl instanceof URLClassLoader) {
      URLClassLoader ucl = (URLClassLoader) cl;
      URL[] urls = ucl.getURLs();
      if (urls != null) {
        for (URL url : urls) {
          paths.add(new File(url.getFile()));
        }
      }
    }
    return paths;
  }

  /** Delegates code completion to the Scala REPL's JLine completer. */
  @Override
  public List<String> completion(String buf, int cursor) {
    ScalaCompleter c = completor.completer();
    Candidates ret = c.complete(buf, cursor);
    return scala.collection.JavaConversions.asJavaList(ret.candidates());
  }

  /** Binds a named value into the REPL; "form" Settings also update z. */
  @Override
  public void bindValue(String name, Object o) {
    if ("form".equals(name) && o instanceof Setting) { // form controller injection from
                                                       // Paragraph.jobRun
      z.setFormSetting((Setting) o);
    }

    getResultCode(intp.bindValue(name, o));
  }

  /** Looks up a REPL term by name, unwrapping Scala's Option (None becomes null). */
  @Override
  public Object getValue(String name) {
    Object ret = intp.valueOfTerm(name);
    if (ret instanceof None) {
      return null;
    } else if (ret instanceof Some) {
      return ((Some) ret).get();
    } else {
      return ret;
    }
  }

  // Job group unique per interpreter instance + paragraph; used for both
  // cancellation and progress lookup.
  private String getJobGroup(InterpreterContext context){
    return "zeppelin-" + this.hashCode() + "-" + context.getParagraph().getId();
  }

  /**
   * Interpret a single line.
   */
  @Override
  public InterpreterResult interpret(String line, InterpreterContext context) {
    z.setInterpreterContext(context);
    if (line == null || line.trim().length() == 0) {
      return new InterpreterResult(Code.SUCCESS);
    }
    return interpret(line.split("\n"), context);
  }

  // Runs the lines under this paragraph's job group. Synchronized because
  // the underlying REPL session is not safe for concurrent use.
  public InterpreterResult interpret(String[] lines, InterpreterContext context) {
    synchronized (this) {
      sc.setJobGroup(getJobGroup(context), "Zeppelin", false);
      InterpreterResult r = interpretInput(lines);
      sc.clearJobGroup();
      return r;
    }
  }

  /** Feeds the lines to the REPL one by one, accumulating incomplete statements. */
  public InterpreterResult interpretInput(String[] lines) {
    SparkEnv.set(env);

    // add print("") to make sure not finishing with comment
    // see https://github.com/NFLabs/zeppelin/issues/151
    String[] linesToRun = new String[lines.length + 1];
    for (int i = 0; i < lines.length; i++) {
      linesToRun[i] = lines[i];
    }
    linesToRun[lines.length] = "print(\"\")";

    // Route Scala's Console output into our capture stream.
    Console.setOut((java.io.PrintStream) binder.get("out"));
    out.reset();
    Code r = null;
    String incomplete = "";
    for (String s : linesToRun) {
      scala.tools.nsc.interpreter.Results.Result res = null;
      try {
        res = intp.interpret(incomplete + s);
      } catch (Exception e) {
        sc.clearJobGroup();
        logger.info("Interpreter exception", e);
        return new InterpreterResult(Code.ERROR, e.getMessage());
      }

      r = getResultCode(res);

      if (r == Code.ERROR) {
        sc.clearJobGroup();
        return new InterpreterResult(r, out.toString());
      } else if (r == Code.INCOMPLETE) {
        // Statement spans multiple lines: retry with the next line appended.
        incomplete += s + "\n";
      } else {
        incomplete = "";
      }
    }

    if (r == Code.INCOMPLETE) {
      return new InterpreterResult(r, "Incomplete expression");
    } else {
      return new InterpreterResult(r, out.toString());
    }
  }

  /** Cancels all Spark jobs belonging to the paragraph's job group. */
  @Override
  public void cancel(InterpreterContext context) {
    sc.cancelJobGroup(getJobGroup(context));
  }

  /**
   * Percentage of completed tasks across all active jobs in this paragraph's
   * job group. Task counts are read reflectively because the listener API
   * differs between Spark 1.0.x and 1.1/1.2; unsupported versions report 0.
   */
  @Override
  public int getProgress(InterpreterContext context) {
    String jobGroup = getJobGroup(context);
    int completedTasks = 0;
    int totalTasks = 0;

    DAGScheduler scheduler = sc.dagScheduler();
    if (scheduler == null) {
      return 0;
    }
    HashSet<ActiveJob> jobs = scheduler.activeJobs();
    if (jobs == null || jobs.size() == 0) {
      return 0;
    }
    Iterator<ActiveJob> it = jobs.iterator();
    while (it.hasNext()) {
      ActiveJob job = it.next();
      String g = (String) job.properties().get("spark.jobGroup.id");
      if (jobGroup.equals(g)) {
        int[] progressInfo = null;
        if (sc.version().startsWith("1.0")) {
          progressInfo = getProgressFromStage_1_0x(sparkListener, job.finalStage());
        } else if (sc.version().startsWith("1.1")) {
          progressInfo = getProgressFromStage_1_1x(sparkListener, job.finalStage());
        } else if (sc.version().startsWith("1.2")) {
          progressInfo = getProgressFromStage_1_1x(sparkListener, job.finalStage());
        } else {
          continue;
        }
        totalTasks += progressInfo[0];
        completedTasks += progressInfo[1];
      }
    }

    if (totalTasks == 0) {
      return 0;
    }
    return completedTasks * 100 / totalTasks;
  }

  // Spark 1.0.x: reads JobProgressListener.stageIdToTasksComplete via
  // reflection; recurses into parent stages to accumulate task counts.
  // Returns {numTasks, completedTasks}.
  private int[] getProgressFromStage_1_0x(JobProgressListener sparkListener, Stage stage) {
    int numTasks = stage.numTasks();
    int completedTasks = 0;

    Method method;
    Object completedTaskInfo = null;
    try {
      method = sparkListener.getClass().getMethod("stageIdToTasksComplete");
      completedTaskInfo =
          JavaConversions.asJavaMap((HashMap<Object, Object>) method.invoke(sparkListener)).get(
              stage.id());
    } catch (NoSuchMethodException | SecurityException e) {
      logger.error("Error while getting progress", e);
    } catch (IllegalAccessException e) {
      logger.error("Error while getting progress", e);
    } catch (IllegalArgumentException e) {
      logger.error("Error while getting progress", e);
    } catch (InvocationTargetException e) {
      logger.error("Error while getting progress", e);
    }

    if (completedTaskInfo != null) {
      completedTasks += (int) completedTaskInfo;
    }
    List<Stage> parents = JavaConversions.asJavaList(stage.parents());
    if (parents != null) {
      for (Stage s : parents) {
        int[] p = getProgressFromStage_1_0x(sparkListener, s);
        numTasks += p[0];
        completedTasks += p[1];
      }
    }

    return new int[] {numTasks, completedTasks};
  }

  // Spark 1.1/1.2: reads JobProgressListener.stageIdToData, keyed by
  // (stageId, attemptId) tuples, and the StageUIData.numCompleteTasks field
  // via reflection. Returns {numTasks, completedTasks}.
  private int[] getProgressFromStage_1_1x(JobProgressListener sparkListener, Stage stage) {
    int numTasks = stage.numTasks();
    int completedTasks = 0;

    try {
      Method stageIdToData = sparkListener.getClass().getMethod("stageIdToData");
      HashMap<Tuple2<Object, Object>, Object> stageIdData =
          (HashMap<Tuple2<Object, Object>, Object>) stageIdToData.invoke(sparkListener);
      Class<?> stageUIDataClass =
          this.getClass().forName("org.apache.spark.ui.jobs.UIData$StageUIData");

      Method numCompletedTasks = stageUIDataClass.getMethod("numCompleteTasks");

      Set<Tuple2<Object, Object>> keys =
          JavaConverters.asJavaSetConverter(stageIdData.keySet()).asJava();
      for (Tuple2<Object, Object> k : keys) {
        if (stage.id() == (int) k._1()) {
          Object uiData = stageIdData.get(k).get();
          completedTasks += (int) numCompletedTasks.invoke(uiData);
        }
      }
    } catch (Exception e) {
      logger.error("Error on getting progress information", e);
    }

    List<Stage> parents = JavaConversions.asJavaList(stage.parents());
    if (parents != null) {
      for (Stage s : parents) {
        int[] p = getProgressFromStage_1_1x(sparkListener, s);
        numTasks += p[0];
        completedTasks += p[1];
      }
    }
    return new int[] {numTasks, completedTasks};
  }

  // Maps Scala REPL result singletons onto Zeppelin result codes.
  private Code getResultCode(scala.tools.nsc.interpreter.Results.Result r) {
    if (r instanceof scala.tools.nsc.interpreter.Results.Success$) {
      return Code.SUCCESS;
    } else if (r instanceof scala.tools.nsc.interpreter.Results.Incomplete$) {
      return Code.INCOMPLETE;
    } else {
      return Code.ERROR;
    }
  }

  /** Stops the SparkContext and closes the REPL session. */
  @Override
  public void close() {
    sc.stop();
    sc = null;

    intp.close();
  }

  @Override
  public FormType getFormType() {
    return FormType.NATIVE;
  }

  public JobProgressListener getJobProgressListener() {
    return sparkListener;
  }

  // One FIFO scheduler per interpreter instance: paragraphs run sequentially.
  @Override
  public Scheduler getScheduler() {
    return SchedulerFactory.singleton().createOrGetFIFOScheduler(
        SparkInterpreter.class.getName() + this.hashCode());
  }
}
| apache-2.0 |
MarcGuiot/globsframework | src/test/java/org/globsframework/utils/MultiSetMapTest.java | 860 | package org.globsframework.utils;
import org.globsframework.utils.collections.MultiSetMap;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
/** Unit tests for {@link MultiSetMap}: a multimap with set-valued entries. */
public class MultiSetMapTest {
  private final MultiSetMap<String, Integer> multiSetMap = new MultiSetMap<>();

  @Test
  public void testStandardUsage() throws Exception {
    multiSetMap.put("a", 1);
    multiSetMap.put("a", 2);
    multiSetMap.put("b", 3);
    // Duplicate key/value pair must be collapsed by the underlying set.
    multiSetMap.put("b", 3);

    TestUtils.assertEquals(multiSetMap.get("a"), 1, 2);
    TestUtils.assertEquals(multiSetMap.get("b"), 3);
    assertEquals(3, multiSetMap.size());
  }

  @Test
  public void testReturnedListsCannotBeModified() throws Exception {
    multiSetMap.put("a", 1);
    try {
      multiSetMap.get("a").add(2);
      fail();
    }
    catch (UnsupportedOperationException e) {
      // Expected: the returned view must reject mutation.
    }
  }
}
| apache-2.0 |
leogoing/spring_jeesite | spring-context-4.0/org/springframework/cache/interceptor/ExpressionEvaluator.java | 5631 | /*
* Copyright 2002-2014 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.cache.interceptor;
import java.lang.reflect.Method;
import java.util.Collection;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

import org.springframework.cache.Cache;
import org.springframework.core.DefaultParameterNameDiscoverer;
import org.springframework.core.ParameterNameDiscoverer;
import org.springframework.expression.EvaluationContext;
import org.springframework.expression.Expression;
import org.springframework.expression.spel.standard.SpelExpressionParser;
import org.springframework.util.ObjectUtils;
/**
 * Utility class handling the SpEL expression parsing for the cache
 * abstraction's {@code key}, {@code condition} and {@code unless} expressions.
 * Meant to be used as a reusable, thread-safe component.
 *
 * <p>Performs internal caching of parsed expressions for performance reasons,
 * keyed by {@link MethodCacheKey} plus the raw expression string.
 *
 * @author Costin Leau
 * @author Phillip Webb
 * @author Sam Brannen
 * @author Stephane Nicoll
 * @since 3.1
 */
class ExpressionEvaluator {

	/** Marker object used as the "result" argument when no return value is available yet. */
	public static final Object NO_RESULT = new Object();

	private final SpelExpressionParser parser = new SpelExpressionParser();

	// Shared param discoverer since it caches data internally.
	private final ParameterNameDiscoverer paramNameDiscoverer = new DefaultParameterNameDiscoverer();

	private final ConcurrentMap<ExpressionKey, Expression> keyCache =
			new ConcurrentHashMap<ExpressionKey, Expression>(64);

	private final ConcurrentMap<ExpressionKey, Expression> conditionCache =
			new ConcurrentHashMap<ExpressionKey, Expression>(64);

	private final ConcurrentMap<ExpressionKey, Expression> unlessCache =
			new ConcurrentHashMap<ExpressionKey, Expression>(64);

	private final Map<MethodCacheKey, Method> targetMethodCache =
			new ConcurrentHashMap<MethodCacheKey, Method>(64);


	/**
	 * Create an {@link EvaluationContext} without a return value.
	 * @see #createEvaluationContext(Collection, Method, Object[], Object, Class, Object)
	 */
	public EvaluationContext createEvaluationContext(Collection<? extends Cache> caches,
			Method method, Object[] args, Object target, Class<?> targetClass) {

		return createEvaluationContext(caches, method, args, target, targetClass, NO_RESULT);
	}

	/**
	 * Create an {@link EvaluationContext}.
	 *
	 * @param caches the current caches
	 * @param method the method
	 * @param args the method arguments
	 * @param target the target object
	 * @param targetClass the target class
	 * @param result the return value (can be {@code null}) or
	 * {@link #NO_RESULT} if there is no return at this time
	 * @return the evaluation context
	 */
	public EvaluationContext createEvaluationContext(Collection<? extends Cache> caches,
			Method method, Object[] args, Object target, Class<?> targetClass,
			final Object result) {

		CacheExpressionRootObject rootObject = new CacheExpressionRootObject(caches,
				method, args, target, targetClass);
		LazyParamAwareEvaluationContext evaluationContext = new LazyParamAwareEvaluationContext(rootObject,
				this.paramNameDiscoverer, method, args, targetClass, this.targetMethodCache);
		if (result != NO_RESULT) {
			// Expose the method's return value to expressions as the #result variable.
			evaluationContext.setVariable("result", result);
		}
		return evaluationContext;
	}

	/** Evaluate the given key expression against the supplied context. */
	public Object key(String keyExpression, MethodCacheKey methodKey, EvaluationContext evalContext) {
		return getExpression(this.keyCache, keyExpression, methodKey).getValue(evalContext);
	}

	/** Evaluate the given condition expression against the supplied context. */
	public boolean condition(String conditionExpression, MethodCacheKey methodKey, EvaluationContext evalContext) {
		return getExpression(this.conditionCache, conditionExpression, methodKey).getValue(
				evalContext, boolean.class);
	}

	/** Evaluate the given unless expression against the supplied context. */
	public boolean unless(String unlessExpression, MethodCacheKey methodKey, EvaluationContext evalContext) {
		return getExpression(this.unlessCache, unlessExpression, methodKey).getValue(
				evalContext, boolean.class);
	}

	/**
	 * Return the parsed {@link Expression} for the given expression string,
	 * parsing and caching it on first use.
	 * <p>Uses {@link ConcurrentMap#putIfAbsent} so that concurrent first-time
	 * callers agree on a single canonical Expression instance: the previous
	 * check-then-{@code put} sequence allowed two threads to each parse the
	 * same string and install different instances.
	 */
	private Expression getExpression(ConcurrentMap<ExpressionKey, Expression> cache,
			String expression, MethodCacheKey methodKey) {

		ExpressionKey key = createKey(methodKey, expression);
		Expression expr = cache.get(key);
		if (expr == null) {
			Expression parsed = this.parser.parseExpression(expression);
			Expression existing = cache.putIfAbsent(key, parsed);
			expr = (existing != null ? existing : parsed);
		}
		return expr;
	}

	private ExpressionKey createKey(MethodCacheKey methodCacheKey, String expression) {
		return new ExpressionKey(methodCacheKey, expression);
	}


	/**
	 * Composite cache key: a {@link MethodCacheKey} plus the raw expression string.
	 */
	private static class ExpressionKey {

		private final MethodCacheKey methodCacheKey;

		private final String expression;

		private ExpressionKey(MethodCacheKey methodCacheKey, String expression) {
			this.methodCacheKey = methodCacheKey;
			this.expression = expression;
		}

		@Override
		public boolean equals(Object other) {
			if (this == other) {
				return true;
			}
			if (!(other instanceof ExpressionKey)) {
				return false;
			}
			ExpressionKey otherKey = (ExpressionKey) other;
			return (this.methodCacheKey.equals(otherKey.methodCacheKey)
					&& ObjectUtils.nullSafeEquals(this.expression, otherKey.expression));
		}

		@Override
		public int hashCode() {
			return this.methodCacheKey.hashCode() * 29 + (this.expression != null ? this.expression.hashCode() : 0);
		}
	}

}
| apache-2.0 |
Vilsol/NMSWrapper | src/main/java/me/vilsol/nmswrapper/wraps/unparsed/NMSChatComponentScore.java | 2727 | package me.vilsol.nmswrapper.wraps.unparsed;
import me.vilsol.nmswrapper.NMSWrapper;
import me.vilsol.nmswrapper.reflections.ReflectiveClass;
import me.vilsol.nmswrapper.reflections.ReflectiveMethod;
@ReflectiveClass(name = "ChatComponentScore")
public class NMSChatComponentScore extends NMSChatBaseComponent {
public NMSChatComponentScore(Object nmsObject){
super(nmsObject);
}
public NMSChatComponentScore(String s, String s1){
super("ChatComponentScore", new Object[]{String.class, String.class}, new Object[]{s, s1});
}
/**
* TODO Find correct name
* @see net.minecraft.server.v1_9_R1.ChatComponentScore#b(java.lang.String)
*/
@ReflectiveMethod(name = "b", types = {String.class})
public void b(String s){
NMSWrapper.getInstance().exec(nmsObject, s);
}
/**
* @see net.minecraft.server.v1_9_R1.ChatComponentScore#equals(java.lang.Object)
*/
@ReflectiveMethod(name = "equals", types = {Object.class})
public boolean equals(Object object){
return (boolean) NMSWrapper.getInstance().exec(nmsObject, object);
}
/**
* TODO Find correct name
* @see net.minecraft.server.v1_9_R1.ChatComponentScore#f()
*/
@ReflectiveMethod(name = "f", types = {})
public NMSIChatBaseComponent f(){
return (NMSIChatBaseComponent) NMSWrapper.getInstance().createApplicableObject(NMSWrapper.getInstance().exec(nmsObject));
}
/**
* TODO Find correct name
* @see net.minecraft.server.v1_9_R1.ChatComponentScore#g()
*/
@ReflectiveMethod(name = "g", types = {})
public String g(){
return (String) NMSWrapper.getInstance().exec(nmsObject);
}
/**
* @see net.minecraft.server.v1_9_R1.ChatComponentScore#getText()
*/
@ReflectiveMethod(name = "getText", types = {})
public String getText(){
return (String) NMSWrapper.getInstance().exec(nmsObject);
}
/**
* TODO Find correct name
* @see net.minecraft.server.v1_9_R1.ChatComponentScore#h()
*/
@ReflectiveMethod(name = "h", types = {})
public String h(){
return (String) NMSWrapper.getInstance().exec(nmsObject);
}
/**
* TODO Find correct name
* @see net.minecraft.server.v1_9_R1.ChatComponentScore#i()
*/
@ReflectiveMethod(name = "i", types = {})
public NMSChatComponentScore i(){
return new NMSChatComponentScore(NMSWrapper.getInstance().exec(nmsObject));
}
/**
* @see net.minecraft.server.v1_9_R1.ChatComponentScore#toString()
*/
@ReflectiveMethod(name = "toString", types = {})
public String toString(){
return (String) NMSWrapper.getInstance().exec(nmsObject);
}
} | apache-2.0 |
ernestp/consulo | platform/projectModel-impl/src/com/intellij/openapi/components/impl/BasePathMacroManager.java | 8298 | /*
* Copyright 2000-2012 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.openapi.components.impl;
import com.intellij.application.options.PathMacrosImpl;
import com.intellij.application.options.ReplacePathToMacroMap;
import com.intellij.openapi.application.PathMacros;
import com.intellij.openapi.components.ExpandMacroToPathMap;
import com.intellij.openapi.components.PathMacroManager;
import com.intellij.openapi.components.PathMacroUtil;
import com.intellij.openapi.components.TrackingPathMacroSubstitutor;
import com.intellij.openapi.util.SystemInfo;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.IVirtualFileSystem;
import com.intellij.openapi.vfs.StandardFileSystems;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.vfs.VirtualFileManager;
import com.intellij.util.containers.FactoryMap;
import org.jdom.Element;
import org.jetbrains.annotations.Nullable;
import java.util.*;
/**
 * Base implementation of {@link PathMacroManager}: expands and collapses path
 * macros (e.g. {@code $MACRO_NAME$}) in strings and JDOM elements, combining the
 * built-in system macros from {@link PathMacroUtil} with the user-defined table
 * held by {@link PathMacrosImpl}.
 */
public class BasePathMacroManager extends PathMacroManager {
  // May start out null; lazily initialized in getPathMacros().
  private PathMacrosImpl myPathMacros;

  public BasePathMacroManager(@Nullable PathMacros pathMacros) {
    myPathMacros = (PathMacrosImpl)pathMacros;
  }

  /**
   * Registers expansions for {@code $macroName$}, {@code $macroName$/..},
   * {@code $macroName$/../..} etc., mapping each to the given path and its
   * successive parent directories. Does nothing when {@code path} is null.
   */
  protected static void addFileHierarchyReplacements(ExpandMacroToPathMap result, String macroName, @Nullable String path) {
    if (path == null) return;
    addFileHierarchyReplacements(result, getLocalFileSystem().findFileByPath(path), "$" + macroName + "$");
  }

  // Recursive helper: walks up to the root first so ancestors are registered
  // before the file itself, pairing macro, macro/.., macro/../.. with f's chain.
  private static void addFileHierarchyReplacements(ExpandMacroToPathMap result, @Nullable VirtualFile f, String macro) {
    if (f == null) return;
    addFileHierarchyReplacements(result, f.getParent(), macro + "/..");
    result.put(macro, StringUtil.trimEnd(f.getPath(), "/"));
  }

  /**
   * Registers replacements of the given path and its ancestor directories with
   * {@code $macroName$}, {@code $macroName$/..}, etc. Walks upward until the
   * path equals {@code stopAt} (if given) or runs out of '/' separators. Only
   * the deepest (first) mapping is allowed to overwrite an existing entry.
   */
  protected static void addFileHierarchyReplacements(ReplacePathToMacroMap result, String macroName, @Nullable String path, @Nullable String stopAt) {
    if (path == null) return;

    String macro = "$" + macroName + "$";
    path = StringUtil.trimEnd(FileUtil.toSystemIndependentName(path), "/");
    boolean overwrite = true;
    while (StringUtil.isNotEmpty(path) && path.contains("/")) {
      result.addReplacement(path, macro, overwrite);

      if (path.equals(stopAt)) {
        break;
      }

      macro += "/..";
      overwrite = false;
      path = StringUtil.getPackageName(path, '/');
    }
  }

  private static IVirtualFileSystem getLocalFileSystem() {
    // Use VFM directly because of mocks in tests.
    return VirtualFileManager.getInstance().getFileSystem(StandardFileSystems.FILE_PROTOCOL);
  }

  /** Builds the macro-to-path map (system macros plus user-defined macros) used for expansion. */
  public ExpandMacroToPathMap getExpandMacroMap() {
    ExpandMacroToPathMap result = new ExpandMacroToPathMap();
    for (Map.Entry<String, String> entry : PathMacroUtil.getGlobalSystemMacros().entrySet()) {
      result.addMacroExpand(entry.getKey(), entry.getValue());
    }
    getPathMacros().addMacroExpands(result);
    return result;
  }

  /** Builds the path-to-macro map (system macros plus user-defined macros) used for collapsing. */
  protected ReplacePathToMacroMap getReplacePathMap() {
    ReplacePathToMacroMap result = new ReplacePathToMacroMap();
    for (Map.Entry<String, String> entry : PathMacroUtil.getGlobalSystemMacros().entrySet()) {
      result.addMacroReplacement(entry.getValue(), entry.getKey());
    }
    getPathMacros().addMacroReplacements(result);
    return result;
  }

  @Override
  public TrackingPathMacroSubstitutor createTrackingSubstitutor() {
    return new MyTrackingPathMacroSubstitutor();
  }

  @Override
  public String expandPath(final String path) {
    return getExpandMacroMap().substitute(path, SystemInfo.isFileSystemCaseSensitive);
  }

  @Override
  public String collapsePath(final String path) {
    return getReplacePathMap().substitute(path, SystemInfo.isFileSystemCaseSensitive);
  }

  @Override
  public void collapsePathsRecursively(final Element element) {
    getReplacePathMap().substitute(element, SystemInfo.isFileSystemCaseSensitive, true);
  }

  @Override
  public String collapsePathsRecursively(final String text) {
    return getReplacePathMap().substituteRecursively(text, SystemInfo.isFileSystemCaseSensitive);
  }

  @Override
  public void expandPaths(final Element element) {
    getExpandMacroMap().substitute(element, SystemInfo.isFileSystemCaseSensitive);
  }

  @Override
  public void collapsePaths(final Element element) {
    getReplacePathMap().substitute(element, SystemInfo.isFileSystemCaseSensitive);
  }

  /** Returns the macro table, falling back to the application-wide instance when none was injected. */
  public PathMacrosImpl getPathMacros() {
    if (myPathMacros == null) {
      myPathMacros = PathMacrosImpl.getInstanceEx();
    }

    return myPathMacros;
  }

  /**
   * Substitutor that, in addition to expanding/collapsing, records which
   * components referenced which unknown (unresolvable) macros, so callers can
   * later invalidate or query them.
   */
  private class MyTrackingPathMacroSubstitutor implements TrackingPathMacroSubstitutor {
    // unknown macro name -> names of components that referenced it
    private final Map<String, Set<String>> myMacroToComponentNames = new FactoryMap<String, Set<String>>() {
      @Override
      protected Set<String> create(String key) {
        return new HashSet<String>();
      }
    };

    // component name -> unknown macro names it referenced (reverse of the map above)
    private final Map<String, Set<String>> myComponentNameToMacros = new FactoryMap<String, Set<String>>() {
      @Override
      protected Set<String> create(String key) {
        return new HashSet<String>();
      }
    };

    public MyTrackingPathMacroSubstitutor() {
    }

    @Override
    public void reset() {
      myMacroToComponentNames.clear();
      myComponentNameToMacros.clear();
    }

    @Override
    public String expandPath(final String path) {
      return getExpandMacroMap().substitute(path, SystemInfo.isFileSystemCaseSensitive);
    }

    @Override
    public String collapsePath(final String path) {
      return getReplacePathMap().substitute(path, SystemInfo.isFileSystemCaseSensitive);
    }

    @Override
    public void expandPaths(final Element element) {
      getExpandMacroMap().substitute(element, SystemInfo.isFileSystemCaseSensitive);
    }

    @Override
    public void collapsePaths(final Element element) {
      getReplacePathMap().substitute(element, SystemInfo.isFileSystemCaseSensitive);
    }

    // NOTE(review): hashCode is overridden without equals, and it recomputes the
    // expand map on every call — confirm this asymmetry is intentional.
    public int hashCode() {
      return getExpandMacroMap().hashCode();
    }

    @Override
    public void invalidateUnknownMacros(final Set<String> macros) {
      // Drop the given macros from both directions of the index.
      for (final String macro : macros) {
        final Set<String> components = myMacroToComponentNames.get(macro);
        for (final String component : components) {
          myComponentNameToMacros.remove(component);
        }

        myMacroToComponentNames.remove(macro);
      }
    }

    @Override
    public Collection<String> getComponents(final Collection<String> macros) {
      // All components that referenced at least one of the given macros.
      final Set<String> result = new HashSet<String>();
      for (String macro : myMacroToComponentNames.keySet()) {
        if (macros.contains(macro)) {
          result.addAll(myMacroToComponentNames.get(macro));
        }
      }

      return result;
    }

    @Override
    public Collection<String> getUnknownMacros(final String componentName) {
      // Null component name means "all unknown macros seen so far".
      final Set<String> result = new HashSet<String>();
      result.addAll(componentName == null ? myMacroToComponentNames.keySet() : myComponentNameToMacros.get(componentName));
      return Collections.unmodifiableCollection(result);
    }

    @Override
    public void addUnknownMacros(final String componentName, final Collection<String> unknownMacros) {
      if (unknownMacros.isEmpty()) return;

      // Record both directions: macro -> components and component -> macros.
      for (String unknownMacro : unknownMacros) {
        final Set<String> stringList = myMacroToComponentNames.get(unknownMacro);
        stringList.add(componentName);
      }

      myComponentNameToMacros.get(componentName).addAll(unknownMacros);
    }
  }

  /** Null-safe path comparison after normalizing both paths to forward slashes. */
  protected static boolean pathsEqual(@Nullable String path1, @Nullable String path2) {
    return path1 != null && path2 != null &&
           FileUtil.pathsEqual(FileUtil.toSystemIndependentName(path1), FileUtil.toSystemIndependentName(path2));
  }
}
| apache-2.0 |
Esri/military-apps-library-java | source/MilitaryAppsLibrary/src/com/esri/militaryapps/util/Utilities.java | 23669 | /*******************************************************************************
* Copyright 2013-2014 Esri
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package com.esri.militaryapps.util;
import com.esri.militaryapps.model.DomNodeAndDocument;
import java.io.File;
import java.io.StringWriter;
import java.lang.reflect.Method;
import java.net.InetAddress;
import java.net.InterfaceAddress;
import java.net.NetworkInterface;
import java.net.SocketException;
import java.net.URL;
import java.net.URLClassLoader;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Enumeration;
import java.util.HashSet;
import java.util.Set;
import java.util.TimeZone;
import java.util.Timer;
import java.util.TimerTask;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.TransformerConfigurationException;
import javax.xml.transform.TransformerException;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamResult;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
/**
* Utilities that don't belong in a specific class. This class is a necessary evil. :-)
*/
public class Utilities {
/**
* The number of meters in a mile.
*/
public static final double METERS_PER_MILE = (254.0 / 10000.0) * 12.0 * 5280.0;
private static final double FIVE_PI_OVER_TWO = 5.0 * Math.PI / 2.0;
private static final double TWO_PI = 2.0 * Math.PI;
private static final TransformerFactory transformerFactory = TransformerFactory.newInstance();
private static final DocumentBuilderFactory docBuilderFactory = DocumentBuilderFactory.newInstance();
/**
* A DateFormat object for datetimevalid timestamps.
*/
public static final SimpleDateFormat DATE_FORMAT_GEOMESSAGE =
new SimpleDateFormat("yyyy-MM-dd' 'HH:mm:ss");
static {
DATE_FORMAT_GEOMESSAGE.setTimeZone(TimeZone.getTimeZone("UTC"));
}
/**
* A DateFormat object for military date/time in Zulu time.
*/
public static final SimpleDateFormat DATE_FORMAT_MILITARY_ZULU =
new SimpleDateFormat("ddHHmmss'Z 'MMM' 'yy");
static {
DATE_FORMAT_MILITARY_ZULU.setTimeZone(TimeZone.getTimeZone("UTC"));
}
/**
* A DateFormat object for military date/time in local time.
*/
public static final SimpleDateFormat DATE_FORMAT_MILITARY_LOCAL =
new SimpleDateFormat("ddHHmmss'J 'MMM' 'yy");
private static final Timer localDateFormatTimer = new Timer(true);
static {
localDateFormatTimer.schedule(new TimerTask() {
@Override
public void run() {
//Force re-read of OS time zone
System.getProperties().remove("user.timezone");
TimeZone.setDefault(null);
//Adjust local format
TimeZone tz = TimeZone.getDefault();
DATE_FORMAT_MILITARY_LOCAL.setTimeZone(tz);
DATE_FORMAT_MILITARY_LOCAL.applyPattern("ddHHmmss'" + getMilitaryTimeZoneCharacter(tz) + " 'MMM' 'yy");
}
}, 0, 1000 / 24);
}
private static final int MILLISECONDS_IN_HOUR = 60 * 60 * 1000;
private static char getMilitaryTimeZoneCharacter(TimeZone tz) {
int offset = tz.getOffset(System.currentTimeMillis());
//If it's not a whole number of hours, just return 'J'
int offsetHours = offset / MILLISECONDS_IN_HOUR;
if (0 != offset % MILLISECONDS_IN_HOUR || 12 < offsetHours || -12 > offsetHours) {
return 'J';
} else {
if (0 == offsetHours) {
return 'Z';
} else if (0 < offsetHours) {
char c = (char) ('A' + offsetHours - 1);
if ('J' <= c) {
c += 1;
}
return c;
} else {
return (char) ('N' - offsetHours - 1);
}
}
}
/**
* All non-alphanumeric ASCII characters except '-' and '*', contained in a
* single String.
*/
public static final String MIL_2525C_WHITESPACE_CHARS;
static {
StringBuilder sb = new StringBuilder();
for (char c = 0; c < 128; c++) {
if ((c < '0' || c > '9') && (c < 'A' || c > 'Z') && (c < 'a' || c > 'z')
&& c != '-' && c != '*') {
sb.append(c);
}
}
MIL_2525C_WHITESPACE_CHARS = sb.toString();
}
/**
* Protected constructor because Utilities is not meant to be instantiated.
* Protected instead of private so that an application can extend it for convenience
* (so that you only have to have one Utilities class).
*/
protected Utilities() {
}
/**
* Converts a trigonometric angle to a compass heading. In trigonometry, 0
* radians is east, pi / 2 is north, pi is west, and 3 * pi / 2 is south. In
* compass headings, 0 radians is north, pi / 2 is east, pi is south, and
* 3 * pi / 2 is west.
* @param trigHeading the trigonometric heading, in radians.
* @return the compass heading, in radians.
*/
public static double toCompassHeadingRadians(double trigHeadingRadians) {
double compassHeading = FIVE_PI_OVER_TWO - trigHeadingRadians;
if (TWO_PI <= compassHeading) {
compassHeading -= TWO_PI;
}
else if(0.0 > compassHeading) {
compassHeading += TWO_PI;
}
return compassHeading;
}
/**
* Calculates the compass bearing from one point to another and returns the
* result in degrees.
* @param fromLon the longitude of the location from which the bearing is to
* be calculated.
* @param fromLat the latitude of the location from which the bearing is to
* be calculated.
* @param toLon the longitude of the location to which the bearing is to
* be calculated.
* @param toLat the latitude of the location to which the bearing is to
* be calculated.
* @return the compass bearing from one point to another, in degrees.
*/
public static double calculateBearingDegrees(double fromLon, double fromLat, double toLon, double toLat) {
double currentLatRad = fromLat * Math.PI / 180;
double destinationLatRad = toLat * Math.PI / 180;
double currentLonRad = fromLon * Math.PI / 180;
double destinationLonRad = toLon * Math.PI / 180;
double deltaLonRad = (destinationLonRad - currentLonRad);
double y = Math.sin(deltaLonRad) * Math.cos(destinationLatRad);
double x = Math.cos(currentLatRad) * Math.sin(destinationLatRad) - Math.sin(currentLatRad) * Math.cos(destinationLatRad) * Math.cos(deltaLonRad);
double bearing = Math.atan2(y, x) / Math.PI * 180;
return (bearing + 360) % 360;
}
/**
* Normalizes an angle in degrees to fall between specified minimum and maximum
* values.
* @param angle the angle to normalize.
* @param min the minimum value.
* @param max the maximum value.
* @return an angle equivalent to the input angle, normalized to fall between
* the specified minimum and maximum values.
*/
public static double fixAngleDegrees(double angle, double min, double max) {
while (angle < min) {
angle += 360;
}
while (angle > max) {
angle -= 360;
}
return angle;
}
/**
* Loads a JAR file so the application can access its classes.
* @param jarPath the JAR file.
* @throws Exception if the JAR file cannot be loaded.
*/
public static void loadJar(String jarPath) throws Exception {
File f = new File(jarPath);
URLClassLoader sysloader = (URLClassLoader) ClassLoader.getSystemClassLoader();
Method method = URLClassLoader.class.getDeclaredMethod("addURL", URL.class);
method.setAccessible(true);
method.invoke(sysloader, new Object[]{f.toURI().toURL()});
}
/**
* Translates the given color to a color string used by the ArcGIS for the Military
* GeoEvent Processor schema.
* @param color the color RGB.
* @return a color string used by the ArcGIS for the Military GeoEvent Processor
* schema. ArcGIS Runtime supports the following colors:<br/>
* <br/>
* <ul>
* <li>0xFFFF0000 (Color.RED) returns "1"</li>
* <li>0xFF00FF00 (Color.GREEN) returns "2"</li>
* <li>0xFF0000FF (Color.BLUE) returns "3"</li>
* <li>0xFFFFFF00 (Color.YELLOW) returns "4"</li>
* </ul>
* For other colors, this method returns a hex string of the form #AARRGGBB
* (alpha byte, red byte, green byte, and blue byte).
*/
public static String getAFMGeoEventColorString(int colorRgb) {
if (-65536 == colorRgb) {//red
return "1";
} else if (-256 == colorRgb) {//yellow
return "4";
} else if (-16711936 == colorRgb) {//green
return "2";
} else if (-16776961 == colorRgb) {//blue
return "3";
} else {
/**
* ArcGIS Runtime does not currently support custom chem light colors.
* But we can send a hex string in case some client can use it.
*/
String hex = Integer.toHexString(colorRgb & 0xffffff);
while (hex.length() < 6) {
hex = "0" + hex;
}
return "#" + hex;
}
}
/**
* Converts the string to a best-guess valid MGRS string, if possible.<br/>
* <br/>
* This method checks only the pattern, not the coordinate itself. For example,
* 60CVS1234567890 is valid, but 60CVR1234567890 is not, because zone 60C has
* a VS square but not a VR square. This method considers both of those strings
* to be valid, because both of them match the pattern.<br/>
* <br/>
* This method will check and try to correct at least the following:
* <ul>
* <li>Digits before zone A, B, Y, Z (correction: omit the numbers)</li>
* <li>More than two digits before zone letter (no correction)</li>
* <li>Grid zone number higher than 60 (no correction available)</li>
* <li>100,000-meter square with more than two letters (no correction)</li>
* <li>100,000-meter square with fewer than two letters (no correction available)</li>
* <li>Odd number of easting/northing digits (no correction)</li>
* </ul>
* TODO this method might go away when fromMilitaryGrid handles bad strings gracefully.
* @param mgrs the MGRS string.
* @param referenceMgrs a reference MGRS location for calculating a missing grid
* zone identifier. If mgrs does not include a grid zone identifier,
* this parameter's grid zone identifier will be prepended to mgrs.
* This parameter can be null if mgrs contains a grid zone identifier.
* @return the string itself, or a best guess at a valid equivalent of the string,
* or null if the string is known to be invalid and cannot be converted.
*/
public static String convertToValidMgrs(String mgrs, String referenceMgrs) {
if (null == mgrs) {
return null;
}
//Remove non-alphanumeric
mgrs = mgrs.replaceAll("[^a-zA-Z0-9]", "").toUpperCase();
//Check for MGRS without grid zone identifier and add it if necessary
Matcher gzlessMatcher = Pattern.compile("[A-Z]{2}[0-9]*").matcher(mgrs);
if (null != referenceMgrs && gzlessMatcher.matches()) {
Matcher gzMatcher = Pattern.compile("[0-9]{0,2}[A-Z]").matcher(referenceMgrs);
if (gzMatcher.find() && 0 == gzMatcher.start()) {
mgrs = referenceMgrs.substring(0, gzMatcher.end()) + mgrs;
}
}
/**
* A good MGRS string looks like this:
* <grid zone ID><2 letters><even number of digits>
* A grid zone ID looks like this:
* <1-2 digits><letter C-X>
* or
* <letter A, B, Y, or Z>
* That means every MGRS string looks like this:
* <0-2 digits><3 letters><even number of digits>
*/
Pattern pattern = Pattern.compile("[A-Z]+");
Matcher matcher = pattern.matcher(mgrs);
if (!matcher.find()) {
//There are no letters; nothing we can do
return null;
}
Pattern polarPattern = Pattern.compile("[ABYZ][A-Z]{2}[0-9]*");
if (0 == matcher.start()) {
//This string starts with letters; make sure it's polar
if (!polarPattern.matcher(mgrs).matches()) {
return null;
}
} else {
//If the first letter is A, B, Y, or Z, omit the leading digits
char firstLetter = mgrs.charAt(matcher.start());
if ('A' == firstLetter || 'B' == firstLetter || 'Y' == firstLetter || 'Z' == firstLetter) {
mgrs = mgrs.substring(matcher.start());
if (!polarPattern.matcher(mgrs).matches()) {
return null;
}
} else {
Matcher nonPolarMatcher = Pattern.compile("[0-9]{1,2}[C-X][A-Z]{2}[0-9]*").matcher(mgrs);
if (!nonPolarMatcher.matches()) {
return null;
}
//This string starts with numbers; see what they are
int gridZoneNumber = Integer.parseInt(mgrs.substring(0, matcher.start()));
if (0 >= gridZoneNumber || 60 < gridZoneNumber) {
return null;
}
}
}
//Last thing: return null if there's an odd number of easting/northing digits
Matcher threeLetters = Pattern.compile("[A-Z]{3}").matcher(mgrs);
threeLetters.find();
if (threeLetters.end() < mgrs.length()) {
String eastingNorthing = mgrs.substring(threeLetters.end());
if (1 == eastingNorthing.length() % 2) {
return null;
}
}
return mgrs;
}
/**
* Parses an XML Schema Part 2 dateTime string and returns a corresponding Calendar.
* Java SE includes this capability in javax.xml.bind.DataTypeConverter.parseDateTime(String),
* but Android does not.
* @param xmlDateTime an XML Schema Part 2 dateTime string, as defined in
* http://www.w3.org/TR/xmlschema-2 . This method ignores
* any spaces in the string.
* @return a Calendar set to the time corresponding to xmlDateTime.
* @throws Exception if the input is null or improperly formatted.
*/
public static Calendar parseXmlDateTime(String xmlDateTime) throws Exception {
xmlDateTime = xmlDateTime.replace(" ", "");
int dashIndex = xmlDateTime.indexOf('-');
if (0 == dashIndex) {
dashIndex = xmlDateTime.indexOf('-', 1);
}
int year = Integer.parseInt(xmlDateTime.substring(0, dashIndex));
int cursor = dashIndex + 1;
dashIndex += 3;
int month = Integer.parseInt(xmlDateTime.substring(cursor, dashIndex));
cursor = dashIndex + 1;
int tIndex = dashIndex + 3;
int day = Integer.parseInt(xmlDateTime.substring(cursor, tIndex));
cursor = tIndex + 1;
int colonIndex = tIndex + 3;
int hour = Integer.parseInt(xmlDateTime.substring(cursor, colonIndex));
cursor = colonIndex + 1;
colonIndex += 3;
int minute = Integer.parseInt(xmlDateTime.substring(cursor, colonIndex));
cursor = colonIndex + 1;
int wholeSeconds = Integer.parseInt(xmlDateTime.substring(cursor, cursor + 2));
cursor = cursor += 2;
//Fractional seconds and time zone are optional. That means we might be done.
float fractionalSeconds = 0f;
TimeZone timeZone = null;
if (xmlDateTime.length() > (colonIndex + 3)) {
//Check for fractional seconds
if ('.' == xmlDateTime.charAt(cursor)) {
//Get all the numeric chars
char nextChar;
float factor = 0.1f;
while ('0' <= (nextChar = xmlDateTime.charAt(++cursor)) && '9' >= nextChar) {
fractionalSeconds += factor * (float) (nextChar - 48);
factor *= 0.1;
}
}
//Check for time zone
if (cursor < xmlDateTime.length()) {
String tzString = xmlDateTime.substring(cursor);
if ("Z".equals(tzString)) {
tzString = "UTC";
}
timeZone = TimeZone.getTimeZone(tzString);
}
}
if (null == timeZone) {
timeZone = TimeZone.getTimeZone("UTC");
}
Calendar cal = Calendar.getInstance(timeZone);
//In Java Calendar, month is zero-based, so subtract one
cal.set(year, month - 1, day, hour, minute, wholeSeconds);
cal.set(Calendar.MILLISECOND, Math.round(fractionalSeconds * 1000f));
return cal;
}
/**
* Returns the abbreviation for the angular unit with the specified WKID. ArcGIS
* SDKs typically have an AngularUnit class with a getAbbreviation method, but
* sometimes getAbbreviation doesn't return the abbreviation you might expect.
* For example, it might return "deg" for degrees instead of returning the degrees
* symbol. This method offers better abbreviations for some angular units. You could
* call it this way:<br/>
* <pre>String abbr = getAngularUnitAbbreviation(
* myAngularUnit.getID(),
* myAngularUnit.getAbbreviation());</pre>
* @param wkid the angular unit's WKID.
* @param defaultValue the value to be returned if this method does not know about
* an abbreviation for the angular unit with the specified WKID.
* @return the angular unit's abbreviation, or <code>defaultValue</code> if this
* method does not know about an abbreviation for the angular unit with
* the specified WKID.
*/
public static String getAngularUnitAbbreviation(int wkid, String defaultValue) {
if (9102 == wkid) { //degrees
return "\u00B0";
} else if (9114 == wkid) {// mils
return "\u20A5";
} else {
return defaultValue;
}
}
/**
* A convenience method for creating the following structure:
* <geomessages>
* <geomessage />
* </geomessages>
* @return
* @throws ParserConfigurationException
*/
public static DomNodeAndDocument createGeomessageDocument() throws ParserConfigurationException {
DocumentBuilder docBuilder = docBuilderFactory.newDocumentBuilder();
Document doc = docBuilder.newDocument();
Element geomessagesElement = doc.createElement("geomessages");
doc.appendChild(geomessagesElement);
Element geomessageElement = doc.createElement("geomessage");
geomessageElement.setAttribute("v", "1.0");
geomessagesElement.appendChild(geomessageElement);
return new DomNodeAndDocument(geomessageElement, doc);
}
/**
 * Convenience method for adding an XML text element. For example, if you call
 * the method like this:<br/>
 * <br/>
 * <code>Utilities.addTextElement(doc, parent, "lastName", "Lockwood");</code><br/>
 * <br/>
 * The new node, when rendered as a string, will look like this:<br/>
 * <br/>
 * <code>&lt;lastName&gt;Lockwood&lt;/lastName&gt;</code>
 * @param document the document where the parent resides, and where the new text
 *                 element will reside. The document may or may not be the parent
 *                 node itself.
 * @param parentNode the parent node of the new text element.
 * @param elementName the name of the element.
 * @param elementText the string within the element.
 */
public static void addTextElement(Document document, Node parentNode, String elementName, String elementText) {
    // Build <elementName>elementText</elementName> and attach it to the parent.
    Element textElement = document.createElement(elementName);
    textElement.appendChild(document.createTextNode(elementText));
    parentNode.appendChild(textElement);
}
/**
 * Serializes a DOM Document to its XML string form.
 * @param doc the DOM Document to render.
 * @return the XML string representation of the document.
 * @throws TransformerException if the document cannot be transformed.
 */
public static String documentToString(Document doc) throws TransformerException {
    StringWriter writer = new StringWriter();
    transformerFactory.newTransformer()
            .transform(new DOMSource(doc), new StreamResult(writer));
    return writer.toString();
}
/**
 * Calculates the UDP broadcast addresses for the current network interface(s).
 * Only interfaces that are up and not loopback are considered; addresses without
 * a broadcast address (e.g. IPv6 interface addresses) are skipped.
 * Adapted from http://stackoverflow.com/questions/4887675/detecting-all-available-networks-broadcast-addresses-in-java .
 * @return a set of UDP broadcast addresses for the current network interface(s);
 *         empty if none are found or the interfaces cannot be queried.
 */
public static Set<InetAddress> getUdpBroadcastAddresses() {
    HashSet<InetAddress> listOfBroadcasts = new HashSet<InetAddress>();
    try {
        Enumeration<NetworkInterface> interfaces = NetworkInterface.getNetworkInterfaces();
        while (interfaces.hasMoreElements()) {
            // nextElement() is already typed as NetworkInterface; no cast needed.
            NetworkInterface iface = interfaces.nextElement();
            // Guard clauses: skip null, loopback, and down interfaces.
            if (iface == null || iface.isLoopback() || !iface.isUp()) {
                continue;
            }
            for (InterfaceAddress address : iface.getInterfaceAddresses()) {
                if (address == null) {
                    continue;
                }
                // getBroadcast() returns null when the address has no broadcast address.
                InetAddress broadcast = address.getBroadcast();
                if (broadcast != null) {
                    listOfBroadcasts.add(broadcast);
                }
            }
        }
    } catch (SocketException ex) {
        // Best effort: log and return whatever was collected so far.
        Logger.getLogger(Utilities.class.getName()).log(Level.SEVERE, "Error while getting network interfaces", ex);
    }
    return listOfBroadcasts;
}
}
| apache-2.0 |
IsuraD/identity-governance | components/org.wso2.carbon.identity.recovery/src/main/java/org/wso2/carbon/identity/recovery/handler/UserSelfRegistrationHandler.java | 22277 | /*
* Copyright (c) 2016, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
 * limitations under the License.
*/
package org.wso2.carbon.identity.recovery.handler;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.wso2.carbon.CarbonConstants;
import org.wso2.carbon.identity.application.common.model.User;
import org.wso2.carbon.identity.base.IdentityRuntimeException;
import org.wso2.carbon.identity.core.bean.context.MessageContext;
import org.wso2.carbon.identity.core.handler.InitConfig;
import org.wso2.carbon.identity.core.util.IdentityUtil;
import org.wso2.carbon.identity.event.IdentityEventConstants;
import org.wso2.carbon.identity.event.IdentityEventException;
import org.wso2.carbon.identity.event.event.Event;
import org.wso2.carbon.identity.event.handler.AbstractEventHandler;
import org.wso2.carbon.identity.governance.IdentityGovernanceUtil;
import org.wso2.carbon.identity.governance.IdentityMgtConstants;
import org.wso2.carbon.identity.governance.exceptions.notiification.NotificationChannelManagerClientException;
import org.wso2.carbon.identity.governance.exceptions.notiification.NotificationChannelManagerException;
import org.wso2.carbon.identity.governance.service.notification.NotificationChannelManager;
import org.wso2.carbon.identity.governance.service.notification.NotificationChannels;
import org.wso2.carbon.identity.recovery.IdentityRecoveryClientException;
import org.wso2.carbon.identity.recovery.IdentityRecoveryConstants;
import org.wso2.carbon.identity.recovery.IdentityRecoveryException;
import org.wso2.carbon.identity.recovery.RecoveryScenarios;
import org.wso2.carbon.identity.recovery.RecoverySteps;
import org.wso2.carbon.identity.recovery.internal.IdentityRecoveryServiceDataHolder;
import org.wso2.carbon.identity.recovery.model.Property;
import org.wso2.carbon.identity.recovery.model.UserRecoveryData;
import org.wso2.carbon.identity.recovery.store.JDBCRecoveryDataStore;
import org.wso2.carbon.identity.recovery.store.UserRecoveryDataStore;
import org.wso2.carbon.identity.recovery.util.Utils;
import org.wso2.carbon.registry.core.utils.UUIDGenerator;
import org.wso2.carbon.user.core.UserCoreConstants;
import org.wso2.carbon.user.core.UserStoreException;
import org.wso2.carbon.user.core.UserStoreManager;
import java.security.SecureRandom;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * Event handler for the user self sign-up flow. On POST_ADD_USER events for users
 * carrying the self sign-up role, it stores a confirmation code, optionally locks
 * the new account, and triggers the account-confirmation notification on the
 * user's resolved notification channel.
 */
public class UserSelfRegistrationHandler extends AbstractEventHandler {

    private static final Log log = LogFactory.getLog(UserSelfRegistrationHandler.class);

    /** Internal handler name used for registration/configuration lookup. */
    public String getName() {
        return "userSelfRegistration";
    }

    /** Human-readable handler name. */
    public String getFriendlyName() {
        return "User Self Registration";
    }

    /**
     * Handles POST_ADD_USER events for self-registered users: resolves the
     * notification channel, stores a SELF_SIGN_UP recovery entry with a secret
     * key, sends the confirmation notification (when internally managed), and
     * locks the account when account-lock-on-creation is enabled.
     *
     * @param event the identity event; only POST_ADD_USER is acted upon.
     * @throws IdentityEventException on configuration, notification, or user store errors.
     */
    @Override
    public void handleEvent(Event event) throws IdentityEventException {

        Map<String, Object> eventProperties = event.getEventProperties();
        String userName = (String) eventProperties.get(IdentityEventConstants.EventProperty.USER_NAME);
        UserStoreManager userStoreManager = (UserStoreManager) eventProperties.get(IdentityEventConstants.EventProperty.USER_STORE_MANAGER);

        String tenantDomain = (String) eventProperties.get(IdentityEventConstants.EventProperty.TENANT_DOMAIN);
        String domainName = userStoreManager.getRealmConfiguration().getUserStoreProperty(UserCoreConstants.RealmConfig.PROPERTY_DOMAIN_NAME);
        String[] roleList = (String[]) eventProperties.get(IdentityEventConstants.EventProperty.ROLE_LIST);

        User user = new User();
        user.setUserName(userName);
        user.setTenantDomain(tenantDomain);
        user.setUserStoreDomain(domainName);

        boolean enable = Boolean.parseBoolean(Utils.getConnectorConfig(
                IdentityRecoveryConstants.ConnectorConfig.ENABLE_SELF_SIGNUP, user.getTenantDomain()));

        if (!enable) {
            // Self signup feature is disabled for this tenant; nothing to do.
            if (log.isDebugEnabled()) {
                log.debug("Self signup feature is disabled in tenant: " + tenantDomain);
            }
            return;
        }

        // Check whether the self sign-up role is in the request. If it is not there,
        // this handler has nothing to do; just return.
        if (roleList == null) {
            return;
        } else {
            List<String> roles = Arrays.asList(roleList);
            if (!roles.contains(IdentityRecoveryConstants.SELF_SIGNUP_ROLE)) {
                return;
            }
        }

        boolean isAccountLockOnCreation = Boolean.parseBoolean(Utils.getConnectorConfig
                (IdentityRecoveryConstants.ConnectorConfig.ACCOUNT_LOCK_ON_CREATION, user.getTenantDomain()));

        boolean isNotificationInternallyManage = Boolean.parseBoolean(Utils.getConnectorConfig
                (IdentityRecoveryConstants.ConnectorConfig.SIGN_UP_NOTIFICATION_INTERNALLY_MANAGE, user.getTenantDomain()));

        if (IdentityEventConstants.Event.POST_ADD_USER.equals(event.getEventName())) {
            UserRecoveryDataStore userRecoveryDataStore = JDBCRecoveryDataStore.getInstance();
            try {
                // Get the user preferred notification channel.
                String preferredChannel = resolveNotificationChannel(eventProperties, userName, tenantDomain,
                        domainName);
                // If the preferred channel is already verified, no need to send the notifications or lock
                // the account.
                boolean notificationChannelVerified = isNotificationChannelVerified(userName, tenantDomain,
                        preferredChannel, eventProperties);
                if (notificationChannelVerified) {
                    return;
                }
                // Confirmation notifications are sent from here only when notifications are
                // internally managed AND the account is locked on creation; externally managed
                // notifications are not sent by this handler.
                if (isNotificationInternallyManage && isAccountLockOnCreation) {
                    // Invalidate any previous recovery data for this user before storing new data.
                    userRecoveryDataStore.invalidate(user);

                    // Create a secret key based on the preferred notification channel
                    // (OTP for SMS, UUID otherwise).
                    String secretKey = generateSecretKey(preferredChannel);

                    // Resolve event name.
                    String eventName = resolveEventName(preferredChannel, userName, domainName, tenantDomain);

                    UserRecoveryData recoveryDataDO = new UserRecoveryData(user, secretKey,
                            RecoveryScenarios.SELF_SIGN_UP, RecoverySteps.CONFIRM_SIGN_UP);

                    // Notified channel is stored in remaining setIds for recovery purposes.
                    recoveryDataDO.setRemainingSetIds(preferredChannel);
                    userRecoveryDataStore.store(recoveryDataDO);
                    triggerNotification(user, preferredChannel, secretKey, Utils.getArbitraryProperties(), eventName);
                }
            } catch (IdentityRecoveryException e) {
                throw new IdentityEventException("Error while sending self sign up notification ", e);
            }
            if (isAccountLockOnCreation) {
                HashMap<String, String> userClaims = new HashMap<>();
                // Need to lock the user account until sign-up confirmation.
                userClaims.put(IdentityRecoveryConstants.ACCOUNT_LOCKED_CLAIM, Boolean.TRUE.toString());
                if (Utils.isAccountStateClaimExisting(tenantDomain)) {
                    userClaims.put(IdentityRecoveryConstants.ACCOUNT_STATE_CLAIM_URI,
                            IdentityRecoveryConstants.PENDING_SELF_REGISTRATION);
                }
                try {
                    userStoreManager.setUserClaimValues(user.getUserName() , userClaims, null);
                    if (log.isDebugEnabled()) {
                        log.debug("Locked user account: " + user.getUserName());
                    }
                } catch (UserStoreException e) {
                    throw new IdentityEventException("Error while lock user account :" + user.getUserName(), e);
                }
            }
        }
    }

    /**
     * Resolve the event name according to the notification channel.
     *
     * @param preferredChannel User preferred notification channel
     * @param userName         Username
     * @param domainName       Domain name
     * @param tenantDomain     Tenant domain name
     * @return Resolved event name
     */
    private String resolveEventName(String preferredChannel, String userName, String domainName, String tenantDomain) {

        String eventName;
        // EMAIL uses the generic TRIGGER_NOTIFICATION event; other channels use a
        // channel-specific event name built as PREFIX + channel + SUFFIX.
        if (NotificationChannels.EMAIL_CHANNEL.getChannelType().equals(preferredChannel)) {
            eventName = IdentityEventConstants.Event.TRIGGER_NOTIFICATION;
        } else {
            eventName = IdentityRecoveryConstants.NOTIFICATION_EVENTNAME_PREFIX + preferredChannel
                    + IdentityRecoveryConstants.NOTIFICATION_EVENTNAME_SUFFIX;
        }
        if (log.isDebugEnabled()) {
            String message = String
                    .format("For user : %1$s in domain : %2$s, notifications were sent from the event : %3$s",
                            domainName + CarbonConstants.DOMAIN_SEPARATOR + userName, tenantDomain, eventName);
            log.debug(message);
        }
        return eventName;
    }

    /**
     * Resolve the preferred notification channel for the user.
     *
     * @param eventProperties Event properties
     * @param userName        Username
     * @param tenantDomain    Tenant domain of the user
     * @param domainName      Userstore domain name of the user
     * @return Resolved preferred notification channel
     * @throws IdentityEventException Error while resolving the notification channel
     */
    private String resolveNotificationChannel(Map<String, Object> eventProperties, String userName, String tenantDomain,
            String domainName) throws IdentityEventException {

        // If channel resolving logic is not enabled, return the server default notification channel. Do not need to
        // resolve using user preferred channel.
        if (!Boolean.parseBoolean(
                IdentityUtil.getProperty(IdentityMgtConstants.PropertyConfig.RESOLVE_NOTIFICATION_CHANNELS))) {
            return IdentityGovernanceUtil.getDefaultNotificationChannel();
        }
        // Get the user preferred notification channel from the event properties.
        String preferredChannel = (String) eventProperties.get(IdentityRecoveryConstants.PREFERRED_CHANNEL_CLAIM);
        // Fall back to the NotificationChannelManager when no preferred channel claim is present.
        if (StringUtils.isEmpty(preferredChannel)) {
            NotificationChannelManager notificationChannelManager = Utils.getNotificationChannelManager();
            try {
                preferredChannel = notificationChannelManager
                        .resolveCommunicationChannel(userName, tenantDomain, domainName);
            } catch (NotificationChannelManagerException e) {
                handledNotificationChannelManagerException(e, userName, domainName, tenantDomain);
            }
        }
        if (log.isDebugEnabled()) {
            String message = String
                    .format("Notification channel : %1$s for the user : %2$s in domain : %3$s.",
                            preferredChannel, domainName + CarbonConstants.DOMAIN_SEPARATOR + userName,
                            tenantDomain);
            log.debug(message);
        }
        return preferredChannel;
    }

    /**
     * Handles NotificationChannelManagerException thrown in resolving the channel.
     * Logs a scenario-specific debug message and rethrows as IdentityEventException.
     *
     * @param e            NotificationChannelManagerException
     * @param userName     Username
     * @param domainName   Domain name
     * @param tenantDomain Tenant domain name
     * @throws IdentityEventException Error resolving the channel.
     */
    private void handledNotificationChannelManagerException(NotificationChannelManagerException e, String userName,
            String domainName, String tenantDomain) throws IdentityEventException {

        if (StringUtils.isNotEmpty(e.getErrorCode()) && StringUtils.isNotEmpty(e.getMessage())) {
            if (IdentityMgtConstants.ErrorMessages.ERROR_CODE_NO_NOTIFICATION_CHANNELS.getCode()
                    .equals(e.getErrorCode())) {
                if (log.isDebugEnabled()) {
                    String error = String.format("No communication channel for user : %1$s in domain: %2$s",
                            domainName + CarbonConstants.DOMAIN_SEPARATOR + userName, tenantDomain);
                    log.debug(error, e);
                }
            } else {
                if (log.isDebugEnabled()) {
                    String error = String.format("Error getting claim values for user : %1$s in domain: %2$s",
                            domainName + CarbonConstants.DOMAIN_SEPARATOR + userName, tenantDomain);
                    log.debug(error, e);
                }
            }
        } else {
            if (log.isDebugEnabled()) {
                String error = String.format("Error getting claim values for user : %1$s in domain: %2$s",
                        domainName + CarbonConstants.DOMAIN_SEPARATOR + userName, tenantDomain);
                log.debug(error, e);
            }
        }
        throw new IdentityEventException(e.getErrorCode(), e.getMessage());
    }

    /**
     * Checks whether the notification channel is already verified for the user.
     * NOTE(review): the verified status is only consulted when the
     * ENABLE_ACCOUNT_LOCK_FOR_VERIFIED_PREFERRED_CHANNEL config is disabled;
     * otherwise this always returns false.
     *
     * @param username            Username
     * @param tenantDomain        Tenant domain
     * @param notificationChannel Notification channel
     * @param eventProperties     Properties related to the event
     * @return True if the channel is already verified.
     * @throws IdentityRecoveryClientException Unsupported channel type
     */
    private boolean isNotificationChannelVerified(String username, String tenantDomain, String notificationChannel,
            Map<String, Object> eventProperties) throws IdentityRecoveryClientException {

        boolean isEnableAccountLockForVerifiedPreferredChannelEnabled = Boolean.parseBoolean(IdentityUtil.getProperty(
                IdentityRecoveryConstants.ConnectorConfig.ENABLE_ACCOUNT_LOCK_FOR_VERIFIED_PREFERRED_CHANNEL));
        if (!isEnableAccountLockForVerifiedPreferredChannelEnabled) {
            if (log.isDebugEnabled()) {
                String message = String
                        .format("SkipAccountLockOnVerifiedPreferredChannel is enabled for user : %s in domain : %s. "
                                + "Checking whether the user is already verified", username, tenantDomain);
                log.debug(message);
            }
            // Get the notification channel which matches the given channel type.
            NotificationChannels channel = getNotificationChannel(username, notificationChannel);

            // Get the matching claim uri for the channel.
            String verifiedClaimUri = channel.getVerifiedClaimUrl();

            // Get the verified status for given channel.
            boolean notificationChannelVerified = Boolean.parseBoolean((String) eventProperties.get(verifiedClaimUri));
            if (notificationChannelVerified) {
                if (log.isDebugEnabled()) {
                    String message = String
                            .format("Preferred Notification channel : %1$s is verified for the user : %2$s "
                                            + "in domain : %3$s. Therefore, no notifications will be sent.",
                                    notificationChannel, username, tenantDomain);
                    log.debug(message);
                }
            }
            return notificationChannelVerified;
        }
        return false;
    }

    /**
     * Get the NotificationChannels object which matches the given channel type.
     *
     * @param username            Username
     * @param notificationChannel Notification channel
     * @return NotificationChannels object
     * @throws IdentityRecoveryClientException Unsupported channel type
     */
    private NotificationChannels getNotificationChannel(String username, String notificationChannel)
            throws IdentityRecoveryClientException {

        NotificationChannels channel;
        try {
            channel = NotificationChannels.getNotificationChannel(notificationChannel);
        } catch (NotificationChannelManagerClientException e) {
            if (log.isDebugEnabled()) {
                log.debug("Unsupported channel type : " + notificationChannel);
            }
            throw Utils.handleClientException(
                    IdentityRecoveryConstants.ErrorMessages.ERROR_CODE_UNSUPPORTED_PREFERRED_CHANNELS, username, e);
        }
        return channel;
    }

    @Override
    public void init(InitConfig configuration) throws IdentityRuntimeException {
        super.init(configuration);
    }

    /** Handler execution priority within the event framework. */
    @Override
    public int getPriority(MessageContext messageContext) {
        return 60;
    }

    /**
     * Triggers the generic TRIGGER_NOTIFICATION event with the given template type
     * and optional confirmation code.
     *
     * @param user User to notify
     * @param type Notification template type
     * @param code Confirmation code (added to properties only when non-blank)
     * @param props Additional event properties
     * @throws IdentityRecoveryException Error triggering the notification
     */
    protected void triggerNotification(User user, String type, String code, Property[] props) throws
            IdentityRecoveryException {

        if (log.isDebugEnabled()) {
            log.debug("Sending self user registration notification user: " + user.getUserName());
        }

        String eventName = IdentityEventConstants.Event.TRIGGER_NOTIFICATION;

        HashMap<String, Object> properties = new HashMap<>();
        properties.put(IdentityEventConstants.EventProperty.USER_NAME, user.getUserName());
        properties.put(IdentityEventConstants.EventProperty.TENANT_DOMAIN, user.getTenantDomain());
        properties.put(IdentityEventConstants.EventProperty.USER_STORE_DOMAIN, user.getUserStoreDomain());

        if (props != null && props.length > 0) {
            for (int i = 0; i < props.length; i++) {
                properties.put(props[i].getKey(), props[i].getValue());
            }
        }
        if (StringUtils.isNotBlank(code)) {
            properties.put(IdentityRecoveryConstants.CONFIRMATION_CODE, code);
        }
        properties.put(IdentityRecoveryConstants.TEMPLATE_TYPE, type);
        Event identityMgtEvent = new Event(eventName, properties);
        try {
            IdentityRecoveryServiceDataHolder.getInstance().getIdentityEventService().handleEvent(identityMgtEvent);
        } catch (IdentityEventException e) {
            throw Utils.handleServerException(IdentityRecoveryConstants.ErrorMessages.ERROR_CODE_TRIGGER_NOTIFICATION, user
                    .getUserName(), e);
        }
    }

    /**
     * Triggers notifications according to the given event name.
     *
     * @param user                User
     * @param notificationChannel Notification channel
     * @param code                Recovery code
     * @param props               Event properties
     * @param eventName           Name of the event
     * @throws IdentityRecoveryException Error triggering notifications
     */
    private void triggerNotification(User user, String notificationChannel, String code, Property[] props,
            String eventName) throws IdentityRecoveryException {

        if (log.isDebugEnabled()) {
            log.debug("Sending self user registration notification user: " + user.getUserName());
        }
        HashMap<String, Object> properties = new HashMap<>();
        properties.put(IdentityEventConstants.EventProperty.USER_NAME, user.getUserName());
        properties.put(IdentityEventConstants.EventProperty.TENANT_DOMAIN, user.getTenantDomain());
        properties.put(IdentityEventConstants.EventProperty.USER_STORE_DOMAIN, user.getUserStoreDomain());
        properties.put(IdentityEventConstants.EventProperty.NOTIFICATION_CHANNEL, notificationChannel);

        if (props != null && props.length > 0) {
            for (Property prop : props) {
                properties.put(prop.getKey(), prop.getValue());
            }
        }
        if (StringUtils.isNotBlank(code)) {
            properties.put(IdentityRecoveryConstants.CONFIRMATION_CODE, code);
        }
        // Always uses the account-confirmation template type for self sign-up.
        properties.put(IdentityRecoveryConstants.TEMPLATE_TYPE,
                IdentityRecoveryConstants.NOTIFICATION_TYPE_ACCOUNT_CONFIRM);
        Event identityMgtEvent = new Event(eventName, properties);
        try {
            IdentityRecoveryServiceDataHolder.getInstance().getIdentityEventService().handleEvent(identityMgtEvent);
        } catch (IdentityEventException e) {
            throw Utils.handleServerException(IdentityRecoveryConstants.ErrorMessages.ERROR_CODE_TRIGGER_NOTIFICATION,
                    user.getUserName(), e);
        }
    }

    /**
     * Generate an OTP for the SMS notification channel, using the configured
     * character set and code length.
     *
     * @return OTP
     */
    private String generateSMSOTP() {

        char[] chars = IdentityRecoveryConstants.SMS_OTP_GENERATE_CHAR_SET.toCharArray();
        SecureRandom rnd = new SecureRandom();
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < IdentityRecoveryConstants.SMS_OTP_CODE_LENGTH; i++) {
            sb.append(chars[rnd.nextInt(chars.length)]);
        }
        return sb.toString();
    }

    /**
     * Generate a secret key according to the given channel. Method will generate an OTP for mobile channel and a
     * UUID for other channels.
     *
     * @param channel Recovery notification channel.
     * @return Secret key
     */
    private String generateSecretKey(String channel) {

        if (NotificationChannels.SMS_CHANNEL.getChannelType().equals(channel)) {
            if (log.isDebugEnabled()) {
                log.debug("OTP was generated for the user for channel : " + channel);
            }
            return generateSMSOTP();
        } else {
            if (log.isDebugEnabled()) {
                log.debug("UUID was generated for the user for channel : " + channel);
            }
            return UUIDGenerator.generateUUID();
        }
    }
}
| apache-2.0 |
mogoweb/365browser | app/src/main/java/org/chromium/chrome/browser/bookmarks/BookmarkActionBar.java | 9004 | // Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.chrome.browser.bookmarks;
import android.content.Context;
import android.support.v7.widget.Toolbar.OnMenuItemClickListener;
import android.text.TextUtils;
import android.util.AttributeSet;
import android.view.MenuItem;
import android.view.View.OnClickListener;
import org.chromium.chrome.R;
import org.chromium.chrome.browser.bookmarks.BookmarkBridge.BookmarkItem;
import org.chromium.chrome.browser.bookmarks.BookmarkBridge.BookmarkModelObserver;
import org.chromium.chrome.browser.preferences.PrefServiceBridge;
import org.chromium.chrome.browser.tabmodel.TabModel.TabLaunchType;
import org.chromium.chrome.browser.tabmodel.document.TabDelegate;
import org.chromium.chrome.browser.widget.selection.SelectableListToolbar;
import org.chromium.chrome.browser.widget.selection.SelectionDelegate;
import org.chromium.components.bookmarks.BookmarkId;
import org.chromium.components.bookmarks.BookmarkType;
import org.chromium.content_public.browser.LoadUrlParams;
import java.util.List;
/**
* Main action bar of bookmark UI. It is responsible for displaying title and buttons
* associated with the current context.
*/
public class BookmarkActionBar extends SelectableListToolbar<BookmarkId>
        implements BookmarkUIObserver, OnMenuItemClickListener, OnClickListener {

    // Folder currently shown by the bookmark UI; drives the title and navigation button.
    private BookmarkItem mCurrentFolder;
    // Delegate providing access to the bookmark model, selection state and navigation.
    private BookmarkDelegate mDelegate;

    // Re-evaluates the selection-dependent menu state whenever the bookmark model changes.
    private BookmarkModelObserver mBookmarkModelObserver = new BookmarkModelObserver() {
        @Override
        public void bookmarkModelChanged() {
            onSelectionStateChange(mDelegate.getSelectionDelegate().getSelectedItems());
        }
    };

    /**
     * Inflates the action bar menu and sets up navigation and menu-click handling.
     * Menu item titles are set here so the same menu resource can be reused with
     * bookmark-specific labels.
     */
    public BookmarkActionBar(Context context, AttributeSet attrs) {
        super(context, attrs);
        setNavigationOnClickListener(this);
        inflateMenu(R.menu.bookmark_action_bar_menu);
        setOnMenuItemClickListener(this);

        getMenu().findItem(R.id.selection_mode_edit_menu_id).setTitle(R.string.edit_bookmark);
        getMenu().findItem(R.id.selection_mode_move_menu_id)
                .setTitle(R.string.bookmark_action_bar_move);
        getMenu().findItem(R.id.selection_mode_delete_menu_id)
                .setTitle(R.string.bookmark_action_bar_delete);
    }

    /**
     * Back navigation: exits search mode if active, otherwise opens the parent folder.
     */
    @Override
    public void onNavigationBack() {
        if (mIsSearching) {
            super.onNavigationBack();
            return;
        }

        mDelegate.openFolder(mCurrentFolder.getParentId());
    }

    /**
     * Dispatches action bar menu clicks: folder edit, close, search, and the
     * selection-mode actions (edit/move/delete/open in new or incognito tab).
     *
     * @return true if the click was handled.
     */
    @Override
    public boolean onMenuItemClick(MenuItem menuItem) {
        hideOverflowMenu();
        SelectionDelegate<BookmarkId> selectionDelegate = mDelegate.getSelectionDelegate();
        if (menuItem.getItemId() == R.id.edit_menu_id) {
            // Edits the folder currently being viewed (not a selection).
            BookmarkAddEditFolderActivity.startEditFolderActivity(getContext(),
                    mCurrentFolder.getId());
            return true;
        } else if (menuItem.getItemId() == R.id.close_menu_id) {
            BookmarkUtils.finishActivityOnPhone(getContext());
            return true;
        } else if (menuItem.getItemId() == R.id.search_menu_id) {
            mDelegate.openSearchUI();
            return true;
        } else if (menuItem.getItemId() == R.id.selection_mode_edit_menu_id) {
            // Edit is only shown for single selection; see onSelectionStateChange().
            List<BookmarkId> list = selectionDelegate.getSelectedItems();
            assert list.size() == 1;
            BookmarkItem item = mDelegate.getModel().getBookmarkById(list.get(0));
            if (item.isFolder()) {
                BookmarkAddEditFolderActivity.startEditFolderActivity(getContext(), item.getId());
            } else {
                BookmarkUtils.startEditActivity(getContext(), item.getId());
            }
            return true;
        } else if (menuItem.getItemId() == R.id.selection_mode_move_menu_id) {
            List<BookmarkId> list = selectionDelegate.getSelectedItems();
            if (list.size() >= 1) {
                BookmarkFolderSelectActivity.startFolderSelectActivity(getContext(),
                        list.toArray(new BookmarkId[list.size()]));
            }
            return true;
        } else if (menuItem.getItemId() == R.id.selection_mode_delete_menu_id) {
            mDelegate.getModel().deleteBookmarks(
                    selectionDelegate.getSelectedItems().toArray(new BookmarkId[0]));
            return true;
        } else if (menuItem.getItemId() == R.id.selection_open_in_new_tab_id) {
            openBookmarksInNewTabs(selectionDelegate.getSelectedItems(), new TabDelegate(false),
                    mDelegate.getModel());
            selectionDelegate.clearSelection();
            return true;
        } else if (menuItem.getItemId() == R.id.selection_open_in_incognito_tab_id) {
            openBookmarksInNewTabs(selectionDelegate.getSelectedItems(), new TabDelegate(true),
                    mDelegate.getModel());
            selectionDelegate.clearSelection();
            return true;
        }

        assert false : "Unhandled menu click.";
        return false;
    }

    /**
     * Clears the title and hides navigation/menu items while the bookmark model
     * is still loading.
     */
    void showLoadingUi() {
        setTitle(null);
        setNavigationButton(NAVIGATION_BUTTON_NONE);
        getMenu().findItem(R.id.search_menu_id).setVisible(false);
        getMenu().findItem(R.id.edit_menu_id).setVisible(false);
    }

    // BookmarkUIObserver implementations.

    @Override
    public void onBookmarkDelegateInitialized(BookmarkDelegate delegate) {
        mDelegate = delegate;
        mDelegate.addUIObserver(this);
        // The close button only makes sense when bookmarks are shown as a dialog.
        if (!delegate.isDialogUi()) getMenu().removeItem(R.id.close_menu_id);
        delegate.getModel().addObserver(mBookmarkModelObserver);
    }

    @Override
    public void onDestroy() {
        // Unregister both observers to avoid leaking this toolbar.
        mDelegate.removeUIObserver(this);
        mDelegate.getModel().removeObserver(mBookmarkModelObserver);
    }

    /**
     * Updates title, navigation button, and folder-level menu items for the newly
     * opened folder.
     */
    @Override
    public void onFolderStateSet(BookmarkId folder) {
        mCurrentFolder = mDelegate.getModel().getBookmarkById(folder);

        getMenu().findItem(R.id.search_menu_id).setVisible(true);
        getMenu().findItem(R.id.edit_menu_id).setVisible(mCurrentFolder.isEditable());

        // If this is the root folder, we can't go up anymore.
        if (folder.equals(mDelegate.getModel().getRootFolderId())) {
            setTitle(R.string.bookmarks);
            setNavigationButton(NAVIGATION_BUTTON_NONE);
            return;
        }

        // Top-level folders with no title fall back to the generic "Bookmarks" title.
        if (mDelegate.getModel().getTopLevelFolderParentIDs().contains(mCurrentFolder.getParentId())
                && TextUtils.isEmpty(mCurrentFolder.getTitle())) {
            setTitle(R.string.bookmarks);
        } else {
            setTitle(mCurrentFolder.getTitle());
        }

        setNavigationButton(NAVIGATION_BUTTON_BACK);
    }

    @Override
    public void onSearchStateSet() {}

    /**
     * Adjusts selection-mode menu visibility based on the current selection:
     * edit only for single selection, incognito only when allowed, tab-opening
     * hidden for folders, move hidden for partner bookmarks.
     */
    @Override
    public void onSelectionStateChange(List<BookmarkId> selectedBookmarks) {
        super.onSelectionStateChange(selectedBookmarks);

        // The super class registers itself as a SelectionObserver before
        // #onBookmarkDelegateInitialized() is called. Return early if mDelegate has not been set.
        if (mDelegate == null) return;

        if (mIsSelectionEnabled) {
            // Editing a bookmark action on multiple selected items doesn't make sense. So disable.
            getMenu().findItem(R.id.selection_mode_edit_menu_id).setVisible(
                    selectedBookmarks.size() == 1);
            getMenu().findItem(R.id.selection_open_in_incognito_tab_id)
                    .setVisible(PrefServiceBridge.getInstance().isIncognitoModeEnabled());

            // It does not make sense to open a folder in new tab.
            for (BookmarkId bookmark : selectedBookmarks) {
                BookmarkItem item = mDelegate.getModel().getBookmarkById(bookmark);
                if (item != null && item.isFolder()) {
                    getMenu().findItem(R.id.selection_open_in_new_tab_id).setVisible(false);
                    getMenu().findItem(R.id.selection_open_in_incognito_tab_id).setVisible(false);
                    break;
                }
            }

            // Partner bookmarks can't move, so if the selection includes a partner bookmark,
            // disable the move button.
            for (BookmarkId bookmark : selectedBookmarks) {
                if (bookmark.getType() == BookmarkType.PARTNER) {
                    getMenu().findItem(R.id.selection_mode_move_menu_id).setVisible(false);
                    break;
                }
            }
        } else {
            mDelegate.notifyStateChange(this);
        }
    }

    // Opens each selected bookmark's URL as a background tab via the given delegate.
    private static void openBookmarksInNewTabs(
            List<BookmarkId> bookmarks, TabDelegate tabDelegate, BookmarkModel model) {
        for (BookmarkId id : bookmarks) {
            tabDelegate.createNewTab(new LoadUrlParams(model.getBookmarkById(id).getUrl()),
                    TabLaunchType.FROM_LONGPRESS_BACKGROUND, null);
        }
    }
}
| apache-2.0 |
Lidiany/SIAS | sias-model/src/sias/model/dao/UnidadeAtendimentoDAO.java | 8047 | package sias.model.dao;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.Statement;
import java.sql.Types;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import sias.model.base.BaseDAO;
import sias.model.pojo.Municipio;
import sias.model.pojo.Uf;
import sias.model.pojo.UnidadeAtendimento;
public class UnidadeAtendimentoDAO implements BaseDAO<UnidadeAtendimento> {
public static final String CRITERION_NOME_I_LIKE = "1";
public static final String CRITERION_MUNICIPIO_ID_EQ = "2";
public static final String CRITERION_UF_ID_EQ = "3";
@Override
public void create(UnidadeAtendimento e, Connection conn) throws Exception {
String sql = "INSERT INTO unidadeatendimento(nome, numerounidade, responsavel, telefone, logradouro, numero, complementoendereco, bairro, cep, municipio_fk, uf_fk) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) RETURNING id;";
PreparedStatement ps = conn.prepareStatement(sql);
int i = 0;
ps.setString(++i, e.getNome());
ps.setString(++i, e.getNumeroUnidade());
ps.setString(++i, e.getResponsavel());
ps.setString(++i, e.getTelefone());
ps.setString(++i, e.getLogradouro());
ps.setString(++i, e.getNumero());
ps.setString(++i, e.getComplementoEndereco());
ps.setString(++i, e.getBairro());
ps.setString(++i, e.getCep());
if (e.getMunicipio() != null) {
ps.setLong(++i, e.getMunicipio().getId());
} else {
ps.setNull(++i, Types.BIGINT);
}
if (e.getUf() != null){
ps.setLong(++i, e.getUf().getId());
} else{
ps.setNull(++i, Types.BIGINT);
}
ResultSet rs = ps.executeQuery();
if (rs.next()) {
e.setId(rs.getLong("id"));
}
rs.close();
ps.close();
}
@Override
public UnidadeAtendimento readById(Long id, Connection conn) throws Exception {
UnidadeAtendimento e = null;
String sql = "SELECT unidadeatendimento.*, municipio.id as municipio_id, municipio.nome as municipio_nome, uf.id as uf_id, uf.nome as uf_nome, uf.sigla as uf_sigla FROM unidadeatendimento LEFT JOIN municipio ON unidadeatendimento.municipio_fk = municipio.id LEFT JOIN uf ON unidadeatendimento.uf_fk = uf.id WHERE unidadeatendimento.id=?";
PreparedStatement ps = conn.prepareStatement(sql);
ps.setLong(1, id);
ResultSet rs = ps.executeQuery();
if (rs.next()) {
e = new UnidadeAtendimento();
e.setId(rs.getLong("id"));
e.setNome(rs.getString("nome"));
e.setNumeroUnidade(rs.getString("numerounidade"));
e.setResponsavel(rs.getString("responsavel"));
e.setTelefone(rs.getString("telefone"));
e.setLogradouro(rs.getString("logradouro"));
e.setNumero(rs.getString("numero"));
e.setComplementoEndereco(rs.getString("complementoendereco"));
e.setBairro(rs.getString("bairro"));
e.setCep(rs.getString("cep"));
Municipio municipio = new Municipio();
municipio.setId(rs.getLong("municipio_id"));
municipio.setNome(rs.getString("municipio_nome"));
e.setMunicipio(municipio);
Uf uf = new Uf();
uf.setId(rs.getLong("uf_id"));
uf.setNome(rs.getString("uf_nome"));
uf.setSigla(rs.getString("uf_sigla"));
e.setUf(uf);
/* PrintWriter out = new PrintWriter("C:\\Temp\\teste.txt");
out.println(ps.toString());
out.close();*/
}
rs.close();
ps.close();
return e;
}
@Override
public List<UnidadeAtendimento> readByCriteria(Map<String, Object> criteria, Connection conn) throws Exception {
List<UnidadeAtendimento> lista = new ArrayList<UnidadeAtendimento>();
String sql = "SELECT unidadeatendimento.*, municipio.id as municipio_id, municipio.nome as municipio_nome, uf.id as uf_id, uf.nome as uf_nome, uf.sigla as uf_sigla FROM unidadeatendimento LEFT JOIN uf ON unidadeatendimento.uf_fk = uf.id LEFT JOIN municipio ON unidadeatendimento.municipio_fk = municipio.id WHERE 1=1";
String criterionNomeILike = (String) criteria.get(CRITERION_NOME_I_LIKE);
if (criterionNomeILike != null && !criterionNomeILike.trim().isEmpty()) {
sql += " AND unidadeatendimento.nome ILIKE '%" + criterionNomeILike + "%'";
}
Long criterionMunicipioIdEq = (Long) criteria.get(CRITERION_MUNICIPIO_ID_EQ);
if (criterionMunicipioIdEq != null && criterionMunicipioIdEq > 0) {
sql += " AND municipio_fk ='" + criterionMunicipioIdEq + "'";
}
Long criterionUfIdEq = (Long) criteria.get(CRITERION_UF_ID_EQ);
if (criterionUfIdEq != null && criterionUfIdEq > 0) {
sql += " AND uf_fk ='" + criterionUfIdEq + "'";
}
Statement s = conn.createStatement();
ResultSet rs = s.executeQuery(sql);
while (rs.next()) {
UnidadeAtendimento unidadeAtendimento = new UnidadeAtendimento();
unidadeAtendimento.setId(rs.getLong("id"));
unidadeAtendimento.setNome(rs.getString("nome"));
unidadeAtendimento.setNumeroUnidade(rs.getString("numerounidade"));
unidadeAtendimento.setResponsavel(rs.getString("responsavel"));
unidadeAtendimento.setTelefone(rs.getString("telefone"));
unidadeAtendimento.setLogradouro(rs.getString("logradouro"));
unidadeAtendimento.setNumero(rs.getString("numero"));
unidadeAtendimento.setComplementoEndereco(rs.getString("complementoendereco"));
unidadeAtendimento.setBairro(rs.getString("bairro"));
unidadeAtendimento.setCep(rs.getString("cep"));
Municipio municipio = new Municipio();
municipio.setId(rs.getLong("municipio_id"));
municipio.setNome(rs.getString("municipio_nome"));
unidadeAtendimento.setMunicipio(municipio);
Uf uf = new Uf();
uf.setId(rs.getLong("uf_id"));
uf.setNome(rs.getString("uf_nome"));
uf.setSigla(rs.getString("uf_sigla"));
unidadeAtendimento.setUf(uf);
lista.add(unidadeAtendimento);
}
rs.close();
s.close();
return lista;
}
    @Override
    public void update(UnidadeAtendimento e, Connection conn) throws Exception {
        // Updates every scalar column plus the municipio/uf foreign keys of the row
        // identified by e.getId(). Parameters are bound strictly in the order they
        // appear in the SQL; the pre-increment on i keeps the 1-based JDBC index in
        // lockstep with the '?' placeholders - do not reorder these calls.
        String sql = "UPDATE unidadeatendimento SET nome=?, numeroUnidade=?, responsavel=?, telefone=?, logradouro=?, numero=?, complementoEndereco=?, bairro=?, cep=?, municipio_fk=?, uf_fk=? WHERE unidadeatendimento.id=?;";
        PreparedStatement ps = conn.prepareStatement(sql);
        int i = 0; // ++i yields 1 on first use (JDBC parameters are 1-based)
        ps.setString(++i, e.getNome());
        ps.setString(++i, e.getNumeroUnidade());
        ps.setString(++i, e.getResponsavel());
        ps.setString(++i, e.getTelefone());
        ps.setString(++i, e.getLogradouro());
        ps.setString(++i, e.getNumero());
        ps.setString(++i, e.getComplementoEndereco());
        ps.setString(++i, e.getBairro());
        ps.setString(++i, e.getCep());
        if (e.getMunicipio() != null) {
            ps.setLong(++i, e.getMunicipio().getId());
        } else {
            // No municipio attached -> store a NULL foreign key.
            ps.setNull(++i, Types.BIGINT);
        }
        if (e.getUf() != null){
            ps.setLong(++i, e.getUf().getId());
        } else{
            // No uf attached -> store a NULL foreign key.
            ps.setNull(++i, Types.BIGINT);
        }
        ps.setLong(++i, e.getId()); // WHERE clause parameter
        ps.execute();
        ps.close();
    }
@Override
public void delete(Long id, Connection conn) throws Exception {
Statement st = conn.createStatement();
st.execute("DELETE FROM unidadeatendimento WHERE id =" + id);
st.close();
}
}
| apache-2.0 |
provirus/tinyapps | Savior/saviorgui/src/main/java/ca/pgon/saviorgui/profile/Profile.java | 677 | /*
Tinyapps
https://github.com/provirus/tinyapps
Copyright (c) 2014-2021 Foilen (https://foilen.com)
The MIT License
http://opensource.org/licenses/MIT
*/
package ca.pgon.saviorgui.profile;
import java.util.List;
import java.util.Map;
public class Profile {

    // Plain data holder describing one transfer profile: a source side, a
    // destination side, and the engine settings used to compare/copy between them.
    // All fields are public and mutable by design (simple DTO).

    // Identifier of the file system implementation to read from
    // (interpreted elsewhere; exact values not visible here).
    public String sourceFileSystemType;
    // Root path on the source file system.
    public String sourceBasePath;
    // Implementation-specific key/value settings for the source.
    public Map<String, String> sourceParams;

    // Identifier of the file system implementation to write to.
    public String destinationFileSystemType;
    // Root path on the destination file system.
    public String destinationBasePath;
    // Implementation-specific key/value settings for the destination.
    public Map<String, String> destinationParams;

    // Identifier of the engine that performs the work.
    public String engineType;
    // Attribute checks the engine may use: modification date, size, MD5 hash.
    public boolean modDate, modSize, modMD5;
    // Entries to skip - presumably paths or patterns; matching semantics are
    // defined by the engine, not visible here.
    public List<String> ignoreList;
}
| apache-2.0 |
aravindc/databenecommons | src/main/java/org/databene/commons/converter/ParseFormatConverter.java | 1714 | /*
* Copyright (C) 2004-2015 Volker Bergmann (volker.bergmann@bergmann-it.de).
* All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.databene.commons.converter;
import org.databene.commons.ConversionException;
import java.text.Format;
import java.text.ParseException;
/**
* Converts a String to an object by using a java.lang.Format object's format() method.
* Created: 30.08.2006 19:48:09
* @param <T> the object type to convert to
* @since 0.1
* @author Volker Bergmann
*/
public class ParseFormatConverter<T> extends FormatBasedConverter<String, T> {

    /**
     * Creates a converter that parses Strings into objects of the given target type
     * using the supplied {@link Format}.
     *
     * @param targetType the type of object produced by parsing
     * @param format     the format whose {@code parseObject()} performs the conversion
     * @param threadSafe whether the given format may be shared between threads
     */
    public ParseFormatConverter(Class<T> targetType, Format format, boolean threadSafe) {
        super(String.class, targetType, format, threadSafe);
    }

    /**
     * Parses the given String with the configured format.
     *
     * @param source the text to parse; {@code null} is passed through as {@code null}
     * @return the parsed object, or {@code null} for {@code null} input
     * @throws ConversionException if the format cannot parse the text
     */
    @Override
    @SuppressWarnings("unchecked")
    public T convert(String source) throws ConversionException {
        if (source != null) {
            try {
                return (T) format.parseObject(source);
            } catch (ParseException cause) {
                throw new ConversionException(cause);
            }
        }
        return null;
    }

}
| apache-2.0 |
penger/z_scheduler | src/main/java/com/diaodu/service/BatchService.java | 3451 | package com.diaodu.service;
import java.sql.SQLException;
import java.util.List;
import java.util.Map;
import com.diaodu.core.GlobalMap;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.diaodu.dao.BatchDao;
import com.diaodu.dao.TaskDao;
import com.diaodu.dao.impl.BatchDaoImpl;
import com.diaodu.dao.impl.TaskDaoImpl;
import com.diaodu.db.JDBCUtils;
import com.diaodu.domain.Batch;
import com.diaodu.domain.Constants;
import com.diaodu.domain.Task;
public class BatchService {
Logger log = LoggerFactory.getLogger(getClass());
public List<Batch> getAllBatch(){
List<Batch> allBatch = null;
try {
BatchDao batchdao=new BatchDaoImpl();
allBatch = batchdao.getAllBatch();
} catch (SQLException e) {
e.printStackTrace();
}finally {
JDBCUtils.close();
}
return allBatch;
}
public int addBatch(Batch batch){
int rows=0;
JDBCUtils.startTransaction();
try {
BatchDao batchdao=new BatchDaoImpl();
rows = batchdao.addBatch(batch);
JDBCUtils.commit();
} catch (SQLException e) {
JDBCUtils.rollback();
e.printStackTrace();
}finally {
JDBCUtils.close();
}
GlobalMap.refeshMap();
return rows;
}
public int deleteBatch(int id){
int rows=0;
JDBCUtils.startTransaction();
try {
BatchDao batchdao=new BatchDaoImpl();
rows = batchdao.deleteBatch(id);
JDBCUtils.commit();
} catch (SQLException e) {
JDBCUtils.rollback();
e.printStackTrace();
}finally {
JDBCUtils.close();
}
GlobalMap.refeshMap();
return rows;
}
public Batch getBatchByID(String id){
Batch b = null ;
try {
BatchDao batchdao=new BatchDaoImpl();
b =batchdao.getBatchByID(id);
} catch (SQLException e) {
JDBCUtils.rollback();
e.printStackTrace();
}finally {
JDBCUtils.close();
}
return b;
}
public int updateBatch(Batch b){
int rows=0;
JDBCUtils.startTransaction();
try {
BatchDao batchdao=new BatchDaoImpl();
rows = batchdao.updateBatch(b);
JDBCUtils.commit();
} catch (SQLException e) {
JDBCUtils.rollback();
e.printStackTrace();
}finally {
JDBCUtils.close();
}
GlobalMap.refeshMap();
return rows;
}
//获取当前Batch下面的所有Task
public List<Task> getAllTaskByBatchID(int batchid){
List<Task> list =null;
try {
TaskDao taskdao =new TaskDaoImpl();;
list = taskdao.getTaskListByBatchID(batchid);
for (Task task : list) {
task.getStatus();
}
} catch (SQLException e) {
e.printStackTrace();
}finally{
JDBCUtils.close();
}
return list;
}
public boolean executeBatch(int batchid,String taskdate){
List<Task> taskList = getAllTaskByBatchID(batchid);
int size = taskList.size();
for(int i=0;i<size;i++){
Task task = taskList.get(i);
TaskService taskService = new TaskService();
Map<String, String> executeMap = taskService.executeTask(task, taskdate);
//跳出剩下的任务
if(!executeMap.get(Constants.EXIT_CODE).equals("0")){
break;
}
}
return true;
}
/**
* 根据批次id 获取批次内未正常完成的任务列表,便于邮件通知
* 2016-8-22 09:59:15
* @param batchid
* @return
*/
public List<Task> getFailTasksByBatchID(Integer batchid) {
TaskDaoImpl taskDao = new TaskDaoImpl();
List taskList = null;
try {
taskList = taskDao.getFailTasksByBatchID(batchid);
} catch (SQLException e) {
e.printStackTrace();
}finally {
JDBCUtils.close();
}
return taskList;
}
}
| apache-2.0 |
hurricup/intellij-community | platform/vcs-api/src/com/intellij/openapi/vcs/history/VcsAnnotationCachedProxy.java | 8280 | /*
* Copyright 2000-2014 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.openapi.vcs.history;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.progress.ProcessCanceledException;
import com.intellij.openapi.util.ThrowableComputable;
import com.intellij.openapi.vcs.AbstractVcs;
import com.intellij.openapi.vcs.FilePath;
import com.intellij.openapi.vcs.VcsException;
import com.intellij.openapi.vcs.actions.VcsContextFactory;
import com.intellij.openapi.vcs.annotate.AnnotationProvider;
import com.intellij.openapi.vcs.annotate.FileAnnotation;
import com.intellij.openapi.vcs.annotate.VcsAnnotation;
import com.intellij.openapi.vcs.annotate.VcsCacheableAnnotationProvider;
import com.intellij.openapi.vcs.changes.ContentRevision;
import com.intellij.openapi.vcs.diff.DiffProvider;
import com.intellij.openapi.vfs.VirtualFile;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
/**
* @author irengrig
* Date: 3/17/11
* Time: 7:51 PM
*/
public class VcsAnnotationCachedProxy implements AnnotationProvider {
  // Cache of per-file, per-revision annotations and history sessions.
  private final VcsHistoryCache myCache;
  private final AbstractVcs myVcs;
  private final static Logger LOG = Logger.getInstance("#com.intellij.openapi.vcs.history.VcsAnnotationCachedProxy");
  // The real provider; consulted on every cache miss.
  private final AnnotationProvider myAnnotationProvider;

  public VcsAnnotationCachedProxy(final AbstractVcs vcs, final VcsHistoryCache cache) {
    // This proxy only works for providers that can serialize/restore annotations.
    assert vcs.getAnnotationProvider() instanceof VcsCacheableAnnotationProvider;
    myVcs = vcs;
    myCache = cache;
    myAnnotationProvider = myVcs.getAnnotationProvider();
  }

  @Override
  public FileAnnotation annotate(final VirtualFile file) throws VcsException {
    // Annotate at the file's current revision (used as the cache key hint).
    final DiffProvider diffProvider = myVcs.getDiffProvider();
    final VcsRevisionNumber currentRevision = diffProvider.getCurrentRevision(file);

    return annotate(file, currentRevision, true, new ThrowableComputable<FileAnnotation, VcsException>() {
      @Override
      public FileAnnotation compute() throws VcsException {
        return myAnnotationProvider.annotate(file);
      }
    });
  }

  @Override
  public FileAnnotation annotate(final VirtualFile file, final VcsFileRevision revision) throws VcsException {
    // Annotate at an explicit historical revision.
    return annotate(file, revision.getRevisionNumber(), false, new ThrowableComputable<FileAnnotation, VcsException>() {
      @Override
      public FileAnnotation compute() throws VcsException {
        return myAnnotationProvider.annotate(file, revision);
      }
    });
  }

  /**
   * Common cache-first annotation path: try to restore a cached annotation (first
   * directly, then with the help of the file's history and content); on failure,
   * compute a fresh annotation via {@code delegate} and cache it.
   *
   * @param revisionNumber  cache key; when {@code null} the cache is bypassed entirely
   * @param currentRevision - just a hint for optimization
   * @param delegate        computes the annotation on a cache miss
   * @return the annotation, or {@code null} when a cached entry exists but its
   *         history could not be obtained (see {@link #getHistory})
   */
  private FileAnnotation annotate(VirtualFile file, final VcsRevisionNumber revisionNumber, final boolean currentRevision,
                                  final ThrowableComputable<FileAnnotation, VcsException> delegate) throws VcsException {
    final AnnotationProvider annotationProvider = myAnnotationProvider;

    final FilePath filePath = VcsContextFactory.SERVICE.getInstance().createFilePathOn(file);

    final VcsCacheableAnnotationProvider cacheableAnnotationProvider = (VcsCacheableAnnotationProvider)annotationProvider;

    VcsAnnotation vcsAnnotation = null;
    if (revisionNumber != null) {
      vcsAnnotation = myCache.get(VcsContextFactory.SERVICE.getInstance().createFilePathOn(file), myVcs.getKeyInstanceMethod(), revisionNumber);
    }

    if (vcsAnnotation != null) {
      final VcsHistoryProvider historyProvider = myVcs.getVcsHistoryProvider();
      // question is whether we need "not moved" path here?
      // First attempt: restore from the cached annotation alone.
      FileAnnotation restored = cacheableAnnotationProvider.restore(vcsAnnotation, revisionNumber);
      if (restored != null) {
        return restored;
      }
      // Second attempt: restore with the file content and (possibly cached) history.
      final ContentRevision fileContent = myVcs.getDiffProvider().createFileContent(revisionNumber, file);
      final VcsAbstractHistorySession history = getHistory(revisionNumber, filePath, historyProvider, vcsAnnotation.getFirstRevision());
      if (history == null) return null;
      restored = cacheableAnnotationProvider.
        restore(vcsAnnotation, history, fileContent.getContent(), currentRevision, revisionNumber);
      if (restored != null) {
        return restored;
      }
    }

    // Cache miss (or restore failed): compute a fresh annotation and cache it.
    final FileAnnotation fileAnnotation = delegate.compute();
    vcsAnnotation = cacheableAnnotationProvider.createCacheable(fileAnnotation);
    if (vcsAnnotation == null) return fileAnnotation;

    if (revisionNumber != null) {
      myCache.put(filePath, myVcs.getKeyInstanceMethod(), revisionNumber, vcsAnnotation);
    }
    return fileAnnotation;
  }

  /**
   * Returns a history session covering at least {@code revision} (and, when given,
   * {@code firstRevision}); a cached session is reused when it is recent enough,
   * otherwise a new session is loaded and cached.
   */
  private VcsAbstractHistorySession getHistory(VcsRevisionNumber revision, FilePath filePath, VcsHistoryProvider historyProvider,
                                               @Nullable final VcsRevisionNumber firstRevision) throws VcsException {
    final boolean historyCacheSupported = historyProvider instanceof VcsCacheableHistorySessionFactory;
    if (historyCacheSupported) {
      final VcsCacheableHistorySessionFactory cacheableHistorySessionFactory = (VcsCacheableHistorySessionFactory)historyProvider;
      final VcsAbstractHistorySession cachedSession =
        myCache.getMaybePartial(filePath, myVcs.getKeyInstanceMethod(), cacheableHistorySessionFactory);
      if (cachedSession != null && ! cachedSession.getRevisionList().isEmpty()) {
        // The cached session is usable only if its newest revision is at least the
        // requested one and it reaches back to firstRevision (when specified).
        final VcsFileRevision recentRevision = cachedSession.getRevisionList().get(0);
        if (recentRevision.getRevisionNumber().compareTo(revision) >= 0 && (firstRevision == null || cachedSession.getHistoryAsMap().containsKey(firstRevision))) {
          return cachedSession;
        }
      }
    }
    // history may be also cut
    final VcsAbstractHistorySession sessionFor;
    if (firstRevision != null) {
      // Load only as far back as firstRevision.
      sessionFor = limitedHistory(filePath, firstRevision);
    } else {
      sessionFor = (VcsAbstractHistorySession) historyProvider.createSessionFor(filePath);
    }
    if (sessionFor != null && historyCacheSupported) {
      final VcsCacheableHistorySessionFactory cacheableHistorySessionFactory = (VcsCacheableHistorySessionFactory)historyProvider;
      final FilePath correctedPath = cacheableHistorySessionFactory.getUsedFilePath(sessionFor);
      myCache.put(filePath, correctedPath, myVcs.getKeyInstanceMethod(), sessionFor, cacheableHistorySessionFactory, firstRevision == null);
    }
    return sessionFor;
  }

  @Override
  public boolean isAnnotationValid(@NotNull VcsFileRevision rev) {
    return myAnnotationProvider.isAnnotationValid(rev);
  }

  /**
   * Loads file history only down to {@code firstNumber}; uses a
   * {@link ProcessCanceledException} thrown from the listener as control flow to
   * stop the provider once that revision has been appended.
   */
  private VcsAbstractHistorySession limitedHistory(final FilePath filePath, @NotNull final VcsRevisionNumber firstNumber) throws VcsException {
    final VcsAbstractHistorySession[] result = new VcsAbstractHistorySession[1];
    final VcsException[] exc = new VcsException[1];

    try {
      myVcs.getVcsHistoryProvider().reportAppendableHistory(filePath, new VcsAppendableHistorySessionPartner() {
        @Override
        public void reportCreatedEmptySession(VcsAbstractHistorySession session) {
          result[0] = session;
        }

        @Override
        public void acceptRevision(VcsFileRevision revision) {
          result[0].appendRevision(revision);
          if (firstNumber.equals(revision.getRevisionNumber())) throw new ProcessCanceledException();
        }

        @Override
        public void reportException(VcsException exception) {
          exc[0] = exception;
        }

        @Override
        public void finished() {
        }

        @Override
        public void beforeRefresh() {
        }

        @Override
        public void forceRefresh() {
        }
      });
    } catch (ProcessCanceledException e) {
      // ok - expected: thrown above to stop history loading at firstNumber
    }
    if (exc[0] != null) {
      throw exc[0];
    }
    return result[0];
  }
}
| apache-2.0 |
sahan/RoboZombie | robozombie/src/main/java/com/lonepulse/robozombie/proxy/InvocationContext.java | 6706 | package com.lonepulse.robozombie.proxy;
/*
* #%L
* RoboZombie
* %%
* Copyright (C) 2013 - 2014 Lonepulse
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import static com.lonepulse.robozombie.util.Assert.assertNotNull;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
/**
* <p>Represents a single <b>request invocation</b> on an endpoint proxy. The context comprises
* of both runtime information and request or endpoint metadata, as well as a reference to the proxy
* instance on which the request was invoked.</p>
*
* <p>{@link InvocationContext}s can be created using a new {@link InvocationContext.Builder} supplied
* via {@link InvocationContext#newBuilder()}.</p>
*
* <p>Each context is <b>pseudo-immutable</b> to the extent that its shallow-state cannot be altered
* and neither can its deep-state with the exception of any <b>mutable arguments</b> accessed via
* {@link #getArguments()}.</p>
*
* @version 1.1.0
* <br><br>
* @since 1.3.0
* <br><br>
* @author <a href="http://sahan.me">Lahiru Sahan Jayasinghe</a>
*/
public final class InvocationContext {

    /**
     * <p>This contract defines the services for creating an {@link InvocationContext}. It collects all
     * the information which is governed by a single proxy invocation context.</p>
     *
     * @version 1.1.0
     * <br><br>
     * @since 1.3.0
     * <br><br>
     * @author <a href="http://sahan.me">Lahiru Sahan Jayasinghe</a>
     */
    static interface Builder {

        /**
         * <p>Sets the endpoint's definition interface whose request was invoked using a proxy.</p>
         *
         * @param endpoint
         * 			the endpoint interface {@link Class} to which invoked request belongs
         * <br><br>
         * @return the current instance of the {@link Builder} with the endpoint assigned
         * <br><br>
         * @since 1.3.0
         */
        Builder setEndpoint(Class<?> endpoint);

        /**
         * <p>Sets the dynamic proxy for the endpoint on the which the request was invoked.</p>
         *
         * @param proxy
         * 			the proxy on which the request was invoked
         * <br><br>
         * @return the current instance of the {@link Builder} with the proxy assigned
         * <br><br>
         * @since 1.3.0
         */
        Builder setProxy(Object proxy);

        /**
         * <p>Sets the {@link Method} on the endpoint interface which defines the invoked request.</p>
         *
         * @param request
         * 			the {@link Method} which defines the request
         * <br><br>
         * @return the current instance of the {@link Builder} with the request assigned
         * <br><br>
         * @since 1.3.0
         */
        Builder setRequest(Method request);

        /**
         * <p>Sets the runtime arguments which were supplied to the proxy upon request invocation.</p>
         *
         * @param arguments
         * 			the runtime request arguments supplied to the endpoint proxy method
         * <br><br>
         * @return the current instance of the {@link Builder} with the request assigned
         * <br><br>
         * @since 1.3.0
         */
        Builder setArguments(Object[] arguments);

        /**
         * <p>Takes the supplied information and creates a new instance of {@link InvocationContext}.</p>
         *
         * @return a new instance of {@link InvocationContext} containing the supplied information
         * <br><br>
         * @since 1.3.0
         */
        InvocationContext build();
    }

    private static final class InvocationContextBuilder implements Builder {

        private Class<?> endpoint;
        private Object proxy;
        private Method request;
        private List<Object> arguments;

        @Override
        public Builder setEndpoint(Class<?> endpoint) {

            this.endpoint = assertNotNull(endpoint);
            return this;
        }

        @Override
        public Builder setProxy(Object proxy) {

            this.proxy = assertNotNull(proxy);
            return this;
        }

        @Override
        public Builder setRequest(Method request) {

            this.request = assertNotNull(request);
            return this;
        }

        @Override
        public Builder setArguments(Object[] arguments) { //null args imply zero parameters

            // FIX: copy the array instead of wrapping it - Arrays.asList() is backed by
            // the caller's array, so later external mutations of that array would have
            // leaked through the "unmodifiable" view and broken this class's advertised
            // immutability of its shallow state.
            this.arguments = Collections.unmodifiableList(arguments == null
                ? new ArrayList<Object>()
                : new ArrayList<Object>(Arrays.asList(arguments)));
            return this;
        }

        @Override
        public InvocationContext build() {

            return new InvocationContext(this);
        }
    }

    /**
     * <p>Returns an new {@link Builder} which can be used to construct an {@link InvocationContext} by
     * supplying the necessary information.</p>
     *
     * @return a new instance of {@link Builder} for constructing an {@link InvocationContext}
     * <br><br>
     * @since 1.3.0
     */
    static Builder newBuilder() {

        return new InvocationContextBuilder();
    }

    private final Class<?> endpoint;
    private final Object proxy;
    private final Method request;
    private final List<Object> arguments;

    private InvocationContext(InvocationContextBuilder builder) {

        this.endpoint = builder.endpoint;
        this.proxy = builder.proxy;
        this.request = builder.request;
        this.arguments = builder.arguments;
    }

    /**
     * <p>Retrieves the endpoint interface definition on which the request was invoked.</p>
     *
     * <p>See {@link Builder#setEndpoint(Class)}</p>
     *
     * @return the {@link Class} of the request endpoint
     * <br><br>
     * @since 1.3.0
     */
    public Class<? extends Object> getEndpoint() {

        return endpoint;
    }

    /**
     * <p>Retrieves the dynamic proxy for the endpoint on which the request was invoked.</p>
     *
     * <p>See {@link Builder#setProxy(Object)}</p>
     *
     * @return the endpoint proxy on which the request was invoked
     * <br><br>
     * @since 1.3.0
     */
    public Object getProxy() {

        return proxy;
    }

    /**
     * <p>Retrieves the {@link Method} on the endpoint interface which defines the invoked request.</p>
     *
     * <p>See {@link Builder#setRequest(Method)}</p>
     *
     * @return the definition for the invoked request
     * <br><br>
     * @since 1.3.0
     */
    public Method getRequest() {

        return request;
    }

    /**
     * <p>Retrieves the runtime arguments supplied to the endpoint proxy upon request invocation.
     * The returned list is unmodifiable, although the argument objects themselves may be mutable.</p>
     *
     * <p>See {@link Builder#setArguments(Object[])}</p>
     *
     * @return the arguments with which the request was invoked
     * <br><br>
     * @since 1.3.0
     */
    public List<Object> getArguments() {

        return arguments;
    }
}
| apache-2.0 |
lizhanhui/Alibaba_RocketMQ | rocketmq-example/src/main/java/com/alibaba/rocketmq/example/verify/FindDuplication.java | 3090 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.alibaba.rocketmq.example.verify;
import com.alibaba.rocketmq.client.exception.MQBrokerException;
import com.alibaba.rocketmq.client.exception.MQClientException;
import com.alibaba.rocketmq.client.producer.DefaultMQProducer;
import com.alibaba.rocketmq.common.message.MessageExt;
import com.alibaba.rocketmq.remoting.exception.RemotingException;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
public class FindDuplication {

    /**
     * Reads message ids (one per line) from the file given as {@code args[0]}, warns
     * about ids duplicated in the file itself, then looks each message up on the
     * broker and reports message keys that map to more than one message id.
     */
    public static void main(String[] args) throws MQClientException, IOException, RemotingException, InterruptedException, MQBrokerException {
        DefaultMQProducer producer = new DefaultMQProducer("FindDuplication");
        producer.start();
        try {
            Set<String> msgIds = new HashSet<>();
            BufferedReader bufferedReader = new BufferedReader(new FileReader(new File(args[0])));
            try {
                String line;
                while (null != (line = bufferedReader.readLine())) {
                    // Set.add returns false for an already-present element.
                    if (!msgIds.add(line.trim())) {
                        System.out.println("Found duplicated msgIds");
                    }
                }
            } finally {
                bufferedReader.close(); // fixes reader leak
            }

            Map<String, List<String>> keyIds = new HashMap<>();
            for (String msgId : msgIds) {
                System.out.println("Query msg by ID: " + msgId);
                MessageExt message = producer.viewMessage(msgId);
                if (null != message) {
                    System.out.println("Found");
                    // BUG FIX: the original stored Arrays.asList(msgId) - a fixed-size,
                    // array-backed list - and then called add() on it for later ids with
                    // the same key, which throws UnsupportedOperationException. Use a
                    // growable list instead.
                    List<String> ids = keyIds.get(message.getKeys());
                    if (null == ids) {
                        ids = new ArrayList<>();
                        keyIds.put(message.getKeys(), ids);
                    }
                    ids.add(msgId);
                } else {
                    System.out.println("Not Found");
                }
            }

            System.out.println("Duplication Result:");
            for (Map.Entry<String, List<String>> next : keyIds.entrySet()) {
                if (next.getValue().size() > 1) {
                    System.out.println(next.getKey() + ": " + next.getValue());
                }
            }
        } finally {
            producer.shutdown(); // always stop the client, even on failure
        }
    }
}
| apache-2.0 |
PascalSchumacher/byte-buddy | byte-buddy-dep/src/main/java/net/bytebuddy/implementation/bind/MethodDelegationBinder.java | 42172 | package net.bytebuddy.implementation.bind;
import net.bytebuddy.description.method.MethodDescription;
import net.bytebuddy.description.type.TypeDescription;
import net.bytebuddy.implementation.Implementation;
import net.bytebuddy.implementation.bytecode.StackManipulation;
import net.bytebuddy.implementation.bytecode.member.MethodInvocation;
import org.objectweb.asm.MethodVisitor;
import java.util.*;
/**
* A method delegation binder is responsible for creating a method binding for a <i>source method</i> to a
* <i>target method</i>. Such a binding allows to implement the source method by calling the target method.
* <p> </p>
* Usually, an implementation will attempt to bind a specific source method to a set of target method candidates
* where all legal bindings are considered for binding. To chose a specific candidate, an
* {@link net.bytebuddy.implementation.bind.MethodDelegationBinder.AmbiguityResolver}
* will be consulted for selecting a <i>best</i> binding.
*/
public interface MethodDelegationBinder {
    /**
     * Attempts a binding of a source method to a given target method. Per the type-level
     * contract, implementations typically produce such bindings for a set of target
     * candidates, with ambiguities later settled by an
     * {@link net.bytebuddy.implementation.bind.MethodDelegationBinder.AmbiguityResolver}.
     *
     * @param implementationTarget The target of the current implementation onto which this binding
     *                             is to be applied.
     * @param source               The method that is to be bound to the {@code target} method.
     * @param target               The method that is to be invoked as a delegate.
     * @return A binding representing this attempt to bind the {@code source} method to the {@code target} method.
     */
    MethodBinding bind(Implementation.Target implementationTarget,
                       MethodDescription source,
                       MethodDescription target);
/**
* Implementations are used as delegates for invoking a method that was bound
* using a {@link net.bytebuddy.implementation.bind.MethodDelegationBinder}.
*/
    interface MethodInvoker {

        /**
         * Creates a method invocation for a given method.
         *
         * @param methodDescription The method to be invoked.
         * @return A stack manipulation encapsulating this method invocation.
         */
        StackManipulation invoke(MethodDescription methodDescription);

        /**
         * A simple method invocation that merely uses the most general form of method invocation as provided by
         * {@link net.bytebuddy.implementation.bytecode.member.MethodInvocation}.
         */
        enum Simple implements MethodInvoker {

            /**
             * The singleton instance.
             */
            INSTANCE;

            @Override
            public StackManipulation invoke(MethodDescription methodDescription) {
                // Leaves the choice of invocation form entirely to MethodInvocation.
                return MethodInvocation.invoke(methodDescription);
            }

            @Override
            public String toString() {
                return "MethodDelegationBinder.MethodInvoker.Simple." + name();
            }
        }

        /**
         * A method invocation that enforces a virtual invocation that is dispatched on a given type.
         */
        class Virtual implements MethodInvoker {

            /**
             * The type on which a method should be invoked virtually.
             */
            private final TypeDescription typeDescription;

            /**
             * Creates an immutable method invoker that dispatches all methods on a given type.
             *
             * @param typeDescription The type on which the method is invoked by virtual invocation.
             */
            public Virtual(TypeDescription typeDescription) {
                this.typeDescription = typeDescription;
            }

            @Override
            public StackManipulation invoke(MethodDescription methodDescription) {
                // Forces virtual dispatch on the configured type.
                return MethodInvocation.invoke(methodDescription).virtual(typeDescription);
            }

            @Override
            public boolean equals(Object other) {
                // Equal iff the other object is of the exact same class and
                // dispatches on an equal type.
                return this == other || !(other == null || getClass() != other.getClass())
                        && typeDescription.equals(((Virtual) other).typeDescription);
            }

            @Override
            public int hashCode() {
                return typeDescription.hashCode();
            }

            @Override
            public String toString() {
                return "MethodDelegationBinder.MethodInvoker.Virtual{typeDescription=" + typeDescription + '}';
            }
        }
    }
/**
* A binding attempt for a single parameter. Implementations of this type are a suggestion of composing a
* {@link net.bytebuddy.implementation.bind.MethodDelegationBinder.MethodBinding}
* by using a
* {@link net.bytebuddy.implementation.bind.MethodDelegationBinder.MethodBinding.Builder}.
* However, method bindings can also be composed without this type which is merely a suggestion.
*
* @param <T> The type of the identification token for this parameter binding.
*/
interface ParameterBinding<T> extends StackManipulation {
        /**
         * Returns an identification token for this binding. Tokens allow bindings to be
         * told apart, e.g. when an
         * {@link net.bytebuddy.implementation.bind.MethodDelegationBinder.AmbiguityResolver}
         * later resolves conflicts between bindable target methods.
         *
         * @return An identification token unique to this binding.
         */
        T getIdentificationToken();
/**
* A singleton representation of an illegal binding for a method parameter. An illegal binding usually
* suggests that a source method cannot be bound to a specific target method.
*/
        enum Illegal implements ParameterBinding<Void> {

            /**
             * The singleton instance.
             */
            INSTANCE;

            @Override
            public Void getIdentificationToken() {
                // An illegal binding carries no token; asking for one is a programming error.
                throw new IllegalStateException();
            }

            @Override
            public boolean isValid() {
                // Always invalid: signals that the parameter (and thus the method) cannot be bound.
                return false;
            }

            @Override
            public Size apply(MethodVisitor methodVisitor, Implementation.Context implementationContext) {
                // Callers must check isValid() first; applying an illegal binding is an error.
                throw new IllegalStateException("An illegal parameter binding must not be applied");
            }

            @Override
            public String toString() {
                return "MethodDelegationBinder.ParameterBinding.Illegal." + name();
            }
        }
/**
* An anonymous binding of a target method parameter.
*/
        class Anonymous implements ParameterBinding<Object> {

            /**
             * A pseudo-token that is not exposed and therefore anonymous. A fresh
             * {@code Object} is created per instance, so the token is unique by identity.
             */
            private final Object anonymousToken;

            /**
             * The stack manipulation that represents the loading of the parameter binding onto the stack.
             */
            private final StackManipulation delegate;

            /**
             * Creates a new, anonymous parameter binding.
             *
             * @param delegate The stack manipulation that is responsible for loading the parameter value for this
             *                 target method parameter onto the stack.
             */
            public Anonymous(StackManipulation delegate) {
                this.delegate = delegate;
                anonymousToken = new Object();
            }

            @Override
            public Object getIdentificationToken() {
                return anonymousToken;
            }

            @Override
            public boolean isValid() {
                return delegate.isValid();
            }

            @Override
            public Size apply(MethodVisitor methodVisitor, Implementation.Context implementationContext) {
                return delegate.apply(methodVisitor, implementationContext);
            }

            @Override
            public boolean equals(Object other) {
                // Equality deliberately ignores the anonymous token: two bindings with
                // equal delegates are equal even though their tokens differ.
                return this == other || !(other == null || getClass() != other.getClass())
                        && delegate.equals(((Anonymous) other).delegate);
            }

            @Override
            public int hashCode() {
                // Consistent with equals: based on the delegate only.
                return 31 * delegate.hashCode();
            }

            @Override
            public String toString() {
                return "MethodDelegationBinder.ParameterBinding.Anonymous{" +
                        "anonymousToken=" + anonymousToken +
                        ", delegate=" + delegate +
                        '}';
            }
        }
/**
* A uniquely identifiable parameter binding for a target method. Such bindings are usually later processed by
* a {@link net.bytebuddy.implementation.bind.MethodDelegationBinder.AmbiguityResolver}
* in order to resolve binding conflicts between several bindable target methods to the same source method.
*
* @param <T> The type of the identification token.
* @see net.bytebuddy.implementation.bind.MethodDelegationBinder.AmbiguityResolver
*/
class Unique<T> implements ParameterBinding<T> {
/**
* The token that identifies this parameter binding as unique.
*/
private final T identificationToken;
/**
* The stack manipulation that represents the loading of the parameter binding onto the stack.
*/
private final StackManipulation delegate;
/**
* Creates a new unique parameter binding representant.
*
* @param delegate The stack manipulation that loads the argument for this parameter onto the operand stack.
* @param identificationToken The token used for identifying this parameter binding.
*/
public Unique(StackManipulation delegate, T identificationToken) {
this.delegate = delegate;
this.identificationToken = identificationToken;
}
/**
* A factory method for creating a unique binding that infers the tokens type.
*
* @param delegate The stack manipulation delegate.
* @param identificationToken The identification token.
* @param <S> The type of the identification token.
* @return A new instance representing this unique binding.
*/
public static <S> Unique<S> of(StackManipulation delegate, S identificationToken) {
return new Unique<S>(delegate, identificationToken);
}
@Override
public T getIdentificationToken() {
return identificationToken;
}
@Override
public boolean isValid() {
return delegate.isValid();
}
@Override
public Size apply(MethodVisitor methodVisitor, Implementation.Context implementationContext) {
return delegate.apply(methodVisitor, implementationContext);
}
@Override
public boolean equals(Object other) {
if (this == other) return true;
if (other == null || getClass() != other.getClass()) return false;
Unique<?> unique = (Unique<?>) other;
return identificationToken.equals(unique.identificationToken) && delegate.equals(unique.delegate);
}
@Override
public int hashCode() {
int result = identificationToken.hashCode();
result = 31 * result + delegate.hashCode();
return result;
}
@Override
public String toString() {
return "MethodDelegationBinder.ParameterBinding.Unique{" +
"identificationToken=" + identificationToken +
", delegate=" + delegate +
'}';
}
}
}
/**
* A binding attempt created by a
* {@link net.bytebuddy.implementation.bind.MethodDelegationBinder}.
*/
interface MethodBinding extends StackManipulation {
/**
* Returns the target method's parameter index for a given parameter binding token.
* <p> </p>
* A binding token can be any object
* that implements valid {@link Object#hashCode()} and {@link Object#equals(Object)} methods in order
* to look up a given binding. This way, two bindings can be evaluated of having performed a similar type of
* binding such that these bindings can be compared and a dominant binding can be identified by an
* {@link net.bytebuddy.implementation.bind.MethodDelegationBinder.AmbiguityResolver}.
* Furthermore, a binding is implicitly required to insure the uniqueness of such a parameter binding.
*
* @param parameterBindingToken A token which is used to identify a specific unique binding for a given parameter
* of the target method.
* @return The target method's parameter index of this binding or {@code null} if no such argument binding
* was applied for this binding.
*/
Integer getTargetParameterIndex(Object parameterBindingToken);
/**
* Returns the target method of the method binding attempt.
*
         * @return The target method that this binding was resolved for.
*/
MethodDescription getTarget();
/**
* Representation of an attempt to bind a source method to a target method that is not applicable.
*
* @see net.bytebuddy.implementation.bind.MethodDelegationBinder
*/
enum Illegal implements MethodBinding {
/**
* The singleton instance.
*/
INSTANCE;
@Override
public Integer getTargetParameterIndex(Object parameterBindingToken) {
throw new IllegalStateException("Method is not bound");
}
@Override
public MethodDescription getTarget() {
throw new IllegalStateException("Method is not bound");
}
@Override
public boolean isValid() {
return false;
}
@Override
public Size apply(MethodVisitor methodVisitor, Implementation.Context implementationContext) {
throw new IllegalStateException("Cannot delegate to an unbound method");
}
@Override
public String toString() {
return "MethodDelegationBinder.MethodBinding.Illegal." + name();
}
}
/**
* A mutable builder that allows to compose a
* {@link net.bytebuddy.implementation.bind.MethodDelegationBinder.MethodBinding}
* by adding parameter bindings incrementally.
*/
        class Builder {

            /**
             * The method invoker for invoking the actual method that is bound.
             */
            private final MethodInvoker methodInvoker;

            /**
             * The target method for which a binding is to be constructed by this builder.
             */
            private final MethodDescription target;

            /**
             * The current list of stack manipulations for loading values for each parameter onto the operand stack.
             * The i-th entry loads the value for the i-th target parameter.
             */
            private final List<StackManipulation> parameterStackManipulations;

            /**
             * A mapping of identification tokens to the parameter index they were bound for. A
             * {@link java.util.LinkedHashMap} preserves binding order, mirroring the manipulation list.
             */
            private final LinkedHashMap<Object, Integer> registeredTargetIndices;

            /**
             * The index of the next parameter that is to be bound.
             */
            private int nextParameterIndex;

            /**
             * Creates a new builder for the binding of a given method.
             *
             * @param methodInvoker The method invoker that is used to create the method invocation of the {@code target}
             *                      method.
             * @param target        The target method that is target of the binding.
             */
            public Builder(MethodInvoker methodInvoker, MethodDescription target) {
                this.methodInvoker = methodInvoker;
                this.target = target;
                // Both containers are presized for one entry per target parameter.
                parameterStackManipulations = new ArrayList<StackManipulation>(target.getParameters().size());
                registeredTargetIndices = new LinkedHashMap<Object, Integer>(target.getParameters().size());
                nextParameterIndex = 0;
            }

            /**
             * Appends a stack manipulation for the next parameter of the target method.
             *
             * @param parameterBinding A binding representing the next subsequent parameter of the method.
             * @return {@code false} if the {@code parameterBindingToken} was already bound. A conflicting binding should
             * usually abort the attempt of binding a method and this {@code Builder} should be discarded.
             */
            public boolean append(ParameterBinding<?> parameterBinding) {
                // The manipulation is recorded even when the token turns out to be a duplicate; on a
                // {@code false} return the builder is left inconsistent on purpose and must be discarded.
                parameterStackManipulations.add(parameterBinding);
                return registeredTargetIndices.put(parameterBinding.getIdentificationToken(), nextParameterIndex++) == null;
            }

            /**
             * Creates a binding that represents the bindings collected by this {@code Builder}.
             *
             * @param terminatingManipulation A stack manipulation that is applied after the method invocation.
             * @return A binding representing the parameter bindings collected by this builder.
             */
            public MethodBinding build(StackManipulation terminatingManipulation) {
                // Every target parameter must have received exactly one binding before a result can be built.
                if (target.getParameters().size() != nextParameterIndex) {
                    throw new IllegalStateException("The number of parameters bound does not equal the target's number of parameters");
                }
                return new Build(target,
                        registeredTargetIndices,
                        methodInvoker.invoke(target),
                        parameterStackManipulations,
                        terminatingManipulation);
            }

            /**
             * Returns the current parameter index that will be bound on the next call of
             * {@link Builder#append(net.bytebuddy.implementation.bind.MethodDelegationBinder.ParameterBinding)}.
             *
             * @return The next index to be bound.
             */
            public int getNextParameterIndex() {
                return nextParameterIndex;
            }

            @Override
            public String toString() {
                return "MethodDelegationBinder.MethodBinding.Builder{" +
                        "methodInvoker=" + methodInvoker +
                        ", target=" + target +
                        ", parameterStackManipulations=" + parameterStackManipulations +
                        ", registeredTargetIndices=" + registeredTargetIndices +
                        ", nextParameterIndex=" + nextParameterIndex +
                        '}';
            }

            /**
             * A method binding that was created by a
             * {@link net.bytebuddy.implementation.bind.MethodDelegationBinder.MethodBinding.Builder}.
             */
            protected static class Build implements MethodBinding {

                /**
                 * The target method this binding represents.
                 */
                private final MethodDescription target;

                /**
                 * A map of identification tokens to the indices of their binding parameters.
                 */
                private final Map<?, Integer> registeredTargetIndices;

                /**
                 * A stack manipulation that represents the actual method invocation.
                 */
                private final StackManipulation methodInvocation;

                /**
                 * A list of manipulations that each represent the loading of a parameter value onto the operand stack.
                 */
                private final List<StackManipulation> parameterStackManipulations;

                /**
                 * The stack manipulation that is applied after the method invocation.
                 */
                private final StackManipulation terminatingStackManipulation;

                /**
                 * Creates a new method binding.
                 *
                 * @param target                       The target method this binding represents.
                 * @param registeredTargetIndices     A map of identification tokens to the indices of their binding
                 *                                    parameters.
                 * @param methodInvocation            A stack manipulation that represents the actual method invocation.
                 * @param parameterStackManipulations A list of manipulations that each represent the loading of a
                 *                                    parameter value onto the operand stack.
                 * @param terminatingStackManipulation The stack manipulation that is applied after the method invocation.
                 */
                protected Build(MethodDescription target,
                                Map<?, Integer> registeredTargetIndices,
                                StackManipulation methodInvocation,
                                List<StackManipulation> parameterStackManipulations,
                                StackManipulation terminatingStackManipulation) {
                    this.target = target;
                    // Defensive copies decouple this immutable result from the (mutable) builder. Note that
                    // the copy into a plain HashMap drops the builder's insertion order; only lookup is needed here.
                    this.registeredTargetIndices = new HashMap<Object, Integer>(registeredTargetIndices);
                    this.methodInvocation = methodInvocation;
                    this.parameterStackManipulations = new ArrayList<StackManipulation>(parameterStackManipulations);
                    this.terminatingStackManipulation = terminatingStackManipulation;
                }

                @Override
                public boolean isValid() {
                    // Short-circuits: stops inspecting parameter manipulations once any component is invalid.
                    boolean result = methodInvocation.isValid() && terminatingStackManipulation.isValid();
                    Iterator<StackManipulation> assignment = parameterStackManipulations.iterator();
                    while (result && assignment.hasNext()) {
                        result = assignment.next().isValid();
                    }
                    return result;
                }

                @Override
                public Integer getTargetParameterIndex(Object parameterBindingToken) {
                    return registeredTargetIndices.get(parameterBindingToken);
                }

                @Override
                public MethodDescription getTarget() {
                    return target;
                }

                @Override
                public Size apply(MethodVisitor methodVisitor, Implementation.Context implementationContext) {
                    // Emission order: load all arguments, invoke the target, then apply the terminating
                    // manipulation (e.g. a return). Stack sizes are aggregated across all steps.
                    Size size = new Size(0, 0);
                    for (StackManipulation stackManipulation : parameterStackManipulations) {
                        size = size.aggregate(stackManipulation.apply(methodVisitor, implementationContext));
                    }
                    size = size.aggregate(methodInvocation.apply(methodVisitor, implementationContext));
                    return size.aggregate(terminatingStackManipulation.apply(methodVisitor, implementationContext));
                }

                @Override
                public boolean equals(Object other) {
                    if (this == other) return true;
                    if (other == null || getClass() != other.getClass()) return false;
                    Build build = (Build) other;
                    return methodInvocation.equals(build.methodInvocation)
                            && parameterStackManipulations.equals(build.parameterStackManipulations)
                            && registeredTargetIndices.equals(build.registeredTargetIndices)
                            && terminatingStackManipulation.equals(build.terminatingStackManipulation)
                            && target.equals(build.target);
                }

                @Override
                public int hashCode() {
                    int result = target.hashCode();
                    result = 31 * result + registeredTargetIndices.hashCode();
                    result = 31 * result + methodInvocation.hashCode();
                    result = 31 * result + parameterStackManipulations.hashCode();
                    result = 31 * result + terminatingStackManipulation.hashCode();
                    return result;
                }

                @Override
                public String toString() {
                    return "MethodDelegationBinder.MethodBinding.Builder.Build{" +
                            "target=" + target +
                            ", registeredTargetIndices=" + registeredTargetIndices +
                            ", methodInvocation=" + methodInvocation +
                            ", parameterStackManipulations=" + parameterStackManipulations +
                            ", terminatingStackManipulation=" + terminatingStackManipulation +
                            '}';
                }
            }
        }
}
/**
* Implementations of this interface are able to attempt the resolution of two successful bindings of a method
* to two different target methods in order to identify a dominating binding.
*/
    interface AmbiguityResolver {

        /**
         * Attempts to resolve two conflicting bindings.
         *
         * @param source The source method that was bound to both target methods.
         * @param left   The first successful binding of the {@code source} method.
         * @param right  The second successful binding of the {@code source} method.
         * @return The resolution state when resolving a conflicting binding where
         * {@link net.bytebuddy.implementation.bind.MethodDelegationBinder.AmbiguityResolver.Resolution#LEFT}
         * indicates a successful binding to the {@code left} binding while
         * {@link net.bytebuddy.implementation.bind.MethodDelegationBinder.AmbiguityResolver.Resolution#RIGHT}
         * indicates a successful binding to the {@code right} binding.
         */
        Resolution resolve(MethodDescription source, MethodBinding left, MethodBinding right);

        /**
         * A resolution state of an attempt to resolve two conflicting bindings.
         */
        enum Resolution {

            /**
             * Describes a resolution state where no information about dominance could be gathered.
             */
            UNKNOWN(true),

            /**
             * Describes a resolution state where the left method dominates the right method.
             */
            LEFT(false),

            /**
             * Describes a resolution state where the right method dominates the left method.
             */
            RIGHT(false),

            /**
             * Describes a resolution state where both methods have conflicting dominance over each other.
             */
            AMBIGUOUS(true);

            /**
             * {@code true} if this resolution is unresolved.
             */
            private final boolean unresolved;

            /**
             * Creates a new resolution.
             *
             * @param unresolved {@code true} if this resolution is unresolved.
             */
            Resolution(boolean unresolved) {
                this.unresolved = unresolved;
            }

            /**
             * Checks if this binding is unresolved.
             *
             * @return {@code true} if this binding is unresolved.
             */
            public boolean isUnresolved() {
                return unresolved;
            }

            /**
             * Merges two resolutions in order to determine their compatibility.
             *
             * @param other The resolution this resolution is to be checked against.
             * @return The merged resolution.
             */
            public Resolution merge(Resolution other) {
                switch (this) {
                    case UNKNOWN:
                        // No opinion yet: adopt the other resolution unchanged.
                        return other;
                    case AMBIGUOUS:
                        // Ambiguity is absorbing; it cannot be resolved by merging.
                        return AMBIGUOUS;
                    case LEFT:
                    case RIGHT:
                        // A directed resolution merged with anything other than itself becomes ambiguous.
                        return other == this ? this : AMBIGUOUS;
                    default:
                        throw new AssertionError();
                }
            }

            @Override
            public String toString() {
                return "MethodDelegationBinder.AmbiguityResolver.Resolution." + name();
            }
        }

        /**
         * An ambiguity resolver that does not attempt to resolve a conflicting binding.
         */
        enum NoOp implements AmbiguityResolver {

            /**
             * The singleton instance.
             */
            INSTANCE;

            @Override
            public Resolution resolve(MethodDescription source, MethodBinding left, MethodBinding right) {
                // Never takes a side; always reports that nothing could be determined.
                return Resolution.UNKNOWN;
            }

            @Override
            public String toString() {
                return "MethodDelegationBinder.AmbiguityResolver.NoOp." + name();
            }
        }

        /**
         * A chain of {@link net.bytebuddy.implementation.bind.MethodDelegationBinder.AmbiguityResolver}s
         * that are applied in the given order until two bindings can be resolved.
         */
        class Chain implements AmbiguityResolver {

            /**
             * A list of ambiguity resolvers that are applied by this chain in their order of application.
             * Never contains a nested {@code Chain}; see {@link Chain#unchained(java.util.List)}.
             */
            private final List<AmbiguityResolver> ambiguityResolvers;

            /**
             * Creates an immutable chain of ambiguity resolvers.
             *
             * @param ambiguityResolver The ambiguity resolvers to chain in the order of their application.
             */
            protected Chain(AmbiguityResolver... ambiguityResolver) {
                ambiguityResolvers = unchained(Arrays.asList(ambiguityResolver));
            }

            /**
             * Chains a given number of ambiguity resolvers.
             *
             * @param ambiguityResolver The ambiguity resolvers to chain in the order of their application.
             * @return A chained ambiguity resolver representing the given ambiguity resolvers.
             */
            public static AmbiguityResolver of(AmbiguityResolver... ambiguityResolver) {
                // A single resolver needs no chaining wrapper.
                if (ambiguityResolver.length == 1) {
                    return ambiguityResolver[0];
                } else {
                    return new Chain(ambiguityResolver);
                }
            }

            /**
             * Removes all
             * {@link net.bytebuddy.implementation.bind.MethodDelegationBinder.AmbiguityResolver.Chain}s
             * from a list of ambiguity resolvers by extracting the ambiguity resolvers they actually represent in the
             * same order, i.e. flattens nested chains recursively.
             *
             * @param chained A list of ambiguity resolvers that might contain chains.
             * @return A list without such chains but with the ambiguity resolvers they actually represent.
             */
            private static List<AmbiguityResolver> unchained(List<AmbiguityResolver> chained) {
                List<AmbiguityResolver> ambiguityResolvers = new ArrayList<AmbiguityResolver>();
                for (AmbiguityResolver ambiguityResolver : chained) {
                    if (ambiguityResolver instanceof Chain) {
                        ambiguityResolvers.addAll(unchained(((Chain) ambiguityResolver).ambiguityResolvers));
                    } else {
                        ambiguityResolvers.add(ambiguityResolver);
                    }
                }
                return ambiguityResolvers;
            }

            @Override
            public Resolution resolve(MethodDescription source,
                                      MethodBinding left,
                                      MethodBinding right) {
                // Ask each resolver in turn; stop at the first directed (resolved) answer.
                Resolution resolution = Resolution.UNKNOWN;
                Iterator<AmbiguityResolver> iterator = ambiguityResolvers.iterator();
                while (resolution.isUnresolved() && iterator.hasNext()) {
                    resolution = iterator.next().resolve(source, left, right);
                }
                return resolution;
            }

            @Override
            public boolean equals(Object other) {
                return this == other || !(other == null || getClass() != other.getClass())
                        && ambiguityResolvers.equals(((Chain) other).ambiguityResolvers);
            }

            @Override
            public int hashCode() {
                return ambiguityResolvers.hashCode();
            }

            @Override
            public String toString() {
                return "MethodDelegationBinder.AmbiguityResolver.Chain{ambiguityResolvers=" + ambiguityResolvers + '}';
            }
        }
    }
/**
     * A helper class that allows to identify a best binding for a given type and source method choosing from a list of given
* target methods by using a given {@link net.bytebuddy.implementation.bind.MethodDelegationBinder}
* and an {@link net.bytebuddy.implementation.bind.MethodDelegationBinder.AmbiguityResolver}.
* <p> </p>
* The {@code Processor} will:
* <ol>
* <li>Try to bind the {@code source} method using the {@code MethodDelegationBinder}.</li>
* <li>Find a best method among the successful bindings using the {@code AmbiguityResolver}.</li>
* </ol>
*/
class Processor {
/**
* Represents the index of the only value of two elements in a list.
*/
private static final int ONLY = 0;
/**
* Represents the index of the left value of two elements in a list.
*/
private static final int LEFT = 0;
/**
* Represents the index of the right value of two elements in a list.
*/
private static final int RIGHT = 1;
/**
* This processor's method delegation binder.
*/
private final MethodDelegationBinder methodDelegationBinder;
/**
* The processor's ambiguity resolver.
*/
private final AmbiguityResolver ambiguityResolver;
/**
* Creates a new processor for a method delegation binder.
*
* @param methodDelegationBinder This processor's method delegation binder.
* @param ambiguityResolver The processor's ambiguity resolver.
*/
public Processor(MethodDelegationBinder methodDelegationBinder,
AmbiguityResolver ambiguityResolver) {
this.methodDelegationBinder = methodDelegationBinder;
this.ambiguityResolver = ambiguityResolver;
}
/**
* Returns the {@link net.bytebuddy.implementation.bind.MethodDelegationBinder}
* used by this {@code Processor}.
*
* @return The method delegation binder used by this {@code Processor}.
*/
public MethodDelegationBinder getMethodDelegationBinder() {
return methodDelegationBinder;
}
/**
* Returns the {@link net.bytebuddy.implementation.bind.MethodDelegationBinder.AmbiguityResolver}
* used by this {@code Processor}.
*
* @return The ambiguity resolver used by this {@code Processor}.
*/
public AmbiguityResolver getAmbiguityResolver() {
return ambiguityResolver;
}
/**
* @param implementationTarget The implementation target for binding the {@code source} method to.
* @param source The source method that is to be bound.
* @param targets All possible targets for the delegation binding that are to be considered.
* @return The best binding that was identified. If no such binding can be identified, an exception is thrown.
*/
public MethodBinding process(Implementation.Target implementationTarget,
MethodDescription source,
Iterable<? extends MethodDescription> targets) {
List<MethodBinding> possibleDelegations = bind(implementationTarget, source, targets);
if (possibleDelegations.size() == 0) {
throw new IllegalArgumentException("No method can be bound to " + source);
}
return resolve(source, possibleDelegations);
}
/**
* Creates a list of method bindings for any legal target method.
*
* @param implementationTarget The implementation target for binding the {@code source} method to.
* @param source The method that is to be bound to any {@code targets} method.
* @param targets All possible targets for the delegation binding that are to be considered.
* @return A list of valid method bindings representing a subset of the given target methods.
*/
private List<MethodBinding> bind(Implementation.Target implementationTarget,
MethodDescription source,
Iterable<? extends MethodDescription> targets) {
List<MethodBinding> possibleDelegations = new LinkedList<MethodBinding>();
for (MethodDescription target : targets) {
if (target.isVisibleTo(implementationTarget.getTypeDescription())) {
MethodBinding methodBinding = methodDelegationBinder.bind(implementationTarget, source, target);
if (methodBinding.isValid()) {
possibleDelegations.add(methodBinding);
}
}
}
return possibleDelegations;
}
/**
* Resolves the most specific target method of a list of legal method bindings.
*
* @param source The source method that is to be bound.
* @param targets A list of possible binding targets.
* @return The most specific method binding that was located from the given list of candidate targets.
*/
private MethodBinding resolve(MethodDescription source,
List<MethodBinding> targets) {
switch (targets.size()) {
case 1:
return targets.get(ONLY);
case 2: {
MethodBinding left = targets.get(LEFT);
MethodBinding right = targets.get(RIGHT);
switch (ambiguityResolver.resolve(source, left, right)) {
case LEFT:
return left;
case RIGHT:
return right;
case AMBIGUOUS:
case UNKNOWN:
throw new IllegalArgumentException(String.format("Could not resolve ambiguous delegation " +
"of %s: %s or %s", source, left, right));
default:
throw new AssertionError();
}
}
default: /* case 3+: */ {
MethodBinding left = targets.get(LEFT);
MethodBinding right = targets.get(RIGHT);
switch (ambiguityResolver.resolve(source, left, right)) {
case LEFT:
targets.remove(RIGHT);
return resolve(source, targets);
case RIGHT:
targets.remove(LEFT);
return resolve(source, targets);
case AMBIGUOUS:
case UNKNOWN:
targets.remove(RIGHT); // Remove right element first due to index alteration!
targets.remove(LEFT);
MethodBinding subResult = resolve(source, targets);
switch (ambiguityResolver.resolve(source, left, subResult).merge(ambiguityResolver.resolve(source, right, subResult))) {
case RIGHT:
return subResult;
case LEFT:
case AMBIGUOUS:
case UNKNOWN:
throw new IllegalArgumentException("Could not resolve ambiguous delegation to either " + left + " or " + right);
default:
throw new AssertionError();
}
default:
throw new AssertionError();
}
}
}
}
@Override
public boolean equals(Object other) {
return this == other || !(other == null || getClass() != other.getClass())
&& ambiguityResolver.equals(((Processor) other).ambiguityResolver)
&& methodDelegationBinder.equals(((Processor) other).methodDelegationBinder);
}
@Override
public int hashCode() {
return 31 * methodDelegationBinder.hashCode() + ambiguityResolver.hashCode();
}
@Override
public String toString() {
return "MethodDelegationBinder.Processor{"
+ "methodDelegationBinder=" + methodDelegationBinder
+ ", ambiguityResolver=" + ambiguityResolver + '}';
}
}
}
| apache-2.0 |
SourceStudyNotes/log4j2 | src/main/java/org/apache/logging/log4j/core/appender/AbstractWriterAppender.java | 4145 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache license, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the license for the specific language governing permissions and
* limitations under the license.
*/
package org.apache.logging.log4j.core.appender;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import org.apache.logging.log4j.core.Filter;
import org.apache.logging.log4j.core.LogEvent;
import org.apache.logging.log4j.core.StringLayout;
/**
* Appends log events as strings to a writer.
*
* @param <M>
* The kind of {@link WriterManager} under management
*/
public abstract class AbstractWriterAppender<M extends WriterManager> extends AbstractAppender {

    private static final long serialVersionUID = 1L;

    /**
     * Immediate flush means that the underlying writer will be flushed at the
     * end of each append operation. Immediate flush is slower but ensures that
     * each append request is actually written. If <code>immediateFlush</code>
     * is set to {@code false}, then there is a good chance that the last few
     * logs events are not actually written to persistent media if and when the
     * application crashes.
     */
    protected final boolean immediateFlush;

    private final M manager;

    private final ReadWriteLock readWriteLock = new ReentrantReadWriteLock();

    private final Lock readLock = readWriteLock.readLock();

    /**
     * Instantiates.
     *
     * @param name
     *            The name of the Appender.
     * @param layout
     *            The layout to format the message.
     * @param filter
     *            The filter to associate with the Appender; may be {@code null}.
     * @param ignoreExceptions
     *            Whether exceptions raised while appending should be ignored
     *            (see {@link AbstractAppender}).
     * @param immediateFlush
     *            Whether the underlying writer is flushed at the end of each
     *            append operation.
     * @param manager
     *            The WriterManager that owns the underlying writer.
     */
    protected AbstractWriterAppender(final String name, final StringLayout layout, final Filter filter,
            final boolean ignoreExceptions, final boolean immediateFlush, final M manager) {
        super(name, filter, layout, ignoreExceptions);
        this.manager = manager;
        this.immediateFlush = immediateFlush;
    }

    /**
     * Actual writing occurs here.
     * <p>
     * Most subclasses will need to override this method.
     * </p>
     *
     * @param event
     *            The LogEvent.
     */
    @Override
    public void append(final LogEvent event) {
        // NOTE(review): only the read lock is taken here, so appends may run concurrently;
        // presumably the write lock guards reconfiguration elsewhere — confirm before changing.
        readLock.lock();
        try {
            final String str = getStringLayout().toSerializable(event);
            if (!str.isEmpty()) {
                manager.write(str);
                // Flush either when configured to always flush or at the end of a batch of events.
                if (this.immediateFlush || event.isEndOfBatch()) {
                    manager.flush();
                }
            }
        } catch (final AppenderLoggingException ex) {
            error("Unable to write " + manager.getName() + " for appender " + getName() + ": " + ex);
            throw ex;
        } finally {
            readLock.unlock();
        }
    }

    /**
     * Gets the manager.
     *
     * @return the manager.
     */
    public M getManager() {
        return manager;
    }

    /**
     * Returns this appender's layout, which is known to be a {@link StringLayout}
     * because the constructor only accepts that type.
     *
     * @return the layout as a StringLayout.
     */
    public StringLayout getStringLayout() {
        return (StringLayout) getLayout();
    }

    @Override
    public void start() {
        // Misconfiguration is reported but does not prevent the appender from starting.
        if (getLayout() == null) {
            LOGGER.error("No layout set for the appender named [{}].", getName());
        }
        if (manager == null) {
            LOGGER.error("No OutputStreamManager set for the appender named [{}].", getName());
        }
        super.start();
    }

    @Override
    public void stop() {
        super.stop();
        // Release (rather than close) the manager: it may be shared by other appenders.
        manager.release();
    }
}
| apache-2.0 |
Salaboy/drools-game-engine | drools-game-engine-core-impl/src/main/java/org/drools/game/core/GameMessageServiceImpl.java | 1112 | /*
* Copyright 2016 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.game.core;
import org.drools.game.core.api.GameMessage;
import org.drools.game.core.api.GameMessageService;
/**
 * Default {@link GameMessageService} that creates {@link BaseGameMessageImpl} instances.
 */
public class GameMessageServiceImpl implements GameMessageService {

    /**
     * Sender name used for messages that do not originate from a specific player.
     */
    private static final String SYSTEM_PLAYER = "system";

    @Override
    public GameMessage newGameMessage( String playerName, String text ) {
        return new BaseGameMessageImpl( playerName, text );
    }

    @Override
    public GameMessage newGameMessage( String text ) {
        // A message without an explicit sender is attributed to the reserved system sender.
        return newGameMessage( SYSTEM_PLAYER, text );
    }

}
| apache-2.0 |
sundrio/sundrio | maven-plugin/src/main/java/io/sundr/maven/filter/ArtifactFilter.java | 783 | /*
* Copyright 2015 The original authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.sundr.maven.filter;
import org.apache.maven.artifact.Artifact;
/**
 * A filter that is applied to a Maven artifact.
 */
public interface ArtifactFilter {

    /**
     * Applies this filter to the given artifact.
     *
     * @param artifact the artifact to evaluate.
     * @return the resulting artifact; presumably {@code null} when the artifact is
     *         filtered out — confirm against implementations.
     */
    Artifact apply(Artifact artifact);

}
| apache-2.0 |
taktos/ea2ddl | ea2ddl-dao/src/main/java/jp/sourceforge/ea2ddl/dao/cbean/nss/TFilesNss.java | 934 | package jp.sourceforge.ea2ddl.dao.cbean.nss;
import jp.sourceforge.ea2ddl.dao.cbean.cq.TFilesCQ;
/**
* The nest select set-upper of t_files.
* @author DBFlute(AutoGenerator)
*/
public class TFilesNss {

    // The condition-query of t_files that this nest select set-upper wraps; may be null.
    protected TFilesCQ _query;

    // Stores the given condition-query without validation (auto-generated code).
    public TFilesNss(TFilesCQ query) { _query = query; }

    // Returns whether a condition-query was supplied to this set-upper.
    public boolean hasConditionQuery() { return _query != null; }

    // ===================================================================================
    // With Nested Foreign Table
    // =========================
    // (t_files declares no nested foreign tables, so the generator emitted nothing here.)

    // ===================================================================================
    // With Nested Referrer Table
    // ==========================
    // (t_files declares no nested referrer tables, so the generator emitted nothing here.)
}
| apache-2.0 |
SkySeraph-XKnife/XKnife-Android | app/src/androidTest/java/com/skyseraph/xknife/ExampleInstrumentedTest.java | 744 | package com.skyseraph.xknife;
import android.content.Context;
import android.support.test.InstrumentationRegistry;
import android.support.test.runner.AndroidJUnit4;
import org.junit.Test;
import org.junit.runner.RunWith;
import static org.junit.Assert.*;
/**
* Instrumentation test, which will execute on an Android device.
*
* @see <a href="http://d.android.com/tools/testing">Testing documentation</a>
*/
@RunWith(AndroidJUnit4.class)
public class ExampleInstrumentedTest {

    /**
     * Verifies that the instrumentation targets the expected application package.
     */
    @Test
    public void useAppContext() throws Exception {
        // Obtain the context of the application under test.
        Context targetContext = InstrumentationRegistry.getTargetContext();
        assertEquals("com.skyseraph.xknife", targetContext.getPackageName());
    }
}
| apache-2.0 |
thedrummeraki/Aki-SSL | src/aki/packages/x509/Dumpable.java | 146 | package aki.packages.x509;
/**
* Created by aakintol on 28/06/16.
*/
public interface Dumpable {

    /**
     * Serializes this object.
     *
     * @return the serialized bytes — presumably a DER encoding, per the method
     *         name; confirm with implementations.
     */
    byte[] dumpDER();

    /**
     * Serializes this object to text.
     *
     * @return the serialized text — presumably PEM-formatted, per the method
     *         name; confirm with implementations.
     */
    String dumpPEM();
}
| apache-2.0 |
jentfoo/aws-sdk-java | aws-java-sdk-sqs/src/main/java/com/amazonaws/services/sqs/model/PurgeQueueRequest.java | 5042 | /*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.sqs.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;
/**
 * Request object for the SQS <code>PurgeQueue</code> action, which deletes the messages in the
 * specified queue. This class is generated by the AWS SDK code generator (see the
 * <code>@Generated</code> annotation below) — avoid hand-editing it, as regeneration will
 * overwrite changes.
 * <p/>
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/sqs-2012-11-05/PurgeQueue" target="_top">AWS API
 *      Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class PurgeQueueRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /**
     * <p>
     * The URL of the queue from which the <code>PurgeQueue</code> action deletes messages.
     * </p>
     * <p>
     * Queue URLs and names are case-sensitive.
     * </p>
     */
    private String queueUrl;

    /**
     * Default constructor for PurgeQueueRequest object. Callers should use the setter or fluent setter (with...)
     * methods to initialize the object after creating it.
     */
    public PurgeQueueRequest() {
    }

    /**
     * Constructs a new PurgeQueueRequest object. Callers should use the setter or fluent setter (with...) methods to
     * initialize any additional object members.
     *
     * @param queueUrl
     *        The URL of the queue from which the <code>PurgeQueue</code> action deletes messages.</p>
     *        <p>
     *        Queue URLs and names are case-sensitive.
     */
    public PurgeQueueRequest(String queueUrl) {
        setQueueUrl(queueUrl);
    }

    /**
     * <p>
     * The URL of the queue from which the <code>PurgeQueue</code> action deletes messages.
     * </p>
     * <p>
     * Queue URLs and names are case-sensitive.
     * </p>
     *
     * @param queueUrl
     *        The URL of the queue from which the <code>PurgeQueue</code> action deletes messages.</p>
     *        <p>
     *        Queue URLs and names are case-sensitive.
     */
    public void setQueueUrl(String queueUrl) {
        this.queueUrl = queueUrl;
    }

    /**
     * <p>
     * The URL of the queue from which the <code>PurgeQueue</code> action deletes messages.
     * </p>
     * <p>
     * Queue URLs and names are case-sensitive.
     * </p>
     *
     * @return The URL of the queue from which the <code>PurgeQueue</code> action deletes messages.</p>
     *         <p>
     *         Queue URLs and names are case-sensitive.
     */
    public String getQueueUrl() {
        return this.queueUrl;
    }

    /**
     * <p>
     * The URL of the queue from which the <code>PurgeQueue</code> action deletes messages.
     * </p>
     * <p>
     * Queue URLs and names are case-sensitive.
     * </p>
     *
     * @param queueUrl
     *        The URL of the queue from which the <code>PurgeQueue</code> action deletes messages.</p>
     *        <p>
     *        Queue URLs and names are case-sensitive.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public PurgeQueueRequest withQueueUrl(String queueUrl) {
        setQueueUrl(queueUrl);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getQueueUrl() != null)
            sb.append("QueueUrl: ").append(getQueueUrl());
        sb.append("}");
        return sb.toString();
    }

    // Generated equals/hashCode compare only the single queueUrl member.
    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;

        if (obj instanceof PurgeQueueRequest == false)
            return false;
        PurgeQueueRequest other = (PurgeQueueRequest) obj;
        if (other.getQueueUrl() == null ^ this.getQueueUrl() == null)
            return false;
        if (other.getQueueUrl() != null && other.getQueueUrl().equals(this.getQueueUrl()) == false)
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;

        hashCode = prime * hashCode + ((getQueueUrl() == null) ? 0 : getQueueUrl().hashCode());
        return hashCode;
    }

    @Override
    public PurgeQueueRequest clone() {
        return (PurgeQueueRequest) super.clone();
    }

}
| apache-2.0 |
micrometer-metrics/micrometer | micrometer-binders/src/main/java/io/micrometer/binder/jvm/JvmThreadMetrics.java | 3734 | /*
* Copyright 2017 VMware, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.micrometer.binder.jvm;
import io.micrometer.core.instrument.Gauge;
import io.micrometer.core.instrument.MeterRegistry;
import io.micrometer.core.instrument.Tag;
import io.micrometer.core.instrument.Tags;
import io.micrometer.core.instrument.binder.BaseUnits;
import io.micrometer.core.instrument.binder.MeterBinder;
import io.micrometer.core.lang.NonNullApi;
import io.micrometer.core.lang.NonNullFields;

import java.lang.management.ManagementFactory;
import java.lang.management.ThreadMXBean;
import java.util.Arrays;
import java.util.Locale;

import static java.util.Collections.emptyList;
/**
 * {@link MeterBinder} for JVM threads.
 * <p>
 * Registers gauges for the peak, daemon and live thread counts, plus one
 * "jvm.threads.states" gauge per {@link Thread.State} when the runtime
 * supports enumerating thread ids.
 *
 * @author Jon Schneider
 * @author Johnny Lim
 */
@NonNullApi
@NonNullFields
public class JvmThreadMetrics implements MeterBinder {

    private final Iterable<Tag> tags;

    public JvmThreadMetrics() {
        this(emptyList());
    }

    public JvmThreadMetrics(Iterable<Tag> tags) {
        this.tags = tags;
    }

    @Override
    public void bindTo(MeterRegistry registry) {
        ThreadMXBean threadBean = ManagementFactory.getThreadMXBean();

        Gauge.builder("jvm.threads.peak", threadBean, ThreadMXBean::getPeakThreadCount)
                .tags(tags)
                .description("The peak live thread count since the Java virtual machine started or peak was reset")
                .baseUnit(BaseUnits.THREADS)
                .register(registry);

        Gauge.builder("jvm.threads.daemon", threadBean, ThreadMXBean::getDaemonThreadCount)
                .tags(tags)
                .description("The current number of live daemon threads")
                .baseUnit(BaseUnits.THREADS)
                .register(registry);

        Gauge.builder("jvm.threads.live", threadBean, ThreadMXBean::getThreadCount)
                .tags(tags)
                .description("The current number of live threads including both daemon and non-daemon threads")
                .baseUnit(BaseUnits.THREADS)
                .register(registry);

        try {
            // Probe call: runtimes that cannot enumerate threads (e.g. SubstrateVM)
            // throw an Error here, in which case the per-state gauges are skipped.
            threadBean.getAllThreadIds();
            for (Thread.State state : Thread.State.values()) {
                Gauge.builder("jvm.threads.states", threadBean, (bean) -> getThreadStateCount(bean, state))
                        .tags(Tags.concat(tags, "state", getStateTagValue(state)))
                        .description("The current number of threads having " + state + " state")
                        .baseUnit(BaseUnits.THREADS)
                        .register(registry);
            }
        } catch (Error error) {
            // An error will be thrown for unsupported operations
            // e.g. SubstrateVM does not support getAllThreadIds
        }
    }

    // VisibleForTesting
    static long getThreadStateCount(ThreadMXBean threadBean, Thread.State state) {
        // getThreadInfo can return null entries for threads that terminated in the
        // meantime; filter them before matching the state.
        return Arrays.stream(threadBean.getThreadInfo(threadBean.getAllThreadIds()))
                .filter(threadInfo -> threadInfo != null && threadInfo.getThreadState() == state)
                .count();
    }

    /**
     * Maps a thread state to its tag value, e.g. TIMED_WAITING -> "timed-waiting".
     * Locale.ROOT keeps the tag stable regardless of the JVM default locale
     * (a plain toLowerCase() would emit a dotless ı for WAITING states under
     * the Turkish locale).
     */
    private static String getStateTagValue(Thread.State state) {
        return state.name().toLowerCase(Locale.ROOT).replace("_", "-");
    }
}
| apache-2.0 |
android-art-intel/marshmallow | art-extension/opttests/src/OptimizationTests/ShortMethodsInliningNonVirtualInvokes/InvokeStaticAThrowNullSet_001/Main.java | 1609 | /*
* Copyright (C) 2015 Intel Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package OptimizationTests.ShortMethodsInliningNonVirtualInvokes.InvokeStaticAThrowNullSet_001;
// The test checks that stack after NullPointerException occurs is correct despite inlining
class Main {
    // Loop count; also the length of the shared array.
    final static int iterations = 10;
    static int[] thingiesArray = new int[iterations];
    // Short accessor: a candidate for inlining by the optimizing compiler.
    public static int getThingies(int[] arr, int i) {
        return arr[i];
    }
    // Short mutator: a candidate for inlining by the optimizing compiler.
    public static void setThingies(int[] arr, int newThingy, int i) {
        arr[i] = newThingy;
    }
    public static void main(String[] args) {
        int nextThingy = -10;
        int sumArrElements = 0;
        // Fill the array and accumulate its sum (the sum is not used afterwards).
        for(int i = 0; i < iterations; i++) {
            thingiesArray[i] = i;
            sumArrElements = sumArrElements + thingiesArray[i];
        }
        // On the final iteration the array reference is deliberately set to null so
        // the subsequent setThingies(...) call throws NullPointerException; the test
        // harness verifies the resulting stack trace even when the call was inlined.
        for(int i = 0; i < iterations; i++) {
            nextThingy = getThingies(thingiesArray, i) - i*1;
            if (i == iterations - 1)
                thingiesArray = null;
            setThingies(thingiesArray, nextThingy, i);
        }
    }
}
| apache-2.0 |
FiveTwoTeam/LogisticsManagementSystem | src/main/java/lms/service/impl/StationRepetoryItemServiceImpl.java | 1265 | package lms.service.impl;
import lms.mapper.LmsStationRepetoryItemMapper;
import lms.model.LmsStationRepetoryItem;
import lms.service.StationRepetoryItemService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.util.List;
@Service
public class StationRepetoryItemServiceImpl implements StationRepetoryItemService {
    // MyBatis-style mapper this service delegates all persistence calls to.
    @Autowired
    private LmsStationRepetoryItemMapper lmsStationRepetoryItemMapper;
    // Returns all repository items via the mapper's product/station join query.
    public List<LmsStationRepetoryItem> findAll() {
        List<LmsStationRepetoryItem> list = lmsStationRepetoryItemMapper.findproductAndStationRepetoryMap();
        return list;
    }
    // Inserts a new repository item.
    public void addStationRepetoryItem(LmsStationRepetoryItem lmsStationRepetoryItem) {
        lmsStationRepetoryItemMapper.insert(lmsStationRepetoryItem);
    }
    // Deletes the item with the given primary key.
    public void deleteStationRepetoryItem(Long id) {
        lmsStationRepetoryItemMapper.deleteByPrimaryKey(id);
    }
    // Updates an existing item by its primary key.
    public void updateStationRepetoryItem(LmsStationRepetoryItem lmsStationRepetoryItem) {
        lmsStationRepetoryItemMapper.updateByPrimaryKey(lmsStationRepetoryItem);
    }
    // Looks up a single item by primary key; returns null if absent (mapper contract).
    public LmsStationRepetoryItem findById(long id) {
        return lmsStationRepetoryItemMapper.selectByPrimaryKey(id);
    }
}
| apache-2.0 |
CiLiNet-Android/cilinet-go-dutch | GoDutch/src/cilinet/godutch/model/CategoryModel.java | 928 | package cilinet.godutch.model;
import java.io.Serializable;
import java.util.Date;
/** Category model. **/
public class CategoryModel implements Serializable {

    // NOTE(review): no explicit serialVersionUID — the default computed id is used.
    // Consider adding one if instances are ever persisted across app versions.

    /** Category id. **/
    public int id;

    /** Category name; also the display string (see {@link #toString()}). **/
    public String name;

    /** Type flag name. **/
    public String typeFlag;

    /** Parent category id (default 0). **/
    public int parentId = 0;

    /** Category path (breadcrumb). **/
    public String path;

    /** Creation date (defaults to "now"). **/
    public Date createDate = new Date();

    /** State: 0 = disabled, 1 = enabled (default 1). **/
    public int state = 1;

    /**
     * Full constructor.
     *
     * @param id         category id
     * @param name       category name
     * @param typeFlag   type flag name
     * @param parentId   parent category id
     * @param path       breadcrumb path
     * @param createDate creation date
     * @param state      0 = disabled, 1 = enabled
     */
    public CategoryModel(int id, String name, String typeFlag, int parentId,
            String path, Date createDate, int state) {
        this.id = id;
        this.name = name;
        this.typeFlag = typeFlag;
        this.parentId = parentId;
        this.path = path;
        this.createDate = createDate;
        this.state = state;
    }

    /** No-arg constructor: keeps the field defaults declared above. */
    public CategoryModel() {}

    /** Returns the category name as the string representation. */
    @Override
    public String toString() {
        return name;
    }
}
| apache-2.0 |
stevenhva/InfoLearn_OpenOLAT | src/main/java/org/olat/ims/qti/editor/beecom/objects/Section.java | 10360 | /**
* OLAT - Online Learning and Training<br>
* http://www.olat.org
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); <br>
* you may not use this file except in compliance with the License.<br>
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing,<br>
* software distributed under the License is distributed on an "AS IS" BASIS, <br>
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <br>
* See the License for the specific language governing permissions and <br>
* limitations under the License.
* <p>
* Copyright (c) since 2004 at Multimedia- & E-Learning Services (MELS),<br>
* University of Zurich, Switzerland.
* <hr>
* <a href="http://www.openolat.org">
* OpenOLAT - Online Learning and Training</a><br>
* This file has been modified by the OpenOLAT community. Changes are licensed
* under the Apache 2.0 license as the original file.
*/
package org.olat.ims.qti.editor.beecom.objects;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import org.dom4j.Element;
import org.olat.core.util.CodeHelper;
import org.olat.ims.qti.editor.QTIEditHelper;
/**
 * A QTI <code>section</code>: holds the section attributes, the contained
 * items, section-level controls/feedback and the selection/ordering
 * configuration, and serializes itself into a QTI XML element.
 *
 * @author rkulow
 */
public class Section implements QTIObject {

    // Assessment attributes
    private String ident = null; // required, max 256 chars
    private String title = null; // optional, max 256 chars

    // Elements
    private QTIObject qticomment = null; // occurs 0 or 1 time
    private QTIObject duration = null; // occurs 0 or 1 time
    private List qtimetadatas = null; // occurs 0 to many
    private List sectionprecondition = null; // occurs 0 to many
    private List sectionpostcondition = null; // occurs 0 to many
    private String objectives = null; // occurs 0 to many, 1st if available is processed
    private List rubrics = null; // occurs 0 to many
    private List sectioncontrols = new ArrayList(); // occurs 0 to many
    private QTIObject presentation_material = null; // occurs 0 to 1 time
    private QTIObject outcomes_processing = null;
    private QTIObject sectionproc_extension = null; // occurs 0 to 1 time
    private List sectionfeedbacks = new ArrayList(); // occurs 0 to many
    private SelectionOrdering selection_ordering = null;
    private QTIObject reference = null; // occurs 0 to 1 time
    private List sections = null; // occurs 0 to 1 time (sections and section_references)
    private List<Item> items = new ArrayList<Item>(); // occurs 0 to many (items and item_references)
    private boolean alienItems = false; // cached aggregate of Item.isAlient(), see checkAlienItems()

    /**
     * Creates a section with a generated ident, a default title, one default
     * control and a default selection/ordering configuration.
     */
    public Section() {
        setIdent(String.valueOf(CodeHelper.getRAMUniqueID()));
        setTitle("New Section");
        getSectioncontrols().add(new Control());
        setSelection_ordering(new SelectionOrdering());
    }

    /**
     * Serializes this section (attributes, duration, objectives, controls,
     * outcomes processing, feedback, selection/ordering and items, in that
     * order) as a <code>section</code> child of the given root element.
     *
     * @see org.olat.ims.qti.editor.beecom.QTIObject#toXml()
     */
    public void addToElement(Element root) {
        // SECTION
        Element section = root.addElement("section");
        section.addAttribute("ident", this.ident);
        section.addAttribute("title", this.title);

        // DURATION (optional)
        QTIObject durationObj = getDuration();
        if (durationObj != null) {
            durationObj.addToElement(section);
        }

        // OBJECTIVES
        QTIEditHelper.addObjectives(section, objectives);

        // SECTIONCONTROL
        for (Object o : sectioncontrols) {
            QTIObject obj = (QTIObject) o;
            if (obj != null) {
                obj.addToElement(section);
            }
        }

        // OUTCOMES_PROCESSING (optional)
        QTIObject outcomesObj = getOutcomes_processing();
        if (outcomesObj != null) {
            outcomesObj.addToElement(section);
        }

        // SECTIONFEEDBACK
        for (Object o : sectionfeedbacks) {
            QTIObject obj = (QTIObject) o;
            if (obj != null) {
                obj.addToElement(section);
            }
        }

        // SELECTION ORDERING (optional)
        SelectionOrdering selectionOrdering = getSelection_ordering();
        if (selectionOrdering != null) {
            selectionOrdering.addToElement(section);
        }

        // ITEMS
        for (Item item : items) {
            if (item != null) {
                item.addToElement(section);
            }
        }
    }

    /**
     * Recomputes and caches whether any contained item is "alien"
     * (authored elsewhere, as reported by Item.isAlient()).
     *
     * @return true if at least one item is alien
     */
    public boolean checkAlienItems() {
        alienItems = false;
        for (Item item : items) {
            alienItems = alienItems || item.isAlient();
        }
        return alienItems;
    }

    /** @return the cached result of the last checkAlienItems() run */
    public boolean hasAlienItems() {
        return alienItems;
    }

    /**
     * Returns the duration.
     * @return QTIObject
     */
    public QTIObject getDuration() {
        return duration;
    }

    /**
     * Returns the ident.
     * @return String
     */
    public String getIdent() {
        return ident;
    }

    /**
     * Returns the items.
     * @return List
     */
    public List<Item> getItems() {
        return items;
    }

    /**
     * Returns the objectives.
     * @return String
     */
    public String getObjectives() {
        return objectives;
    }

    /**
     * Returns the outcomes_processing.
     * @return QTIObject
     */
    public QTIObject getOutcomes_processing() {
        return outcomes_processing;
    }

    /**
     * Returns the presentation_material.
     * @return QTIObject
     */
    public QTIObject getPresentation_material() {
        return presentation_material;
    }

    /**
     * Returns the qticomment.
     * @return QTIObject
     */
    public QTIObject getQticomment() {
        return qticomment;
    }

    /**
     * Returns the qtimetadatas.
     * @return List
     */
    public List getQtimetadatas() {
        return qtimetadatas;
    }

    /**
     * Returns the reference.
     * @return QTIObject
     */
    public QTIObject getReference() {
        return reference;
    }

    /**
     * Returns the rubrics.
     * @return List
     */
    public List getRubrics() {
        return rubrics;
    }

    /**
     * Returns the sectioncontrols.
     * @return List
     */
    public List getSectioncontrols() {
        return sectioncontrols;
    }

    /**
     * Returns the sectionfeedbacks.
     * @return List
     */
    public List getSectionfeedbacks() {
        return sectionfeedbacks;
    }

    /**
     * Returns the sectionpostcondition.
     * @return List
     */
    public List getSectionpostcondition() {
        return sectionpostcondition;
    }

    /**
     * Returns the sectionprecondition.
     * @return List
     */
    public List getSectionprecondition() {
        return sectionprecondition;
    }

    /**
     * Returns the sectionproc_extension.
     * @return QTIObject
     */
    public QTIObject getSectionproc_extension() {
        return sectionproc_extension;
    }

    /**
     * Returns the sections.
     * @return List
     */
    public List getSections() {
        return sections;
    }

    /**
     * Returns the selection_ordering.
     * @return SelectionOrdering
     */
    public SelectionOrdering getSelection_ordering() {
        return selection_ordering;
    }

    /**
     * Returns the title.
     * @return String
     */
    public String getTitle() {
        return title;
    }

    /**
     * Sets the duration.
     * @param duration The duration to set
     */
    public void setDuration(QTIObject duration) {
        this.duration = duration;
    }

    /**
     * Sets the ident.
     * @param ident The ident to set
     */
    public void setIdent(String ident) {
        this.ident = ident;
    }

    /**
     * Sets the items and refreshes the alien-items cache.
     * @param items The items to set
     */
    public void setItems(List items) {
        this.items = items;
        checkAlienItems();
    }

    /**
     * Sets the objectives.
     * @param objectives The objectives to set
     */
    public void setObjectives(String objectives) {
        this.objectives = objectives;
    }

    /**
     * Sets the outcomes_processing.
     * @param outcomes_processing The outcomes_processing to set
     */
    public void setOutcomes_processing(QTIObject outcomes_processing) {
        this.outcomes_processing = outcomes_processing;
    }

    /**
     * Sets the presentation_material.
     * @param presentation_material The presentation_material to set
     */
    public void setPresentation_material(QTIObject presentation_material) {
        this.presentation_material = presentation_material;
    }

    /**
     * Sets the qticomment.
     * @param qticomment The qticomment to set
     */
    public void setQticomment(QTIObject qticomment) {
        this.qticomment = qticomment;
    }

    /**
     * Sets the qtimetadatas.
     * @param qtimetadatas The qtimetadatas to set
     */
    public void setQtimetadatas(List qtimetadatas) {
        this.qtimetadatas = qtimetadatas;
    }

    /**
     * Sets the reference.
     * @param reference The reference to set
     */
    public void setReference(QTIObject reference) {
        this.reference = reference;
    }

    /**
     * Sets the rubrics.
     * @param rubrics The rubrics to set
     */
    public void setRubrics(List rubrics) {
        this.rubrics = rubrics;
    }

    /**
     * Sets the sectioncontrols.
     * @param sectioncontrols The sectioncontrols to set
     */
    public void setSectioncontrols(List sectioncontrols) {
        this.sectioncontrols = sectioncontrols;
    }

    /**
     * Sets the sectionfeedbacks.
     * @param sectionfeedbacks The sectionfeedbacks to set
     */
    public void setSectionfeedbacks(List sectionfeedbacks) {
        this.sectionfeedbacks = sectionfeedbacks;
    }

    /**
     * Sets the sectionpostcondition.
     * @param sectionpostcondition The sectionpostcondition to set
     */
    public void setSectionpostcondition(List sectionpostcondition) {
        this.sectionpostcondition = sectionpostcondition;
    }

    /**
     * Sets the sectionprecondition.
     * @param sectionprecondition The sectionprecondition to set
     */
    public void setSectionprecondition(List sectionprecondition) {
        this.sectionprecondition = sectionprecondition;
    }

    /**
     * Sets the sectionproc_extension.
     * @param sectionproc_extension The sectionproc_extension to set
     */
    public void setSectionproc_extension(QTIObject sectionproc_extension) {
        this.sectionproc_extension = sectionproc_extension;
    }

    /**
     * Sets the sections.
     * @param sections The sections to set
     */
    public void setSections(List sections) {
        this.sections = sections;
    }

    /**
     * Sets the selection_ordering.
     * @param selection_ordering The selection_ordering to set
     */
    public void setSelection_ordering(SelectionOrdering selection_ordering) {
        this.selection_ordering = selection_ordering;
    }

    /**
     * Sets the title.
     * @param title The title to set
     */
    public void setTitle(String title) {
        this.title = title;
    }

    /**
     * Checks if this section contains any questions of type 'essay'.
     * @return true if an essay question is present
     */
    public boolean containsEssayQuestions() {
        for (Item item : items) {
            if (item != null && item.getQuestion().getType() == Question.TYPE_ESSAY) {
                return true;
            }
        }
        return false;
    }
}
| apache-2.0 |
wjsrobertson/j2menace | AsteroidBelt/src/com/rattat/micro/game/aster/mvc/Model.java | 7959 | /*
* Model.java
*
* Copyright 2007 William Robertson
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*/
package com.rattat.micro.game.aster.mvc;
import java.util.Enumeration;
import java.util.Vector;
import com.rattat.micro.game.aster.elements.Asteroid;
import com.rattat.micro.game.aster.elements.ExplodingPolygon;
import com.rattat.micro.game.aster.elements.FlyingSaucer;
import com.rattat.micro.game.aster.elements.Missile;
import com.rattat.micro.game.aster.elements.SpaceShip;
import com.rattat.micro.game.aster.elements.Star;
/**
 * <p>
 * Defines the model which contains all information about
 * the state of the game. FloatingObjects, score and anything
 * else required for gameplay is held here.
 * </p>
 *
 * @author william@rattat.com
 */
public class Model {

    /**
     * The spaceship that the user controls
     */
    private SpaceShip spaceShip = new SpaceShip(25);

    /**
     * All asteroids in the game, active and inactive (simple object pool)
     */
    private Vector asteroids = new Vector();

    /**
     * All explosions in the game, active and inactive (simple object pool)
     */
    private Vector explosions = new Vector();

    /**
     * The missile that can be fired by the ship
     */
    private Missile missile = new Missile();

    /**
     * The maximum speed of the ship's missile
     */
    private double shipMissileMaxSpeed = 200;

    /**
     * The number of lives the user has
     */
    private int lives = 0;

    /**
     * The current score the player has achieved
     */
    private int score = 0;

    /**
     * The time since the ship was created
     *
     * @todo - this shouldn't be required - use SpaceShip age
     */
    private int createShipTimer = 0;

    /**
     * The current level of gameplay - the number of asteroids at the start
     */
    private int currentLevel = 1;

    /**
     * The flying saucer who appears and tries to kill the player
     */
    private FlyingSaucer saucer = new FlyingSaucer(FlyingSaucer.LEFT, 20);

    /**
     * The Missile that is fired by the flying saucer
     */
    private Missile saucerMissile = new Missile();

    /**
     * The maximum speed of the saucer's missile
     */
    private double saucerMissileMaxSpeed = 170;

    /**
     * The number of game ticks since the player died
     */
    private int deadTimer = -1;

    /**
     * Flag - ship thruster on or off
     */
    private boolean thrusterOn = false;

    /**
     * The stars that form the background
     */
    Star stars[] = new Star[20];

    /**
     * Create a new instance of Model and apply the missile speed limits
     */
    public Model() {
        saucerMissile.setMaxSpeed(saucerMissileMaxSpeed);
        missile.setMaxSpeed(shipMissileMaxSpeed);
    }

    /**
     * Get the collection of Asteroid objects
     *
     * @return
     */
    public Vector getAsteroids() {
        return asteroids;
    }

    /**
     * Get the current level of gameplay
     *
     * @return
     */
    public int getCurrentLevel() {
        return currentLevel;
    }

    /**
     * Set the current level of gameplay
     *
     * @param currentLevel
     */
    public void setCurrentLevel(int currentLevel) {
        this.currentLevel = currentLevel;
    }

    /**
     * Increment the current level by one
     */
    public void incCurrentLevel() {
        this.currentLevel++;
    }

    /**
     * Get the length of time since the player died
     *
     * @return
     */
    public int getDeadTimer() {
        return deadTimer;
    }

    /**
     * Set the length of time since the player died
     *
     * @param deadTimer
     */
    public void setDeadTimer(int deadTimer) {
        this.deadTimer = deadTimer;
    }

    /**
     * Increment the length of time since the player died
     */
    public void incDeadTimer() {
        this.deadTimer++;
    }

    /**
     * Get the collection of ExplodingPolygon objects
     *
     * @return
     */
    public Vector getExplosions() {
        return explosions;
    }

    /**
     * Get the number of lives the player has remaining
     *
     * @return
     */
    public int getLives() {
        return lives;
    }

    /**
     * Set the number of lives the player has remaining
     *
     * @param lives
     */
    public void setLives(int lives) {
        this.lives = lives;
    }

    /**
     * Get the missile that can be fired by the user
     *
     * @return
     */
    public Missile getMissile() {
        return missile;
    }

    /**
     * Get the FlyingSaucer object
     *
     * @return
     */
    public FlyingSaucer getSaucer() {
        return saucer;
    }

    /**
     * Get the missile that can be fired by the FlyingSaucer
     *
     * @return
     */
    public Missile getSaucerMissile() {
        return saucerMissile;
    }

    /**
     * Get the player's current score
     *
     * @return
     */
    public int getScore() {
        return score;
    }

    /**
     * Set the player's current score
     *
     * @param score
     */
    public void setScore(int score) {
        this.score = score;
    }

    /**
     * Increment the score by {@code score}
     *
     * @param score
     */
    public void incScore(int score) {
        this.score += score;
    }

    /**
     * Get the SpaceShip instance
     *
     * @return
     */
    public SpaceShip getSpaceShip() {
        return spaceShip;
    }

    /**
     * Check if the thruster is on
     *
     * @return True if on, false otherwise
     */
    public boolean isThrusterOn() {
        return thrusterOn;
    }

    /**
     * Set if the thruster is on or not
     *
     * @param thrusterOn
     */
    public void setThrusterOn(boolean thrusterOn) {
        this.thrusterOn = thrusterOn;
    }

    /**
     * Get the stars that form the background
     *
     * @return
     */
    public Star[] getStars() {
        return stars;
    }

    /**
     * Set the stars that form the background
     *
     * @param stars
     */
    public void setStars(Star[] stars) {
        this.stars = stars;
    }

    /**
     * Get the number of game ticks since the ship was created
     *
     * @return
     */
    public int getCreateShipTimer() {
        return createShipTimer;
    }

    /**
     * Set the number of game ticks since the ship was created
     *
     * @param createShipTimer
     */
    public void setCreateShipTimer(int createShipTimer) {
        this.createShipTimer = createShipTimer;
    }

    /**
     * Increment the number of game ticks since the ship was created by one
     */
    public void incCreateShipTimer() {
        createShipTimer++;
    }

    /**
     * <p>
     * Get an inactive Asteroid to use or create a new
     * one if there are no inactive ones.
     * </p>
     *
     * <p>
     * The Asteroid will be activated before it is returned
     * </p>
     *
     * <p>
     * This is a simple form of object pooling
     * </p>
     *
     * @return
     */
    public Asteroid nextFreeAsteroid() {
        Asteroid asteroid = null;

        // check if there is an inactive Asteroid to use
        for ( Enumeration en = asteroids.elements() ; en.hasMoreElements() ; ) {
            asteroid = (Asteroid) en.nextElement();

            if ( ! asteroid.isActive() ) {
                asteroid.setActive(true);
                return asteroid;
            }
        }

        // no existing asteroids found, so create a new one
        asteroid = new Asteroid();
        asteroid.setActive(true);

        if ( ! asteroids.contains(asteroid) ) {
            asteroids.addElement(asteroid);
        }

        return asteroid;
    }

    /**
     * <p>
     * Get an inactive ExplodingPolygon to use or create a new
     * one if there are no inactive ones.
     * </p>
     *
     * <p>
     * The ExplodingPolygon will be activated before it is returned
     * </p>
     *
     * <p>
     * This is a simple form of object pooling
     * </p>
     *
     * @return
     */
    public ExplodingPolygon nextFreeExplosion() {
        ExplodingPolygon explosion = null;

        // check if there is an inactive ExplodingPolygon to use
        for ( Enumeration en = explosions.elements() ; en.hasMoreElements() ; ) {
            explosion = (ExplodingPolygon) en.nextElement();

            if ( ! explosion.isActive() ) {
                explosion.reset();
                explosion.setActive(true);
                return explosion;
            }
        }

        // no existing explosions found, so create a new one
        explosion = new ExplodingPolygon();
        explosion.setActive(true);
        explosions.addElement(explosion);

        return explosion;
    }
}
| apache-2.0 |
aws/aws-sdk-java | aws-java-sdk-appstream/src/main/java/com/amazonaws/services/appstream/model/DescribeSessionsRequest.java | 15384 | /*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.appstream.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;
/**
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/appstream-2016-12-01/DescribeSessions" target="_top">AWS API
* Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class DescribeSessionsRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {
/**
* <p>
* The name of the stack. This value is case-sensitive.
* </p>
*/
private String stackName;
/**
* <p>
* The name of the fleet. This value is case-sensitive.
* </p>
*/
private String fleetName;
/**
* <p>
* The user identifier (ID). If you specify a user ID, you must also specify the authentication type.
* </p>
*/
private String userId;
/**
* <p>
* The pagination token to use to retrieve the next page of results for this operation. If this value is null, it
* retrieves the first page.
* </p>
*/
private String nextToken;
/**
* <p>
* The size of each page of results. The default value is 20 and the maximum value is 50.
* </p>
*/
private Integer limit;
/**
* <p>
* The authentication method. Specify <code>API</code> for a user authenticated using a streaming URL or
* <code>SAML</code> for a SAML federated user. The default is to authenticate users using a streaming URL.
* </p>
*/
private String authenticationType;
/**
* <p>
* The name of the stack. This value is case-sensitive.
* </p>
*
* @param stackName
* The name of the stack. This value is case-sensitive.
*/
public void setStackName(String stackName) {
this.stackName = stackName;
}
/**
* <p>
* The name of the stack. This value is case-sensitive.
* </p>
*
* @return The name of the stack. This value is case-sensitive.
*/
public String getStackName() {
return this.stackName;
}
/**
* <p>
* The name of the stack. This value is case-sensitive.
* </p>
*
* @param stackName
* The name of the stack. This value is case-sensitive.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public DescribeSessionsRequest withStackName(String stackName) {
setStackName(stackName);
return this;
}
/**
* <p>
* The name of the fleet. This value is case-sensitive.
* </p>
*
* @param fleetName
* The name of the fleet. This value is case-sensitive.
*/
public void setFleetName(String fleetName) {
this.fleetName = fleetName;
}
/**
* <p>
* The name of the fleet. This value is case-sensitive.
* </p>
*
* @return The name of the fleet. This value is case-sensitive.
*/
public String getFleetName() {
return this.fleetName;
}
/**
* <p>
* The name of the fleet. This value is case-sensitive.
* </p>
*
* @param fleetName
* The name of the fleet. This value is case-sensitive.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public DescribeSessionsRequest withFleetName(String fleetName) {
setFleetName(fleetName);
return this;
}
/**
* <p>
* The user identifier (ID). If you specify a user ID, you must also specify the authentication type.
* </p>
*
* @param userId
* The user identifier (ID). If you specify a user ID, you must also specify the authentication type.
*/
public void setUserId(String userId) {
this.userId = userId;
}
/**
* <p>
* The user identifier (ID). If you specify a user ID, you must also specify the authentication type.
* </p>
*
* @return The user identifier (ID). If you specify a user ID, you must also specify the authentication type.
*/
public String getUserId() {
return this.userId;
}
/**
* <p>
* The user identifier (ID). If you specify a user ID, you must also specify the authentication type.
* </p>
*
* @param userId
* The user identifier (ID). If you specify a user ID, you must also specify the authentication type.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public DescribeSessionsRequest withUserId(String userId) {
setUserId(userId);
return this;
}
    /**
     * Sets the pagination token for this request.
     * <p>
     * The pagination token to use to retrieve the next page of results for this operation. If this value is null, it
     * retrieves the first page.
     * </p>
     *
     * @param nextToken
     *        The pagination token to use to retrieve the next page of results for this operation. If this value is
     *        null, it retrieves the first page.
     */
    public void setNextToken(String nextToken) {
        this.nextToken = nextToken;
    }
/**
* <p>
* The pagination token to use to retrieve the next page of results for this operation. If this value is null, it
* retrieves the first page.
* </p>
*
* @return The pagination token to use to retrieve the next page of results for this operation. If this value is
* null, it retrieves the first page.
*/
public String getNextToken() {
return this.nextToken;
}
/**
* <p>
* The pagination token to use to retrieve the next page of results for this operation. If this value is null, it
* retrieves the first page.
* </p>
*
* @param nextToken
* The pagination token to use to retrieve the next page of results for this operation. If this value is
* null, it retrieves the first page.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public DescribeSessionsRequest withNextToken(String nextToken) {
setNextToken(nextToken);
return this;
}
    /**
     * Sets the page size for this request.
     * <p>
     * The size of each page of results. The default value is 20 and the maximum value is 50.
     * </p>
     *
     * @param limit
     *        The size of each page of results. The default value is 20 and the maximum value is 50.
     *        May be {@code null} to use the service default.
     */
    public void setLimit(Integer limit) {
        this.limit = limit;
    }
/**
* <p>
* The size of each page of results. The default value is 20 and the maximum value is 50.
* </p>
*
* @return The size of each page of results. The default value is 20 and the maximum value is 50.
*/
public Integer getLimit() {
return this.limit;
}
/**
* <p>
* The size of each page of results. The default value is 20 and the maximum value is 50.
* </p>
*
* @param limit
* The size of each page of results. The default value is 20 and the maximum value is 50.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public DescribeSessionsRequest withLimit(Integer limit) {
setLimit(limit);
return this;
}
    /**
     * Sets the authentication method as its raw string value. Prefer the
     * {@link AuthenticationType} overload for compile-time safety.
     * <p>
     * The authentication method. Specify <code>API</code> for a user authenticated using a streaming URL or
     * <code>SAML</code> for a SAML federated user. The default is to authenticate users using a streaming URL.
     * </p>
     *
     * @param authenticationType
     *        The authentication method. Specify <code>API</code> for a user authenticated using a streaming URL or
     *        <code>SAML</code> for a SAML federated user. The default is to authenticate users using a streaming URL.
     * @see AuthenticationType
     */
    public void setAuthenticationType(String authenticationType) {
        this.authenticationType = authenticationType;
    }
/**
* <p>
* The authentication method. Specify <code>API</code> for a user authenticated using a streaming URL or
* <code>SAML</code> for a SAML federated user. The default is to authenticate users using a streaming URL.
* </p>
*
* @return The authentication method. Specify <code>API</code> for a user authenticated using a streaming URL or
* <code>SAML</code> for a SAML federated user. The default is to authenticate users using a streaming URL.
* @see AuthenticationType
*/
public String getAuthenticationType() {
return this.authenticationType;
}
/**
* <p>
* The authentication method. Specify <code>API</code> for a user authenticated using a streaming URL or
* <code>SAML</code> for a SAML federated user. The default is to authenticate users using a streaming URL.
* </p>
*
* @param authenticationType
* The authentication method. Specify <code>API</code> for a user authenticated using a streaming URL or
* <code>SAML</code> for a SAML federated user. The default is to authenticate users using a streaming URL.
* @return Returns a reference to this object so that method calls can be chained together.
* @see AuthenticationType
*/
public DescribeSessionsRequest withAuthenticationType(String authenticationType) {
setAuthenticationType(authenticationType);
return this;
}
/**
* <p>
* The authentication method. Specify <code>API</code> for a user authenticated using a streaming URL or
* <code>SAML</code> for a SAML federated user. The default is to authenticate users using a streaming URL.
* </p>
*
* @param authenticationType
* The authentication method. Specify <code>API</code> for a user authenticated using a streaming URL or
* <code>SAML</code> for a SAML federated user. The default is to authenticate users using a streaming URL.
* @see AuthenticationType
*/
public void setAuthenticationType(AuthenticationType authenticationType) {
withAuthenticationType(authenticationType);
}
/**
* <p>
* The authentication method. Specify <code>API</code> for a user authenticated using a streaming URL or
* <code>SAML</code> for a SAML federated user. The default is to authenticate users using a streaming URL.
* </p>
*
* @param authenticationType
* The authentication method. Specify <code>API</code> for a user authenticated using a streaming URL or
* <code>SAML</code> for a SAML federated user. The default is to authenticate users using a streaming URL.
* @return Returns a reference to this object so that method calls can be chained together.
* @see AuthenticationType
*/
public DescribeSessionsRequest withAuthenticationType(AuthenticationType authenticationType) {
this.authenticationType = authenticationType.toString();
return this;
}
/**
* Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
* redacted from this string using a placeholder value.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getStackName() != null)
sb.append("StackName: ").append(getStackName()).append(",");
if (getFleetName() != null)
sb.append("FleetName: ").append(getFleetName()).append(",");
if (getUserId() != null)
sb.append("UserId: ").append(getUserId()).append(",");
if (getNextToken() != null)
sb.append("NextToken: ").append(getNextToken()).append(",");
if (getLimit() != null)
sb.append("Limit: ").append(getLimit()).append(",");
if (getAuthenticationType() != null)
sb.append("AuthenticationType: ").append(getAuthenticationType());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof DescribeSessionsRequest == false)
return false;
DescribeSessionsRequest other = (DescribeSessionsRequest) obj;
if (other.getStackName() == null ^ this.getStackName() == null)
return false;
if (other.getStackName() != null && other.getStackName().equals(this.getStackName()) == false)
return false;
if (other.getFleetName() == null ^ this.getFleetName() == null)
return false;
if (other.getFleetName() != null && other.getFleetName().equals(this.getFleetName()) == false)
return false;
if (other.getUserId() == null ^ this.getUserId() == null)
return false;
if (other.getUserId() != null && other.getUserId().equals(this.getUserId()) == false)
return false;
if (other.getNextToken() == null ^ this.getNextToken() == null)
return false;
if (other.getNextToken() != null && other.getNextToken().equals(this.getNextToken()) == false)
return false;
if (other.getLimit() == null ^ this.getLimit() == null)
return false;
if (other.getLimit() != null && other.getLimit().equals(this.getLimit()) == false)
return false;
if (other.getAuthenticationType() == null ^ this.getAuthenticationType() == null)
return false;
if (other.getAuthenticationType() != null && other.getAuthenticationType().equals(this.getAuthenticationType()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getStackName() == null) ? 0 : getStackName().hashCode());
hashCode = prime * hashCode + ((getFleetName() == null) ? 0 : getFleetName().hashCode());
hashCode = prime * hashCode + ((getUserId() == null) ? 0 : getUserId().hashCode());
hashCode = prime * hashCode + ((getNextToken() == null) ? 0 : getNextToken().hashCode());
hashCode = prime * hashCode + ((getLimit() == null) ? 0 : getLimit().hashCode());
hashCode = prime * hashCode + ((getAuthenticationType() == null) ? 0 : getAuthenticationType().hashCode());
return hashCode;
}
    @Override
    public DescribeSessionsRequest clone() {
        // Shallow copy via the superclass clone; all fields visible here are
        // immutable (String/Integer), so a shallow copy is sufficient.
        return (DescribeSessionsRequest) super.clone();
    }
}
| apache-2.0 |
xiaomozhang/druid | druid-1.0.9/src/test/java/com/alibaba/druid/bvt/sql/mysql/MySqlInsertTest_10.java | 3466 | /*
* Copyright 1999-2011 Alibaba Group Holding Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.alibaba.druid.bvt.sql.mysql;
import java.util.List;
import org.junit.Assert;
import com.alibaba.druid.sql.MysqlTest;
import com.alibaba.druid.sql.SQLUtils;
import com.alibaba.druid.sql.ast.SQLStatement;
import com.alibaba.druid.sql.dialect.mysql.ast.statement.MySqlInsertStatement;
import com.alibaba.druid.sql.dialect.mysql.parser.MySqlStatementParser;
import com.alibaba.druid.sql.dialect.mysql.visitor.MySqlSchemaStatVisitor;
public class MySqlInsertTest_10 extends MysqlTest {

    /** Multi-row INSERT shared by both tests: four value tuples for columns (a, b). */
    private static final String SQL = "insert into t(a,b) values ('a1','b1'),('a2','b2'),('a3','b3'),('a4','b4');";

    /**
     * With complete-values parsing disabled and the values window capped at 3,
     * only the first three tuples are retained in the parsed AST, and the
     * formatted output reflects that truncation.
     */
    public void test_parseCompleteValues_false() throws Exception {
        MySqlStatementParser parser = new MySqlStatementParser(SQL);
        parser.setParseCompleteValues(false);
        parser.setParseValuesSize(3);

        List<SQLStatement> statements = parser.parseStatementList();
        Assert.assertEquals(1, statements.size());

        MySqlInsertStatement insert = (MySqlInsertStatement) statements.get(0);
        Assert.assertEquals(3, insert.getValuesList().size());
        Assert.assertEquals(2, insert.getValues().getValues().size());
        Assert.assertEquals(2, insert.getColumns().size());

        // Walking the AST with the schema visitor must not throw.
        insert.accept(new MySqlSchemaStatVisitor());

        String expected = "INSERT INTO t (a, b)"
                + "\nVALUES ('a1', 'b1'),"
                + "\n\t('a2', 'b2'),"
                + "\n\t('a3', 'b3')";
        Assert.assertEquals(expected, SQLUtils.toMySqlString(insert));
    }

    /**
     * With complete-values parsing enabled, all four tuples are retained and
     * rendered by the formatter.
     */
    public void test_parseCompleteValues_true() throws Exception {
        MySqlStatementParser parser = new MySqlStatementParser(SQL);
        parser.setParseCompleteValues(true);

        List<SQLStatement> statements = parser.parseStatementList();
        Assert.assertEquals(1, statements.size());

        MySqlInsertStatement insert = (MySqlInsertStatement) statements.get(0);
        Assert.assertEquals(4, insert.getValuesList().size());
        Assert.assertEquals(2, insert.getValues().getValues().size());
        Assert.assertEquals(2, insert.getColumns().size());

        // Walking the AST with the schema visitor must not throw.
        insert.accept(new MySqlSchemaStatVisitor());

        String expected = "INSERT INTO t (a, b)"
                + "\nVALUES ('a1', 'b1'),"
                + "\n\t('a2', 'b2'),"
                + "\n\t('a3', 'b3'),"
                + "\n\t('a4', 'b4')";
        Assert.assertEquals(expected, SQLUtils.toMySqlString(insert));
    }
}
| apache-2.0 |