repo_name stringlengths 5 108 | path stringlengths 6 333 | size stringlengths 1 6 | content stringlengths 4 977k | license stringclasses 15 values |
|---|---|---|---|---|
SevakAvet/PhysLab | src/ObjectListener.java | 77 | /**
 * Callback contract for parties that want to be notified when an observed
 * object is modified.
 */
public interface ObjectListener {

    /**
     * Invoked after the supplied object has changed.
     *
     * @param o the object whose state changed
     */
    void objectChanged(Object o);
}
| apache-2.0 |
Bernardo-MG/dreadball-toolkit-webpage | src/main/java/com/bernardomg/tabletop/dreadball/report/service/DreadballReportBuilder.java | 1497 | /**
* Copyright 2018 the original author or authors
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.bernardomg.tabletop.dreadball.report.service;
import java.io.IOException;
import java.io.OutputStream;
import com.bernardomg.tabletop.dreadball.model.team.SponsorTeam;
import com.itextpdf.text.DocumentException;
/**
 * Builds Dreadball reports.
 *
 * @author Bernardo Martínez Garrido
 */
public interface DreadballReportBuilder {

    /**
     * Generates a PDF report for the received team and writes it into the
     * received output stream.
     *
     * @param team
     *            team whose data will fill the report
     * @param output
     *            stream where the generated report will be written
     *
     * @throws IOException
     *             when writing to the output stream fails
     * @throws DocumentException
     *             when the PDF document can not be generated
     */
    void createPdf(final SponsorTeam team, final OutputStream output)
            throws IOException, DocumentException;
}
| apache-2.0 |
metaborg/jsglr | org.spoofax.interpreter.library.jsglr/src/main/java/org/spoofax/interpreter/library/jsglr/origin/OriginTextPrimitive.java | 874 | package org.spoofax.interpreter.library.jsglr.origin;
import static org.spoofax.jsglr.client.imploder.ImploderAttachment.getLeftToken;
import static org.spoofax.jsglr.client.imploder.ImploderAttachment.getRightToken;
import static org.spoofax.jsglr.client.imploder.ImploderAttachment.getTokenizer;
import org.spoofax.interpreter.core.IContext;
import org.spoofax.interpreter.terms.IStrategoTerm;
/**
 * Primitive that yields the original source text covered by an origin term.
 *
 * @author Lennart Kats <lennart add lclnet.nl>
 */
public class OriginTextPrimitive extends AbstractOriginPrimitive {

    public OriginTextPrimitive() {
        super("SSL_EXT_origin_text");
    }

    @Override
    protected IStrategoTerm call(IContext env, IStrategoTerm origin) {
        // Slice the source text between the origin's outermost tokens.
        String text = getTokenizer(origin).toString(
                getLeftToken(origin), getRightToken(origin));
        return env.getFactory().makeString(text);
    }
}
| apache-2.0 |
trifonnt/_book-learning-spring-boot-code | ch3/network-manager-4/src/main/java/learningspringboot/Application.java | 1813 | package learningspringboot;
import javax.jms.ConnectionFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.boot.autoconfigure.jms.JmsProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
import org.springframework.jms.core.JmsTemplate;
import org.springframework.jms.listener.SimpleMessageListenerContainer;
import org.springframework.jms.listener.adapter.MessageListenerAdapter;
import org.springframework.scheduling.annotation.EnableScheduling;
@Configuration
@ComponentScan
@EnableScheduling
@EnableAutoConfiguration
public class Application {

    /** Name of the JMS destination used to exchange network events. */
    private static final String MAILBOX = "events";

    @Autowired
    private JmsProperties properties;

    /**
     * Adapter that routes incoming JMS messages to
     * {@code NetworkEventConsumer.process(...)}.
     */
    @Bean
    MessageListenerAdapter adapter(NetworkEventConsumer consumer) {
        MessageListenerAdapter listenerAdapter = new MessageListenerAdapter(consumer);
        listenerAdapter.setDefaultListenerMethod("process");
        return listenerAdapter;
    }

    /**
     * Listener container wired to the {@value #MAILBOX} destination, honoring
     * the configured pub/sub setting.
     */
    @Bean
    SimpleMessageListenerContainer container(MessageListenerAdapter consumer,
            ConnectionFactory factory) {
        SimpleMessageListenerContainer listenerContainer =
                new SimpleMessageListenerContainer();
        listenerContainer.setMessageListener(consumer);
        listenerContainer.setConnectionFactory(factory);
        listenerContainer.setPubSubDomain(this.properties.isPubSubDomain());
        listenerContainer.setDestinationName(MAILBOX);
        return listenerContainer;
    }

    /** Simulator that periodically publishes events to {@value #MAILBOX}. */
    @Bean
    NetworkEventSimulator simulator(JmsTemplate jmsTemplate) {
        return new NetworkEventSimulator(jmsTemplate, MAILBOX);
    }

    public static void main(String[] args) {
        SpringApplication.run(Application.class, args);
    }
}
| apache-2.0 |
andreabertagnolli/orika | tests/src/main/java/ma/glasnost/orika/test/community/Issue26TestCase.java | 1939 | /*
* Orika - simpler, better and faster Java bean mapping
*
* Copyright (C) 2011-2013 Orika authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ma.glasnost.orika.test.community;
import org.junit.Assert;
import org.junit.Test;
import ma.glasnost.orika.MapperFacade;
import ma.glasnost.orika.MapperFactory;
import ma.glasnost.orika.test.MappingUtil;
import ma.glasnost.orika.test.community.issue26.Order;
import ma.glasnost.orika.test.community.issue26.OrderData;
import ma.glasnost.orika.test.community.issue26.OrderID;
import ma.glasnost.orika.test.community.issue26.OrderIDConverter;
/**
 * Generic super-type not recognized.
 * <p>
 *
 * @see <a href="https://code.google.com/archive/p/orika/issues/26">https://code.google.com/archive/p/orika/</a>
 *
 */
public class Issue26TestCase {

    @Test
    public void testMapping() {
        MapperFactory mapperFactory = MappingUtil.getMapperFactory();
        mapperFactory.registerClassMap(
                mapperFactory.classMap(Order.class, OrderData.class)
                        .field("entityID", "orderId").byDefault().toClassMap());
        mapperFactory.getConverterFactory().registerConverter(new OrderIDConverter());
        MapperFacade facade = mapperFactory.getMapperFacade();
        // Uppercase 'L' suffix: the original used '1234l', whose lowercase 'l'
        // is easily misread as the digit '1'. The value is unchanged.
        OrderData data = new OrderData(1234L);
        Order order = facade.map(data, Order.class);
        // The converter should reconstruct the typed OrderID from the raw long.
        Assert.assertEquals(new OrderID(1234L), order.getEntityID());
    }
}
| apache-2.0 |
sarah-happy/happy-archive | archive/src/main/java/org/yi/happy/archive/index/IndexStoreFileStore.java | 2659 | package org.yi.happy.archive.index;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.zip.GZIPInputStream;
import org.yi.happy.archive.EnvIndex;
import org.yi.happy.archive.FileStore;
import com.google.inject.Inject;
/**
 * An {@link IndexStore} in a {@link FileStore}.
 *
 * Indexes are laid out as {@code base/volumeSet/volumeName}, where a volume
 * file may optionally be gzip compressed (suffix {@code .gz}).
 */
public class IndexStoreFileStore implements IndexStore {
    private final FileStore files;
    private final String base;

    /**
     * Set up.
     *
     * @param files
     *            the {@link FileStore} where the indexes are.
     * @param base
     *            the base file name.
     */
    @Inject
    public IndexStoreFileStore(FileStore files, @EnvIndex String base) {
        this.files = files;
        this.base = base;
    }

    @Override
    public List<String> listVolumeSets() throws IOException {
        // Every directory directly under base is a volume set.
        List<String> out = new ArrayList<>();
        if (files.isDir(base)) {
            for (String name : files.list(base)) {
                if (files.isDir(base + "/" + name)) {
                    out.add(name);
                }
            }
        }
        Collections.sort(out);
        return out;
    }

    @Override
    public List<String> listVolumeNames(String volumeSet) throws IOException {
        // Every regular file in the volume set directory is a volume; the
        // .gz suffix of compressed volumes is hidden from callers.
        List<String> out = new ArrayList<>();
        String path = base + "/" + volumeSet;
        if (files.isDir(path)) {
            for (String name : files.list(path)) {
                if (files.isFile(path + "/" + name)) {
                    if (name.endsWith(".gz")) {
                        name = name.substring(0, name.length() - 3);
                    }
                    out.add(name);
                }
            }
        }
        Collections.sort(out);
        return out;
    }

    @Override
    public Reader open(String volumeSet, String volumeName) throws IOException {
        String name = base + "/" + volumeSet + "/" + volumeName;
        // Track the raw stream so it can be closed if anything fails before
        // ownership is handed to the returned Reader.
        InputStream in = null;
        try {
            InputStream stream;
            try {
                // Prefer the uncompressed volume file.
                in = files.getStream(name);
                stream = in;
            } catch (FileNotFoundException e) {
                // Fall back to the gzip compressed variant.
                in = files.getStream(name + ".gz");
                stream = new GZIPInputStream(in);
            }
            // Use the charset constant rather than the name "utf-8": no
            // charset-lookup exception possible, and no string parsing.
            Reader reader = new InputStreamReader(stream, StandardCharsets.UTF_8);
            // Success: the reader now owns the stream, so disarm the cleanup.
            in = null;
            return reader;
        } finally {
            if (in != null) {
                in.close();
            }
        }
    }
}
| apache-2.0 |
majetideepak/arrow | java/memory/src/main/java/org/apache/arrow/memory/Accountant.java | 10859 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.arrow.memory;
import java.util.concurrent.atomic.AtomicLong;
import javax.annotation.concurrent.ThreadSafe;
import org.apache.arrow.util.Preconditions;
/**
 * Provides a concurrent way to manage account for memory usage without locking. Used as basis
 * for Allocators. All
 * operations are threadsafe (except for close).
 */
@ThreadSafe
class Accountant implements AutoCloseable {

  /**
   * The parent allocator.
   */
  protected final Accountant parent;

  /** Name of this accountant, used in messages and debugging output. */
  private final String name;

  /**
   * The amount of memory reserved for this allocator. Releases below this amount of memory will
   * not be returned to the
   * parent Accountant until this Accountant is closed.
   */
  protected final long reservation;

  /** High-water mark of locally held memory, maintained lock-free via CAS. */
  private final AtomicLong peakAllocation = new AtomicLong();

  /**
   * Maximum local memory that can be held. This can be externally updated. Changing it won't
   * cause past memory to
   * change but will change responses to future allocation efforts
   */
  private final AtomicLong allocationLimit = new AtomicLong();

  /**
   * Currently allocated amount of memory.
   */
  private final AtomicLong locallyHeldMemory = new AtomicLong();

  public Accountant(Accountant parent, String name, long reservation, long maxAllocation) {
    Preconditions.checkNotNull(name, "name must not be null");
    Preconditions.checkArgument(reservation >= 0, "The initial reservation size must be " +
        "non-negative.");
    Preconditions.checkArgument(maxAllocation >= 0, "The maximum allocation limit must be " +
        "non-negative.");
    Preconditions.checkArgument(reservation <= maxAllocation,
        "The initial reservation size must be <= the maximum allocation.");
    Preconditions.checkArgument(reservation == 0 || parent != null, "The root accountant can't " +
        "reserve memory.");

    this.parent = parent;
    this.name = name;
    this.reservation = reservation;
    this.allocationLimit.set(maxAllocation);

    if (reservation != 0) {
      // we will allocate a reservation from our parent.
      final AllocationOutcome outcome = parent.allocateBytes(reservation);
      if (!outcome.isOk()) {
        // Fix: the original format string had no %s for the status argument,
        // so the failure status was silently dropped from the message.
        throw new OutOfMemoryException(String.format(
            "Failure trying to allocate initial reservation for Allocator. " +
                "Attempted to allocate %d bytes. Status: %s.", reservation,
            outcome.getStatus().name()), outcome.getDetails());
      }
    }
  }

  /**
   * Attempt to allocate the requested amount of memory. Either completely succeeds or completely
   * fails. If it fails, no changes are made to accounting.
   *
   * @param size The amount of memory to reserve in bytes.
   * @return the status and details of allocation at each allocator in the chain.
   */
  AllocationOutcome allocateBytes(long size) {
    AllocationOutcome.Status status = allocateBytesInternal(size);
    if (status.isOk()) {
      return AllocationOutcome.SUCCESS_INSTANCE;
    } else {
      // Try again, but with details this time.
      // Populating details only on failures avoids performance overhead in the common case
      // (success case).
      AllocationOutcomeDetails details = new AllocationOutcomeDetails();
      status = allocateBytesInternal(size, details);
      return new AllocationOutcome(status, details);
    }
  }

  private AllocationOutcome.Status allocateBytesInternal(long size, AllocationOutcomeDetails details) {
    final AllocationOutcome.Status status = allocate(size,
        true /*incomingUpdatePeek*/, false /*forceAllocation*/, details);
    if (!status.isOk()) {
      // allocate() always applies the full accounting; unwind it on failure.
      releaseBytes(size);
    }
    return status;
  }

  private AllocationOutcome.Status allocateBytesInternal(long size) {
    return allocateBytesInternal(size, null /*details*/);
  }

  /** CAS loop that raises the peak-allocation high-water mark if needed. */
  private void updatePeak() {
    final long currentMemory = locallyHeldMemory.get();
    while (true) {
      final long previousPeak = peakAllocation.get();
      if (currentMemory > previousPeak) {
        if (!peakAllocation.compareAndSet(previousPeak, currentMemory)) {
          // peak allocation changed underneath us. try again.
          continue;
        }
      }
      // we either succeeded to set peak allocation or we weren't above the previous peak, exit.
      return;
    }
  }

  /**
   * Increase the accounting. Returns whether the allocation fit within limits.
   *
   * @param size to increase
   * @return Whether the allocation fit within limits.
   */
  boolean forceAllocate(long size) {
    final AllocationOutcome.Status outcome = allocate(size, true, true, null);
    return outcome.isOk();
  }

  /**
   * Internal method for allocation. This takes a forced approach to allocation to ensure that we
   * manage reservation
   * boundary issues consistently. Allocation is always done through the entire tree. The two
   * options that we influence
   * are whether the allocation should be forced and whether or not the peak memory allocation
   * should be updated. If at
   * some point during allocation escalation we determine that the allocation is no longer
   * possible, we will continue to
   * do a complete and consistent allocation but we will stop updating the peak allocation. We do
   * this because we know
   * that we will be directly unwinding this allocation (and thus never actually making the
   * allocation). If force
   * allocation is passed, then we continue to update the peak limits since we now know that this
   * allocation will occur
   * despite our moving past one or more limits.
   *
   * @param size The size of the allocation.
   * @param incomingUpdatePeak Whether we should update the local peak for this allocation.
   * @param forceAllocation Whether we should force the allocation.
   * @return The outcome of the allocation.
   */
  private AllocationOutcome.Status allocate(final long size, final boolean incomingUpdatePeak,
      final boolean forceAllocation, AllocationOutcomeDetails details) {
    final long newLocal = locallyHeldMemory.addAndGet(size);
    final long beyondReservation = newLocal - reservation;
    final boolean beyondLimit = newLocal > allocationLimit.get();
    final boolean updatePeak = forceAllocation || (incomingUpdatePeak && !beyondLimit);

    if (details != null) {
      // Add details if required (used in exceptions and debugging).
      boolean allocationFailed = true;
      long allocatedLocal = 0;
      if (!beyondLimit) {
        allocatedLocal = size - Math.min(beyondReservation, size);
        allocationFailed = false;
      }
      details.pushEntry(this, newLocal - size, size, allocatedLocal, allocationFailed);
    }

    AllocationOutcome.Status parentOutcome = AllocationOutcome.Status.SUCCESS;
    if (beyondReservation > 0 && parent != null) {
      // we need to get memory from our parent.
      final long parentRequest = Math.min(beyondReservation, size);
      parentOutcome = parent.allocate(parentRequest, updatePeak, forceAllocation, details);
    }

    final AllocationOutcome.Status finalOutcome;
    if (beyondLimit) {
      finalOutcome = AllocationOutcome.Status.FAILED_LOCAL;
    } else {
      finalOutcome = parentOutcome.isOk() ? AllocationOutcome.Status.SUCCESS
          : AllocationOutcome.Status.FAILED_PARENT;
    }
    if (updatePeak) {
      updatePeak();
    }
    return finalOutcome;
  }

  public void releaseBytes(long size) {
    // reduce local memory. all memory released above reservation should be released up the tree.
    final long newSize = locallyHeldMemory.addAndGet(-size);

    Preconditions.checkArgument(newSize >= 0, "Accounted size went negative.");

    final long originalSize = newSize + size;

    if (originalSize > reservation && parent != null) {
      // we deallocated memory that we should release to our parent.
      final long possibleAmountToReleaseToParent = originalSize - reservation;
      final long actualToReleaseToParent = Math.min(size, possibleAmountToReleaseToParent);
      parent.releaseBytes(actualToReleaseToParent);
    }
  }

  public boolean isOverLimit() {
    return getAllocatedMemory() > getLimit() || (parent != null && parent.isOverLimit());
  }

  /**
   * Close this Accountant. This will release any reservation bytes back to a parent Accountant.
   */
  @Override
  public void close() {
    // return memory reservation to parent allocator.
    if (parent != null) {
      parent.releaseBytes(reservation);
    }
  }

  /**
   * Return the name of the accountant.
   *
   * @return name of accountant
   */
  public String getName() {
    return name;
  }

  /**
   * Return the current limit of this Accountant.
   *
   * @return Limit in bytes.
   */
  public long getLimit() {
    return allocationLimit.get();
  }

  /**
   * Return the initial reservation.
   *
   * @return reservation in bytes.
   */
  public long getInitReservation() {
    return reservation;
  }

  /**
   * Set the maximum amount of memory that can be allocated in the this Accountant before failing
   * an allocation.
   *
   * @param newLimit The limit in bytes.
   */
  public void setLimit(long newLimit) {
    allocationLimit.set(newLimit);
  }

  /**
   * Return the current amount of allocated memory that this Accountant is managing accounting
   * for. Note this does not
   * include reservation memory that hasn't been allocated.
   *
   * @return Currently allocate memory in bytes.
   */
  public long getAllocatedMemory() {
    return locallyHeldMemory.get();
  }

  /**
   * The peak memory allocated by this Accountant.
   *
   * @return The peak allocated memory in bytes.
   */
  public long getPeakMemoryAllocation() {
    return peakAllocation.get();
  }

  public long getHeadroom() {
    long localHeadroom = allocationLimit.get() - locallyHeldMemory.get();
    if (parent == null) {
      return localHeadroom;
    }

    // Amount of reserved memory left on top of what parent has
    long reservedHeadroom = Math.max(0, reservation - locallyHeldMemory.get());
    return Math.min(localHeadroom, parent.getHeadroom() + reservedHeadroom);
  }
}
| apache-2.0 |
wuyinlei/MyHearts | app/src/main/java/ruolan/com/myhearts/widget/dialog/CustomPrograss.java | 3304 | package ruolan.com.myhearts.widget.dialog;
import android.app.Dialog;
import android.content.Context;
import android.graphics.drawable.AnimationDrawable;
import android.view.Gravity;
import android.view.View;
import android.view.WindowManager;
import android.widget.ImageView;
import android.widget.TextView;
import ruolan.com.myhearts.R;
/**
 * Custom progress dialog that shows an animated spinner and an optional
 * message. Managed through the static {@link #show} / {@link #disMiss} pair.
 *
 * Created by renren on 2016/9/12.
 */
public class CustomPrograss extends Dialog {

    // Shared instance created by show() and released by disMiss(). Must be
    // nulled on dismissal so the dialog and its Context can be collected.
    private static CustomPrograss sPrograss;

    public CustomPrograss(Context context) {
        super(context);
    }

    private CustomPrograss(Context context, int themeResId) {
        super(context, themeResId);
    }

    protected CustomPrograss(Context context, boolean cancelable, OnCancelListener cancelListener) {
        super(context, cancelable, cancelListener);
    }

    /**
     * Called when the window focus changes; starts the spinner animation once
     * the dialog becomes visible.
     */
    public void onWindowFocusChanged(boolean hasFocus) {
        ImageView imageView = (ImageView) findViewById(R.id.spinnerImageView);
        if (imageView == null) {
            // Content view not set yet or spinner missing from the layout.
            return;
        }
        // The animation is the ImageView's background drawable.
        AnimationDrawable spinner = (AnimationDrawable) imageView.getBackground();
        spinner.start();
    }

    /**
     * Sets the message shown by the dialog.
     *
     * @param message the text to display; ignored when null or empty
     */
    public void setMessage(CharSequence message) {
        if (message != null && message.length() > 0) {
            // Single lookup instead of the original duplicate findViewById calls.
            TextView txt = (TextView) findViewById(R.id.message);
            txt.setVisibility(View.VISIBLE);
            txt.setText(message);
            txt.invalidate();
        }
    }

    /**
     * Shows a custom progress dialog.
     *
     * @param context        context to create the dialog in
     * @param message        message to display; hidden when null or empty
     * @param cancelable     whether the back key dismisses the dialog
     * @param cancelListener listener notified when the dialog is cancelled
     * @return the shown CustomPrograss instance
     */
    public static CustomPrograss show(Context context, CharSequence message, boolean cancelable, OnCancelListener cancelListener) {
        sPrograss = new CustomPrograss(context, R.style.Custom_Progress);
        sPrograss.setTitle("");
        sPrograss.setContentView(R.layout.custom_prograss_dialog_layout);
        if (message == null || message.length() == 0) {
            sPrograss.findViewById(R.id.message).setVisibility(View.GONE);
        } else {
            TextView txt = (TextView) sPrograss.findViewById(R.id.message);
            txt.setText(message);
        }
        // Whether pressing back cancels the dialog.
        sPrograss.setCancelable(cancelable);
        if (cancelListener != null) {
            sPrograss.setOnCancelListener(cancelListener);
        }
        // Center the dialog on screen.
        sPrograss.getWindow().getAttributes().gravity = Gravity.CENTER;
        WindowManager.LayoutParams lp = sPrograss.getWindow().getAttributes();
        // Dim the background layer slightly.
        lp.dimAmount = 0.2f;
        sPrograss.getWindow().setAttributes(lp);
        sPrograss.show();
        return sPrograss;
    }

    /**
     * Dismisses the dialog if it is showing and releases the static reference
     * (the original kept the reference forever, leaking the dialog and the
     * Context it was created with).
     */
    public static void disMiss() {
        if (sPrograss != null) {
            if (sPrograss.isShowing()) {
                sPrograss.dismiss();
            }
            sPrograss = null;
        }
    }
}
| apache-2.0 |
brianchen2012/syncope | console/src/main/java/org/apache/syncope/console/pages/Login.java | 10635 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.syncope.console.pages;
import java.util.Locale;
import java.util.Set;
import org.apache.http.HttpResponse;
import org.apache.http.auth.UsernamePasswordCredentials;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.util.EntityUtils;
import org.apache.syncope.client.http.PreemptiveAuthHttpRequestFactory;
import org.apache.syncope.common.services.EntitlementService;
import org.apache.syncope.common.services.UserRequestService;
import org.apache.syncope.common.to.EntitlementTO;
import org.apache.syncope.common.to.UserTO;
import org.apache.syncope.common.util.CollectionWrapper;
import org.apache.syncope.console.SyncopeSession;
import org.apache.syncope.console.wicket.ajax.markup.html.ClearIndicatingAjaxLink;
import org.apache.syncope.console.wicket.markup.html.form.LinkPanel;
import org.apache.wicket.Page;
import org.apache.wicket.ajax.AjaxRequestTarget;
import org.apache.wicket.ajax.markup.html.AjaxLink;
import org.apache.wicket.extensions.ajax.markup.html.modal.ModalWindow;
import org.apache.wicket.markup.html.WebPage;
import org.apache.wicket.markup.html.basic.Label;
import org.apache.wicket.markup.html.form.Button;
import org.apache.wicket.markup.html.form.ChoiceRenderer;
import org.apache.wicket.markup.html.form.DropDownChoice;
import org.apache.wicket.markup.html.form.Form;
import org.apache.wicket.markup.html.form.PasswordTextField;
import org.apache.wicket.markup.html.form.TextField;
import org.apache.wicket.markup.html.panel.FeedbackPanel;
import org.apache.wicket.markup.html.panel.Fragment;
import org.apache.wicket.markup.html.panel.Panel;
import org.apache.wicket.model.IModel;
import org.apache.wicket.model.Model;
import org.apache.wicket.model.ResourceModel;
import org.apache.wicket.request.mapper.parameter.PageParameters;
import org.apache.wicket.spring.injection.annot.SpringBean;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.web.client.HttpClientErrorException;
import org.springframework.web.client.RestTemplate;
/**
 * Syncope Login page.
 */
public class Login extends WebPage {

    /**
     * Logger.
     */
    private static final Logger LOG = LoggerFactory.getLogger(Login.class);

    private static final long serialVersionUID = -3744389270366566218L;

    // Size of the self-registration modal window, in pixels.
    private final static int SELF_REG_WIN_HEIGHT = 550;

    private final static int SELF_REG_WIN_WIDTH = 800;

    // REST base URL injected from the Spring context (bean "baseURL").
    @SpringBean(name = "baseURL")
    private String baseURL;

    // Login form and its input components.
    private Form form;

    private TextField userIdField;

    private TextField passwordField;

    private DropDownChoice<Locale> languageSelect;

    /**
     * Builds the login page: user id / password fields, language selector,
     * submit button and (when enabled) a self-registration modal link.
     *
     * @param parameters page parameters, forwarded to WelcomePage on success
     */
    public Login(final PageParameters parameters) {
        super(parameters);
        form = new Form("login");
        userIdField = new TextField("userId", new Model());
        userIdField.setMarkupId("userId");
        form.add(userIdField);
        passwordField = new PasswordTextField("password", new Model());
        passwordField.setMarkupId("password");
        form.add(passwordField);
        languageSelect = new LocaleDropDown("language");
        form.add(languageSelect);
        Button submitButton = new Button("submit", new Model<String>(getString("submit"))) {

            private static final long serialVersionUID = 429178684321093953L;

            @Override
            public void onSubmit() {
                try {
                    // Authenticate against the core and stash the resulting
                    // entitlements in the session.
                    String[] entitlements = authenticate(userIdField.getRawInput(), passwordField.getRawInput());
                    SyncopeSession.get().setUserId(userIdField.getRawInput());
                    SyncopeSession.get().setEntitlements(entitlements);
                    SyncopeSession.get().setVersion(getSyncopeVersion());
                    setResponsePage(WelcomePage.class, parameters);
                } catch (HttpClientErrorException e) {
                    error(getString("login-error"));
                    // Drop the failed credentials from the shared HTTP client
                    // so they are not retried on the next attempt.
                    PreemptiveAuthHttpRequestFactory requestFactory =
                            ((PreemptiveAuthHttpRequestFactory) SyncopeSession.
                            get().getRestTemplate().getRequestFactory());
                    ((DefaultHttpClient) requestFactory.getHttpClient()).getCredentialsProvider().clear();
                }
            }
        };
        // Raw input is read directly above, so skip normal form processing.
        submitButton.setDefaultFormProcessing(false);
        form.add(submitButton);
        add(form);
        add(new FeedbackPanel("feedback"));
        // Modal window for self registration
        final ModalWindow editProfileModalWin = new ModalWindow("selfRegModal");
        editProfileModalWin.setCssClassName(ModalWindow.CSS_CLASS_GRAY);
        editProfileModalWin.setInitialHeight(SELF_REG_WIN_HEIGHT);
        editProfileModalWin.setInitialWidth(SELF_REG_WIN_WIDTH);
        editProfileModalWin.setCookieName("self-reg-modal");
        add(editProfileModalWin);
        // Only render the self-registration link when the core allows it.
        Fragment selfRegFrag;
        if (isSelfRegistrationAllowed()) {
            selfRegFrag = new Fragment("selfRegistration", "selfRegAllowed", this);
            final AjaxLink selfRegLink = new ClearIndicatingAjaxLink("link", getPageReference()) {

                private static final long serialVersionUID = -7978723352517770644L;

                @Override
                protected void onClickInternal(final AjaxRequestTarget target) {
                    editProfileModalWin.setPageCreator(new ModalWindow.PageCreator() {

                        private static final long serialVersionUID = -7834632442532690940L;

                        @Override
                        public Page createPage() {
                            // Open the user request page in self-service mode
                            // with a blank user.
                            return new UserRequestModalPage(Login.this.getPageReference(), editProfileModalWin,
                                    new UserTO(), UserModalPage.Mode.SELF);
                        }
                    });
                    editProfileModalWin.show(target);
                }
            };
            selfRegLink.add(new Label("linkTitle", getString("selfRegistration")));
            Panel panel = new LinkPanel("selfRegistration", new ResourceModel("selfRegistration"));
            panel.add(selfRegLink);
            selfRegFrag.add(panel);
        } else {
            selfRegFrag = new Fragment("selfRegistration", "selfRegNotAllowed", this);
        }
        add(selfRegFrag);
    }

    /**
     * Authenticates against the core by setting the credentials on the shared
     * REST client and querying the caller's entitlements.
     *
     * @param userId   user id to authenticate as
     * @param password clear-text password
     * @return the entitlements granted to the authenticated user
     * @throws HttpClientErrorException if authentication fails
     */
    private String[] authenticate(final String userId, final String password) {
        final RestTemplate restTemplate = SyncopeSession.get().getRestTemplate();
        // 1. Set provided credentials to check
        PreemptiveAuthHttpRequestFactory requestFactory =
                ((PreemptiveAuthHttpRequestFactory) restTemplate.getRequestFactory());
        ((DefaultHttpClient) requestFactory.getHttpClient()).getCredentialsProvider().setCredentials(
                requestFactory.getAuthScope(), new UsernamePasswordCredentials(userId, password));
        // 2. Search authorizations for user specified by credentials
        Set<EntitlementTO> entitlements = SyncopeSession.get().getService(EntitlementService.class).getMyEntitlements();
        return CollectionWrapper.unwrap(entitlements).toArray(new String[0]);
    }

    /**
     * Asks the core whether self registration is enabled.
     *
     * @return true when the core reports that user self-creation is allowed;
     *         false when it is disallowed or the call fails
     */
    private boolean isSelfRegistrationAllowed() {
        Boolean result = null;
        try {
            result = SyncopeSession.get().getService(UserRequestService.class).isCreateAllowed();
        } catch (HttpClientErrorException e) {
            // Best-effort: log and fall back to "not allowed".
            LOG.error("While seeking if self registration is allowed", e);
        }
        return result == null
                ? false
                : result.booleanValue();
    }

    /**
     * Fetches the core version by reading version.jsp relative to the REST
     * base URL.
     *
     * @return the version string, or an empty string when the call fails (the
     *         error is logged and reported through the session)
     */
    private String getSyncopeVersion() {
        final RestTemplate restTemplate = SyncopeSession.get().getRestTemplate();
        PreemptiveAuthHttpRequestFactory requestFactory = ((PreemptiveAuthHttpRequestFactory) restTemplate.
                getRequestFactory());
        String version = "";
        try {
            HttpGet get = new HttpGet(baseURL + "../version.jsp");
            HttpResponse response = requestFactory.getHttpClient().execute(get);
            version = EntityUtils.toString(response.getEntity()).trim();
        } catch (Exception e) {
            LOG.error("While fetching version from core", e);
            getSession().error(e.getMessage());
        }
        return version;
    }

    /**
     * Inner class which implements (custom) Locale DropDownChoice component.
     */
    private class LocaleDropDown extends DropDownChoice<Locale> {

        private static final long serialVersionUID = 2349382679992357202L;

        /** Renders each locale under its display name in the current locale. */
        private class LocaleRenderer extends ChoiceRenderer<Locale> {

            private static final long serialVersionUID = -3657529581555164741L;

            @Override
            public String getDisplayValue(final Locale locale) {
                return locale.getDisplayName(getLocale());
            }
        }

        public LocaleDropDown(final String id) {
            super(id, SyncopeSession.SUPPORTED_LOCALES);
            setChoiceRenderer(new LocaleRenderer());
            // Model reads and writes the session locale directly.
            setModel(new IModel<Locale>() {

                private static final long serialVersionUID = -6985170095629312963L;

                @Override
                public Locale getObject() {
                    return getSession().getLocale();
                }

                @Override
                public void setObject(final Locale object) {
                    getSession().setLocale(object);
                }

                @Override
                public void detach() {
                    // Empty.
                }
            });
            // set default value to English
            getModel().setObject(Locale.ENGLISH);
        }

        @Override
        protected boolean wantOnSelectionChangedNotifications() {
            // Apply the locale change immediately on selection.
            return true;
        }
    }
}
| apache-2.0 |
bozimmerman/CoffeeMud | com/planet_ink/coffee_mud/Abilities/Fighter/Fighter_CriticalShot.java | 3496 | package com.planet_ink.coffee_mud.Abilities.Fighter;
import com.planet_ink.coffee_mud.core.interfaces.*;
import com.planet_ink.coffee_mud.core.*;
import com.planet_ink.coffee_mud.core.collections.*;
import com.planet_ink.coffee_mud.Abilities.interfaces.*;
import com.planet_ink.coffee_mud.Areas.interfaces.*;
import com.planet_ink.coffee_mud.Behaviors.interfaces.*;
import com.planet_ink.coffee_mud.CharClasses.interfaces.*;
import com.planet_ink.coffee_mud.Commands.interfaces.*;
import com.planet_ink.coffee_mud.Common.interfaces.*;
import com.planet_ink.coffee_mud.Exits.interfaces.*;
import com.planet_ink.coffee_mud.Items.interfaces.*;
import com.planet_ink.coffee_mud.Libraries.interfaces.*;
import com.planet_ink.coffee_mud.Locales.interfaces.*;
import com.planet_ink.coffee_mud.MOBS.interfaces.*;
import com.planet_ink.coffee_mud.Races.interfaces.*;
import java.util.*;
/*
Copyright 2003-2022 Bo Zimmerman
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
public class Fighter_CriticalShot extends FighterSkill
{
	@Override
	public String ID()
	{
		return "Fighter_CriticalShot";
	}

	private final static String localizedName = CMLib.lang().L("Critical Shot");

	@Override
	public String name()
	{
		return localizedName;
	}

	@Override
	public String displayText()
	{
		// No persistent affect text is shown for this passive skill.
		return "";
	}

	@Override
	public int abstractQuality()
	{
		return Ability.QUALITY_BENEFICIAL_SELF;
	}

	@Override
	protected int canAffectCode()
	{
		return Ability.CAN_MOBS;
	}

	@Override
	protected int canTargetCode()
	{
		return 0;
	}

	@Override
	public boolean isAutoInvoked()
	{
		// Passive skill: applies automatically once learned.
		return true;
	}

	@Override
	public boolean canBeUninvoked()
	{
		return false;
	}

	@Override
	public int classificationCode()
	{
		return Ability.ACODE_SKILL|Ability.DOMAIN_MARTIALLORE;
	}

	/**
	 * Message filter that implements the skill: when the affected mob lands a
	 * damaging hit on its current victim with a ranged or thrown weapon, a
	 * proficiency-and-dexterity check may boost the damage by a random
	 * percentage of the base damage scaled by proficiency.
	 */
	@Override
	public boolean okMessage(final Environmental myHost, final CMMsg msg)
	{
		if(!super.okMessage(myHost,msg))
			return false;
		if(!(affected instanceof MOB))
			return true;
		final MOB mob=(MOB)affected;
		// Conditions, in order: the affected mob is the attacker and is alive,
		// awake and mobile; the message is damage against its current victim;
		// the tool is a ranged or thrown weapon; either the attacker is at
		// range or the weapon has no minimum range; and the skill check
		// (DEX and skill-level adjusted) succeeds.
		if(msg.amISource(mob)
		&&(CMLib.flags().isAliveAwakeMobile(mob,true))
		&&(msg.targetMinor()==CMMsg.TYP_DAMAGE)
		&&(msg.target()!=null)
		&&(mob.getVictim()==msg.target())
		&&(msg.tool() instanceof Weapon)
		&&((((Weapon)msg.tool()).weaponClassification()==Weapon.CLASS_RANGED)
			||(((Weapon)msg.tool()).weaponClassification()==Weapon.CLASS_THROWN))
		&&((mob.rangeToTarget()>0)||((((Weapon)msg.tool()).phyStats().sensesMask()&PhyStats.SENSE_ITEMNOMINRANGE)==PhyStats.SENSE_ITEMNOMINRANGE))
		&&((mob.fetchAbility(ID())==null)||proficiencyCheck(null,(-75)+mob.charStats().getStat(CharStats.STAT_DEXTERITY)+(2*getXLEVELLevel(mob)),false)))
		{
			// Bonus is a random fraction (up to proficiency%) of the damage.
			final double pctRecovery=(CMath.div(proficiency(),100.0)*Math.random());
			final int bonus=(int)Math.round(CMath.mul((msg.value()),pctRecovery));
			msg.setValue(msg.value()+bonus);
			// Successful use trains the skill toward higher proficiency.
			helpProficiency(mob, 0);
		}
		return true;
	}
}
| apache-2.0 |
fakereplace/fakereplace | plugins/metawidget/src/main/java/org/fakereplace/integration/metawidget/MetawidgetClassChangeAware.java | 2647 | /*
* Copyright 2016, Stuart Douglas, and individual contributors as indicated
* by the @authors tag.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.fakereplace.integration.metawidget;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.fakereplace.api.ChangedClass;
import org.fakereplace.api.ClassChangeAware;
import org.fakereplace.api.NewClassData;
import org.fakereplace.data.InstanceTracker;
public class MetawidgetClassChangeAware implements ClassChangeAware {

    /** Cached handle to {@link Map#remove(Object)}, invoked reflectively to evict cache entries. */
    private static final Method remove;

    static {
        try {
            remove = Map.class.getMethod("remove", Object.class);
        } catch (Exception e) {
            // Map.remove(Object) always exists; failure here is unrecoverable.
            throw new RuntimeException(e);
        }
    }

    /**
     * Finds a declared field with the given name anywhere in the class hierarchy of
     * {@code clazz}, excluding {@link Object}.
     *
     * @param clazz class whose hierarchy is searched
     * @param name  name of the declared field to locate
     * @return the matching {@link Field}
     * @throws NoSuchFieldException if no class in the hierarchy declares the field
     */
    private static Field getField(Class<?> clazz, String name) throws NoSuchFieldException {
        if (clazz == Object.class)
            throw new NoSuchFieldException(name);
        try {
            return clazz.getDeclaredField(name);
        } catch (NoSuchFieldException e) {
            // not declared on this class; keep walking up the hierarchy
        }
        return getField(clazz.getSuperclass(), name);
    }

    /**
     * Clears Metawidget's action and property caches after a class redefinition, so stale
     * reflective metadata for the changed classes is rebuilt on next access.
     */
    @Override
    public void afterChange(List<ChangedClass> changed, List<NewClassData> added) {
        Set<?> data = InstanceTracker.get(MetawidgetExtension.BASE_ACTION_STYLE);
        for (Object i : data) {
            clearMap(changed, i, "mActionCache");
        }
        data = InstanceTracker.get(MetawidgetExtension.BASE_PROPERTY_STYLE);
        for (Object i : data) {
            clearMap(changed, i, "mPropertiesCache");
        }
    }

    /**
     * Reflectively removes the entries for all changed classes from the named private map
     * field of the given style instance. Best-effort: failures are reported but not
     * rethrown, so one uncooperative instance does not abort eviction for the rest.
     *
     * @param changed   classes whose cache entries should be evicted
     * @param i         instance holding the cache map
     * @param cacheName name of the private {@link Map}-typed field on the instance
     */
    public static void clearMap(List<ChangedClass> changed, Object i, String cacheName) {
        try {
            Field f = getField(i.getClass(), cacheName);
            f.setAccessible(true);
            Object map = f.get(i);
            for (ChangedClass c : changed) {
                remove.invoke(map, c.getChangedClass());
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
| apache-2.0 |
sflpro/ms_payment | services/services_core/src/main/java/com/sfl/pms/services/payment/redirect/dto/redirect/AdyenRedirectResultDto.java | 6369 | package com.sfl.pms.services.payment.redirect.dto.redirect;
import com.sfl.pms.services.payment.provider.model.PaymentProviderType;
import com.sfl.pms.services.payment.redirect.dto.PaymentProviderRedirectResultDto;
import com.sfl.pms.services.payment.redirect.model.adyen.AdyenRedirectResult;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.commons.lang3.builder.ToStringBuilder;
/**
* User: Ruben Dilanyan
* Company: SFL LLC
* Date: 7/11/15
* Time: 11:49 AM
*/
/**
 * DTO carrying the query parameters Adyen appends to the shopper's return redirect after a
 * hosted payment; used to populate an {@link AdyenRedirectResult} domain entity. All values
 * are kept as raw strings exactly as received — no parsing or validation at this layer.
 * <p>
 * User: Ruben Dilanyan
 * Company: SFL LLC
 * Date: 7/11/15
 * Time: 11:49 AM
 */
public class AdyenRedirectResultDto extends PaymentProviderRedirectResultDto<AdyenRedirectResult> {
    // Serialization id of this Serializable DTO; keep stable across revisions.
    private static final long serialVersionUID = -6250401111063370168L;
    /* Properties */
    // NOTE(review): these mirror Adyen's redirect parameters (authResult, pspReference,
    // merchantSig, ...). Their exact semantics are not visible in this class — confirm
    // against the Adyen integration documentation.
    private String authResult;
    private String pspReference;
    private String merchantReference;
    private String skinCode;
    private String merchantSig;
    private String paymentMethod;
    private String shopperLocale;
    private String merchantReturnData;
    /** Creates an empty DTO bound to the {@link PaymentProviderType#ADYEN} provider. */
    public AdyenRedirectResultDto() {
        super(PaymentProviderType.ADYEN);
    }
    /**
     * Creates a fully populated DTO bound to the {@link PaymentProviderType#ADYEN} provider.
     * Parameters are stored as-is; {@code null}s are accepted.
     */
    public AdyenRedirectResultDto(final String authResult, final String pspReference, final String merchantReference, final String skinCode, final String merchantSig, final String paymentMethod, final String shopperLocale, final String merchantReturnData) {
        super(PaymentProviderType.ADYEN);
        this.authResult = authResult;
        this.pspReference = pspReference;
        this.merchantReference = merchantReference;
        this.skinCode = skinCode;
        this.merchantSig = merchantSig;
        this.paymentMethod = paymentMethod;
        this.shopperLocale = shopperLocale;
        this.merchantReturnData = merchantReturnData;
    }
    /* Properties getters and setters */
    // Plain bean accessors — no defensive copies or validation (all fields are immutable Strings).
    public String getAuthResult() {
        return authResult;
    }
    public void setAuthResult(final String authResult) {
        this.authResult = authResult;
    }
    public String getPspReference() {
        return pspReference;
    }
    public void setPspReference(final String pspReference) {
        this.pspReference = pspReference;
    }
    public String getMerchantReference() {
        return merchantReference;
    }
    public void setMerchantReference(final String merchantReference) {
        this.merchantReference = merchantReference;
    }
    public String getSkinCode() {
        return skinCode;
    }
    public void setSkinCode(final String skinCode) {
        this.skinCode = skinCode;
    }
    public String getMerchantSig() {
        return merchantSig;
    }
    public void setMerchantSig(final String merchantSig) {
        this.merchantSig = merchantSig;
    }
    public String getPaymentMethod() {
        return paymentMethod;
    }
    public void setPaymentMethod(final String paymentMethod) {
        this.paymentMethod = paymentMethod;
    }
    public String getShopperLocale() {
        return shopperLocale;
    }
    public void setShopperLocale(final String shopperLocale) {
        this.shopperLocale = shopperLocale;
    }
    public String getMerchantReturnData() {
        return merchantReturnData;
    }
    public void setMerchantReturnData(final String merchantReturnData) {
        this.merchantReturnData = merchantReturnData;
    }
    /* Public interface methods */
    /**
     * Copies every Adyen-specific property of this DTO onto the given entity, after the
     * superclass has copied the shared redirect-result properties.
     *
     * @param redirectResult entity to update; must not be {@code null}
     */
    @Override
    public void updateDomainEntityProperties(final AdyenRedirectResult redirectResult) {
        super.updateDomainEntityProperties(redirectResult);
        redirectResult.setSkinCode(getSkinCode());
        redirectResult.setShopperLocale(getShopperLocale());
        redirectResult.setPspReference(getPspReference());
        redirectResult.setPaymentMethod(getPaymentMethod());
        redirectResult.setAuthResult(getAuthResult());
        redirectResult.setMerchantReference(getMerchantReference());
        redirectResult.setMerchantReturnData(getMerchantReturnData());
        redirectResult.setMerchantSig(getMerchantSig());
    }
    /* Equals, HashCode and ToString */
    // equals/hashCode/toString include the superclass state plus all eight Adyen fields;
    // keep the three field lists in sync when properties are added. The append order in
    // hashCode determines the hash value, so do not reorder it casually.
    @Override
    public boolean equals(final Object o) {
        if (this == o) {
            return true;
        }
        if (!(o instanceof AdyenRedirectResultDto)) {
            return false;
        }
        final AdyenRedirectResultDto that = (AdyenRedirectResultDto) o;
        final EqualsBuilder builder = new EqualsBuilder();
        builder.appendSuper(super.equals(that));
        builder.append(this.getAuthResult(), that.getAuthResult());
        builder.append(this.getPspReference(), that.getPspReference());
        builder.append(this.getMerchantReference(), that.getMerchantReference());
        builder.append(this.getSkinCode(), that.getSkinCode());
        builder.append(this.getMerchantSig(), that.getMerchantSig());
        builder.append(this.getPaymentMethod(), that.getPaymentMethod());
        builder.append(this.getShopperLocale(), that.getShopperLocale());
        builder.append(this.getMerchantReturnData(), that.getMerchantReturnData());
        return builder.isEquals();
    }
    @Override
    public int hashCode() {
        final HashCodeBuilder builder = new HashCodeBuilder();
        builder.appendSuper(super.hashCode());
        builder.append(this.getAuthResult());
        builder.append(this.getPspReference());
        builder.append(this.getMerchantReference());
        builder.append(this.getSkinCode());
        builder.append(this.getMerchantSig());
        builder.append(this.getPaymentMethod());
        builder.append(this.getShopperLocale());
        builder.append(this.getMerchantReturnData());
        return builder.build();
    }
    @Override
    public String toString() {
        final ToStringBuilder builder = new ToStringBuilder(this);
        builder.appendSuper(super.toString());
        builder.append("authResult", this.getAuthResult());
        builder.append("pspReference", this.getPspReference());
        builder.append("merchantReference", this.getMerchantReference());
        builder.append("skinCode", this.getSkinCode());
        builder.append("merchantSig", this.getMerchantSig());
        builder.append("paymentMethod", this.getPaymentMethod());
        builder.append("shopperLocale", this.getShopperLocale());
        builder.append("merchantReturnData", this.getMerchantReturnData());
        return builder.build();
    }
}
| apache-2.0 |
Gridtec/lambda4j | lambda4j/src-gen/main/java/at/gridtec/lambda4j/function/bi/obj/ObjLongToDoubleFunction.java | 34814 | /*
* Copyright (c) 2016 Gridtec. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package at.gridtec.lambda4j.function.bi.obj;
import at.gridtec.lambda4j.Lambda;
import at.gridtec.lambda4j.consumer.bi.obj.ObjLongConsumer2;
import at.gridtec.lambda4j.function.BooleanFunction;
import at.gridtec.lambda4j.function.ByteFunction;
import at.gridtec.lambda4j.function.CharFunction;
import at.gridtec.lambda4j.function.FloatFunction;
import at.gridtec.lambda4j.function.ShortFunction;
import at.gridtec.lambda4j.function.bi.BiFunction2;
import at.gridtec.lambda4j.function.bi.conversion.BiBooleanToDoubleFunction;
import at.gridtec.lambda4j.function.bi.conversion.BiByteToDoubleFunction;
import at.gridtec.lambda4j.function.bi.conversion.BiCharToDoubleFunction;
import at.gridtec.lambda4j.function.bi.conversion.BiFloatToDoubleFunction;
import at.gridtec.lambda4j.function.bi.conversion.BiIntToDoubleFunction;
import at.gridtec.lambda4j.function.bi.conversion.BiLongToDoubleFunction;
import at.gridtec.lambda4j.function.bi.conversion.BiShortToDoubleFunction;
import at.gridtec.lambda4j.function.bi.to.ToDoubleBiFunction2;
import at.gridtec.lambda4j.function.conversion.BooleanToLongFunction;
import at.gridtec.lambda4j.function.conversion.ByteToLongFunction;
import at.gridtec.lambda4j.function.conversion.CharToLongFunction;
import at.gridtec.lambda4j.function.conversion.DoubleToByteFunction;
import at.gridtec.lambda4j.function.conversion.DoubleToCharFunction;
import at.gridtec.lambda4j.function.conversion.DoubleToFloatFunction;
import at.gridtec.lambda4j.function.conversion.DoubleToShortFunction;
import at.gridtec.lambda4j.function.conversion.FloatToLongFunction;
import at.gridtec.lambda4j.function.conversion.LongToDoubleFunction2;
import at.gridtec.lambda4j.function.conversion.ShortToLongFunction;
import at.gridtec.lambda4j.function.to.ToDoubleFunction2;
import at.gridtec.lambda4j.operator.binary.DoubleBinaryOperator2;
import at.gridtec.lambda4j.predicate.bi.obj.ObjLongPredicate;
import org.apache.commons.lang3.tuple.Pair;
import javax.annotation.Nonnegative;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.DoubleConsumer;
import java.util.function.DoubleFunction;
import java.util.function.DoublePredicate;
import java.util.function.DoubleToIntFunction;
import java.util.function.DoubleToLongFunction;
import java.util.function.DoubleUnaryOperator;
import java.util.function.Function;
import java.util.function.IntFunction;
import java.util.function.IntToLongFunction;
import java.util.function.LongFunction;
import java.util.function.LongToDoubleFunction;
import java.util.function.LongUnaryOperator;
import java.util.function.ToDoubleFunction;
import java.util.function.ToLongFunction;
/**
* Represents an operation that accepts one object-valued and one {@code long}-valued input argument and produces a
* {@code double}-valued result.
* This is a (reference, long) specialization of {@link BiFunction2}.
* <p>
* This is a {@link FunctionalInterface} whose functional method is {@link #applyAsDouble(Object, long)}.
*
* @param <T> The type of the first argument to the function
* @see BiFunction2
*/
@SuppressWarnings("unused")
@FunctionalInterface
public interface ObjLongToDoubleFunction<T> extends Lambda {
    /**
     * Constructs a {@link ObjLongToDoubleFunction} based on a lambda expression or a method reference. The
     * given lambda expression or method reference is returned on an as-is basis; the method exists only to
     * pin the expression to this specific functional type in one step, e.g. to disambiguate overloads.
     *
     * @param <T> The type of the first argument to the function
     * @param expression A lambda expression or (typically) a method reference, e.g. {@code this::method}
     * @return A {@code ObjLongToDoubleFunction} from given lambda expression or method reference.
     * @implNote This implementation allows the given argument to be {@code null}; in that case {@code null}
     * is returned unchanged.
     * @see <a href="https://docs.oracle.com/javase/tutorial/java/javaOO/lambdaexpressions.html#syntax">Lambda
     * Expression</a>
     * @see <a href="https://docs.oracle.com/javase/tutorial/java/javaOO/methodreferences.html">Method Reference</a>
     */
    static <T> ObjLongToDoubleFunction<T> of(@Nullable final ObjLongToDoubleFunction<T> expression) {
        return expression;
    }
/**
* Calls the given {@link ObjLongToDoubleFunction} with the given arguments and returns its result.
*
* @param <T> The type of the first argument to the function
* @param function The function to be called
* @param t The first argument to the function
* @param value The second argument to the function
* @return The result from the given {@code ObjLongToDoubleFunction}.
* @throws NullPointerException If given argument is {@code null}
*/
static <T> double call(@Nonnull final ObjLongToDoubleFunction<? super T> function, T t, long value) {
Objects.requireNonNull(function);
return function.applyAsDouble(t, value);
}
/**
* Creates a {@link ObjLongToDoubleFunction} which uses the {@code first} parameter of this one as argument for the
* given {@link ToDoubleFunction}.
*
* @param <T> The type of the first argument to the function
* @param function The function which accepts the {@code first} parameter of this one
* @return Creates a {@code ObjLongToDoubleFunction} which uses the {@code first} parameter of this one as argument
* for the given {@code ToDoubleFunction}.
* @throws NullPointerException If given argument is {@code null}
*/
@Nonnull
static <T> ObjLongToDoubleFunction<T> onlyFirst(@Nonnull final ToDoubleFunction<? super T> function) {
Objects.requireNonNull(function);
return (t, value) -> function.applyAsDouble(t);
}
/**
* Creates a {@link ObjLongToDoubleFunction} which uses the {@code second} parameter of this one as argument for the
* given {@link LongToDoubleFunction}.
*
* @param <T> The type of the first argument to the function
* @param function The function which accepts the {@code second} parameter of this one
* @return Creates a {@code ObjLongToDoubleFunction} which uses the {@code second} parameter of this one as argument
* for the given {@code LongToDoubleFunction}.
* @throws NullPointerException If given argument is {@code null}
*/
@Nonnull
static <T> ObjLongToDoubleFunction<T> onlySecond(@Nonnull final LongToDoubleFunction function) {
Objects.requireNonNull(function);
return (t, value) -> function.applyAsDouble(value);
}
/**
* Creates a {@link ObjLongToDoubleFunction} which always returns a given value.
*
* @param <T> The type of the first argument to the function
* @param ret The return value for the constant
* @return A {@code ObjLongToDoubleFunction} which always returns a given value.
*/
@Nonnull
static <T> ObjLongToDoubleFunction<T> constant(double ret) {
return (t, value) -> ret;
}
    /**
     * Applies this function to the given arguments and produces a {@code double} result.
     * This is the single abstract method of this functional interface.
     *
     * @param t The first argument to the function
     * @param value The second argument to the function
     * @return The return value from the function, which is its result.
     */
    double applyAsDouble(T t, long value);
/**
* Applies this function partially to some arguments of this one, producing a {@link LongToDoubleFunction2} as
* result.
*
* @param t The first argument to this function used to partially apply this function
* @return A {@code LongToDoubleFunction2} that represents this function partially applied the some arguments.
*/
@Nonnull
default LongToDoubleFunction2 papplyAsDouble(T t) {
return (value) -> this.applyAsDouble(t, value);
}
/**
* Applies this function partially to some arguments of this one, producing a {@link ToDoubleFunction2} as result.
*
* @param value The second argument to this function used to partially apply this function
* @return A {@code ToDoubleFunction2} that represents this function partially applied the some arguments.
*/
@Nonnull
default ToDoubleFunction2<T> papplyAsDouble(long value) {
return (t) -> this.applyAsDouble(t, value);
}
    /**
     * Returns the number of arguments for this function.
     *
     * @return The number of arguments for this function.
     * @implSpec The default implementation always returns {@code 2}: the object argument plus the
     * primitive {@code long} argument.
     */
    @Nonnegative
    default int arity() {
        return 2;
    }
/**
* Returns a composed {@link ToDoubleBiFunction2} that first applies the {@code before} functions to its input, and
* then applies this function to the result.
* If evaluation of either operation throws an exception, it is relayed to the caller of the composed operation.
*
* @param <A> The type of the argument to the first given function, and of composed function
* @param <B> The type of the argument to the second given function, and of composed function
* @param before1 The first function to apply before this function is applied
* @param before2 The second function to apply before this function is applied
* @return A composed {@code ToDoubleBiFunction2} that first applies the {@code before} functions to its input, and
* then applies this function to the result.
* @throws NullPointerException If given argument is {@code null}
* @implSpec The input argument of this method is able to handle every type.
*/
@Nonnull
default <A, B> ToDoubleBiFunction2<A, B> compose(@Nonnull final Function<? super A, ? extends T> before1,
@Nonnull final ToLongFunction<? super B> before2) {
Objects.requireNonNull(before1);
Objects.requireNonNull(before2);
return (a, b) -> applyAsDouble(before1.apply(a), before2.applyAsLong(b));
}
/**
* Returns a composed {@link BiBooleanToDoubleFunction} that first applies the {@code before} functions to its
* input, and then applies this function to the result. If evaluation of either operation throws an exception, it is
* relayed to the caller of the composed operation. This method is just convenience, to provide the ability to
* execute an operation which accepts {@code boolean} input, before this primitive function is executed.
*
* @param before1 The first function to apply before this function is applied
* @param before2 The second function to apply before this function is applied
* @return A composed {@code BiBooleanToDoubleFunction} that first applies the {@code before} functions to its
* input, and then applies this function to the result.
* @throws NullPointerException If given argument is {@code null}
* @implSpec The input argument of this method is a able to handle primitive values. In this case this is {@code
* boolean}.
*/
@Nonnull
default BiBooleanToDoubleFunction composeFromBoolean(@Nonnull final BooleanFunction<? extends T> before1,
@Nonnull final BooleanToLongFunction before2) {
Objects.requireNonNull(before1);
Objects.requireNonNull(before2);
return (value1, value2) -> applyAsDouble(before1.apply(value1), before2.applyAsLong(value2));
}
/**
* Returns a composed {@link BiByteToDoubleFunction} that first applies the {@code before} functions to
* its input, and then applies this function to the result.
* If evaluation of either operation throws an exception, it is relayed to the caller of the composed operation.
* This method is just convenience, to provide the ability to execute an operation which accepts {@code byte} input,
* before this primitive function is executed.
*
* @param before1 The first function to apply before this function is applied
* @param before2 The second function to apply before this function is applied
* @return A composed {@code BiByteToDoubleFunction} that first applies the {@code before} functions to its input,
* and then applies this function to the result.
* @throws NullPointerException If given argument is {@code null}
* @implSpec The input argument of this method is a able to handle primitive values. In this case this is {@code
* byte}.
*/
@Nonnull
default BiByteToDoubleFunction composeFromByte(@Nonnull final ByteFunction<? extends T> before1,
@Nonnull final ByteToLongFunction before2) {
Objects.requireNonNull(before1);
Objects.requireNonNull(before2);
return (value1, value2) -> applyAsDouble(before1.apply(value1), before2.applyAsLong(value2));
}
/**
* Returns a composed {@link BiCharToDoubleFunction} that first applies the {@code before} functions to
* its input, and then applies this function to the result.
* If evaluation of either operation throws an exception, it is relayed to the caller of the composed operation.
* This method is just convenience, to provide the ability to execute an operation which accepts {@code char} input,
* before this primitive function is executed.
*
* @param before1 The first function to apply before this function is applied
* @param before2 The second function to apply before this function is applied
* @return A composed {@code BiCharToDoubleFunction} that first applies the {@code before} functions to its input,
* and then applies this function to the result.
* @throws NullPointerException If given argument is {@code null}
* @implSpec The input argument of this method is a able to handle primitive values. In this case this is {@code
* char}.
*/
@Nonnull
default BiCharToDoubleFunction composeFromChar(@Nonnull final CharFunction<? extends T> before1,
@Nonnull final CharToLongFunction before2) {
Objects.requireNonNull(before1);
Objects.requireNonNull(before2);
return (value1, value2) -> applyAsDouble(before1.apply(value1), before2.applyAsLong(value2));
}
/**
* Returns a composed {@link DoubleBinaryOperator2} that first applies the {@code before} functions to its input,
* and then applies this function to the result. If evaluation of either operation throws an exception, it is
* relayed to the caller of the composed operation. This method is just convenience, to provide the ability to
* execute an operation which accepts {@code double} input, before this primitive function is executed.
*
* @param before1 The first function to apply before this function is applied
* @param before2 The second function to apply before this function is applied
* @return A composed {@code DoubleBinaryOperator2} that first applies the {@code before} functions to its input,
* and then applies this function to the result.
* @throws NullPointerException If given argument is {@code null}
* @implSpec The input argument of this method is a able to handle primitive values. In this case this is {@code
* double}.
*/
@Nonnull
default DoubleBinaryOperator2 composeFromDouble(@Nonnull final DoubleFunction<? extends T> before1,
@Nonnull final DoubleToLongFunction before2) {
Objects.requireNonNull(before1);
Objects.requireNonNull(before2);
return (value1, value2) -> applyAsDouble(before1.apply(value1), before2.applyAsLong(value2));
}
/**
* Returns a composed {@link BiFloatToDoubleFunction} that first applies the {@code before} functions to its input,
* and then applies this function to the result. If evaluation of either operation throws an exception, it is
* relayed to the caller of the composed operation. This method is just convenience, to provide the ability to
* execute an operation which accepts {@code float} input, before this primitive function is executed.
*
* @param before1 The first function to apply before this function is applied
* @param before2 The second function to apply before this function is applied
* @return A composed {@code BiFloatToDoubleFunction} that first applies the {@code before} functions to its input,
* and then applies this function to the result.
* @throws NullPointerException If given argument is {@code null}
* @implSpec The input argument of this method is a able to handle primitive values. In this case this is {@code
* float}.
*/
@Nonnull
default BiFloatToDoubleFunction composeFromFloat(@Nonnull final FloatFunction<? extends T> before1,
@Nonnull final FloatToLongFunction before2) {
Objects.requireNonNull(before1);
Objects.requireNonNull(before2);
return (value1, value2) -> applyAsDouble(before1.apply(value1), before2.applyAsLong(value2));
}
/**
* Returns a composed {@link BiIntToDoubleFunction} that first applies the {@code before} functions to
* its input, and then applies this function to the result.
* If evaluation of either operation throws an exception, it is relayed to the caller of the composed operation.
* This method is just convenience, to provide the ability to execute an operation which accepts {@code int} input,
* before this primitive function is executed.
*
* @param before1 The first function to apply before this function is applied
* @param before2 The second function to apply before this function is applied
* @return A composed {@code BiIntToDoubleFunction} that first applies the {@code before} functions to its input,
* and then applies this function to the result.
* @throws NullPointerException If given argument is {@code null}
* @implSpec The input argument of this method is a able to handle primitive values. In this case this is {@code
* int}.
*/
@Nonnull
default BiIntToDoubleFunction composeFromInt(@Nonnull final IntFunction<? extends T> before1,
@Nonnull final IntToLongFunction before2) {
Objects.requireNonNull(before1);
Objects.requireNonNull(before2);
return (value1, value2) -> applyAsDouble(before1.apply(value1), before2.applyAsLong(value2));
}
/**
* Returns a composed {@link BiLongToDoubleFunction} that first applies the {@code before} functions to
* its input, and then applies this function to the result.
* If evaluation of either operation throws an exception, it is relayed to the caller of the composed operation.
* This method is just convenience, to provide the ability to execute an operation which accepts {@code long} input,
* before this primitive function is executed.
*
* @param before1 The first function to apply before this function is applied
* @param before2 The second operator to apply before this function is applied
* @return A composed {@code BiLongToDoubleFunction} that first applies the {@code before} functions to its input,
* and then applies this function to the result.
* @throws NullPointerException If given argument is {@code null}
* @implSpec The input argument of this method is a able to handle primitive values. In this case this is {@code
* long}.
*/
@Nonnull
default BiLongToDoubleFunction composeFromLong(@Nonnull final LongFunction<? extends T> before1,
@Nonnull final LongUnaryOperator before2) {
Objects.requireNonNull(before1);
Objects.requireNonNull(before2);
return (value1, value2) -> applyAsDouble(before1.apply(value1), before2.applyAsLong(value2));
}
/**
* Returns a composed {@link BiShortToDoubleFunction} that first applies the {@code before} functions to its input,
* and then applies this function to the result. If evaluation of either operation throws an exception, it is
* relayed to the caller of the composed operation. This method is just convenience, to provide the ability to
* execute an operation which accepts {@code short} input, before this primitive function is executed.
*
* @param before1 The first function to apply before this function is applied
* @param before2 The second function to apply before this function is applied
* @return A composed {@code BiShortToDoubleFunction} that first applies the {@code before} functions to its input,
* and then applies this function to the result.
* @throws NullPointerException If given argument is {@code null}
* @implSpec The input argument of this method is a able to handle primitive values. In this case this is {@code
* short}.
*/
@Nonnull
default BiShortToDoubleFunction composeFromShort(@Nonnull final ShortFunction<? extends T> before1,
@Nonnull final ShortToLongFunction before2) {
Objects.requireNonNull(before1);
Objects.requireNonNull(before2);
return (value1, value2) -> applyAsDouble(before1.apply(value1), before2.applyAsLong(value2));
}
/**
* Returns a composed {@link ObjLongFunction} that first applies this function to its input, and then applies the
* {@code after} function to the result.
* If evaluation of either operation throws an exception, it is relayed to the caller of the composed operation.
*
* @param <S> The type of return value from the {@code after} function, and of the composed function
* @param after The function to apply after this function is applied
* @return A composed {@code ObjLongFunction} that first applies this function to its input, and then applies the
* {@code after} function to the result.
* @throws NullPointerException If given argument is {@code null}
* @implSpec The input argument of this method is able to return every type.
*/
@Nonnull
default <S> ObjLongFunction<T, S> andThen(@Nonnull final DoubleFunction<? extends S> after) {
Objects.requireNonNull(after);
return (t, value) -> after.apply(applyAsDouble(t, value));
}
/**
 * Returns a composed {@link ObjLongPredicate} that first applies this function to its input, and then applies the
 * {@code after} predicate to the result. If evaluation of either operation throws an exception, it is relayed to
 * the caller of the composed operation. This method is just convenience, to provide the ability to transform this
 * primitive function to an operation returning {@code boolean}.
 *
 * @param after The predicate to apply after this function is applied
 * @return A composed {@code ObjLongPredicate} that first applies this function to its input, and then applies the
 * {@code after} predicate to the result.
 * @throws NullPointerException If given argument is {@code null}
 * @implSpec The input argument of this method is able to return primitive values. In this case this is {@code
 * boolean}.
 */
@Nonnull
default ObjLongPredicate<T> andThenToBoolean(@Nonnull final DoublePredicate after) {
    Objects.requireNonNull(after);
    return (t, value) -> after.test(applyAsDouble(t, value));
}
/**
 * Returns a composed {@link ObjLongToByteFunction} that first applies this function to its input, and then applies
 * the {@code after} function to the result. If evaluation of either operation throws an exception, it is relayed to
 * the caller of the composed operation. This method is just convenience, to provide the ability to transform this
 * primitive function to an operation returning {@code byte}.
 *
 * @param after The function to apply after this function is applied
 * @return A composed {@code ObjLongToByteFunction} that first applies this function to its input, and then applies
 * the {@code after} function to the result.
 * @throws NullPointerException If given argument is {@code null}
 * @implSpec The input argument of this method is able to return primitive values. In this case this is {@code
 * byte}.
 */
@Nonnull
default ObjLongToByteFunction<T> andThenToByte(@Nonnull final DoubleToByteFunction after) {
    Objects.requireNonNull(after);
    return (t, value) -> after.applyAsByte(applyAsDouble(t, value));
}
/**
 * Returns a composed {@link ObjLongToCharFunction} that first applies this function to its input, and then applies
 * the {@code after} function to the result. If evaluation of either operation throws an exception, it is relayed to
 * the caller of the composed operation. This method is just convenience, to provide the ability to transform this
 * primitive function to an operation returning {@code char}.
 *
 * @param after The function to apply after this function is applied
 * @return A composed {@code ObjLongToCharFunction} that first applies this function to its input, and then applies
 * the {@code after} function to the result.
 * @throws NullPointerException If given argument is {@code null}
 * @implSpec The input argument of this method is able to return primitive values. In this case this is {@code
 * char}.
 */
@Nonnull
default ObjLongToCharFunction<T> andThenToChar(@Nonnull final DoubleToCharFunction after) {
    Objects.requireNonNull(after);
    return (t, value) -> after.applyAsChar(applyAsDouble(t, value));
}
/**
 * Returns a composed {@link ObjLongToDoubleFunction} that first applies this function to its input, and then
 * applies the {@code after} operator to the result. If evaluation of either operation throws an exception, it is
 * relayed to the caller of the composed operation. This method is just convenience, to provide the ability to
 * transform this primitive function to an operation returning {@code double}.
 *
 * @param after The operator to apply after this function is applied
 * @return A composed {@code ObjLongToDoubleFunction} that first applies this function to its input, and then
 * applies the {@code after} operator to the result.
 * @throws NullPointerException If given argument is {@code null}
 * @implSpec The input argument of this method is able to return primitive values. In this case this is {@code
 * double}.
 */
@Nonnull
default ObjLongToDoubleFunction<T> andThenToDouble(@Nonnull final DoubleUnaryOperator after) {
    Objects.requireNonNull(after);
    return (t, value) -> after.applyAsDouble(applyAsDouble(t, value));
}
/**
 * Returns a composed {@link ObjLongToFloatFunction} that first applies this function to its input, and then applies
 * the {@code after} function to the result. If evaluation of either operation throws an exception, it is relayed to
 * the caller of the composed operation. This method is just convenience, to provide the ability to transform this
 * primitive function to an operation returning {@code float}.
 *
 * @param after The function to apply after this function is applied
 * @return A composed {@code ObjLongToFloatFunction} that first applies this function to its input, and then applies
 * the {@code after} function to the result.
 * @throws NullPointerException If given argument is {@code null}
 * @implSpec The input argument of this method is able to return primitive values. In this case this is {@code
 * float}.
 */
@Nonnull
default ObjLongToFloatFunction<T> andThenToFloat(@Nonnull final DoubleToFloatFunction after) {
    Objects.requireNonNull(after);
    return (t, value) -> after.applyAsFloat(applyAsDouble(t, value));
}
/**
 * Returns a composed {@link ObjLongToIntFunction} that first applies this function to its input, and then applies
 * the {@code after} function to the result. If evaluation of either operation throws an exception, it is relayed to
 * the caller of the composed operation. This method is just convenience, to provide the ability to transform this
 * primitive function to an operation returning {@code int}.
 *
 * @param after The function to apply after this function is applied
 * @return A composed {@code ObjLongToIntFunction} that first applies this function to its input, and then applies
 * the {@code after} function to the result.
 * @throws NullPointerException If given argument is {@code null}
 * @implSpec The input argument of this method is able to return primitive values. In this case this is {@code
 * int}.
 */
@Nonnull
default ObjLongToIntFunction<T> andThenToInt(@Nonnull final DoubleToIntFunction after) {
    Objects.requireNonNull(after);
    return (t, value) -> after.applyAsInt(applyAsDouble(t, value));
}
/**
 * Returns a composed {@link ObjLongToLongFunction} that first applies this function to its input, and then applies
 * the {@code after} function to the result. If evaluation of either operation throws an exception, it is relayed to
 * the caller of the composed operation. This method is just convenience, to provide the ability to transform this
 * primitive function to an operation returning {@code long}.
 *
 * @param after The function to apply after this function is applied
 * @return A composed {@code ObjLongToLongFunction} that first applies this function to its input, and then applies
 * the {@code after} function to the result.
 * @throws NullPointerException If given argument is {@code null}
 * @implSpec The input argument of this method is able to return primitive values. In this case this is {@code
 * long}.
 */
@Nonnull
default ObjLongToLongFunction<T> andThenToLong(@Nonnull final DoubleToLongFunction after) {
    Objects.requireNonNull(after);
    return (t, value) -> after.applyAsLong(applyAsDouble(t, value));
}
/**
 * Returns a composed {@link ObjLongToShortFunction} that first applies this function to its input, and then applies
 * the {@code after} function to the result. If evaluation of either operation throws an exception, it is relayed to
 * the caller of the composed operation. This method is just convenience, to provide the ability to transform this
 * primitive function to an operation returning {@code short}.
 *
 * @param after The function to apply after this function is applied
 * @return A composed {@code ObjLongToShortFunction} that first applies this function to its input, and then applies
 * the {@code after} function to the result.
 * @throws NullPointerException If given argument is {@code null}
 * @implSpec The input argument of this method is able to return primitive values. In this case this is {@code
 * short}.
 */
@Nonnull
default ObjLongToShortFunction<T> andThenToShort(@Nonnull final DoubleToShortFunction after) {
    Objects.requireNonNull(after);
    return (t, value) -> after.applyAsShort(applyAsDouble(t, value));
}
/**
 * Returns a composed {@link ObjLongConsumer2} that first applies this function to its input, and then consumes the
 * result using the given {@link DoubleConsumer}. If evaluation of either operation throws an exception, it is
 * relayed to the caller of the composed operation.
 *
 * @param consumer The operation which consumes the result from this operation
 * @return A composed {@code ObjLongConsumer2} that first applies this function to its input, and then consumes the
 * result using the given {@code DoubleConsumer}.
 * @throws NullPointerException If given argument is {@code null}
 */
@Nonnull
default ObjLongConsumer2<T> consume(@Nonnull final DoubleConsumer consumer) {
    Objects.requireNonNull(consumer);
    return (t, value) -> consumer.accept(applyAsDouble(t, value));
}
/**
 * Returns a memoized (caching) version of this {@link ObjLongToDoubleFunction}. Whenever it is called, the mapping
 * between the input parameters and the return value is preserved in a cache, making subsequent calls returning the
 * memoized value instead of computing the return value again.
 * <p>
 * Unless the function and therefore the used cache will be garbage-collected, it will keep all memoized values
 * forever.
 *
 * @return A memoized (caching) version of this {@code ObjLongToDoubleFunction}.
 * @implSpec This implementation does not allow the input parameters or return value to be {@code null} for the
 * resulting memoized function, as the cache used internally does not permit {@code null} keys or values.
 * @implNote The returned memoized function can be safely used concurrently from multiple threads which makes it
 * thread-safe.
 */
@Nonnull
default ObjLongToDoubleFunction<T> memoized() {
    if (isMemoized()) {
        // Already memoized; wrapping again would only add a useless second cache layer.
        return this;
    } else {
        // Both arguments together form the cache key; ConcurrentHashMap forbids null keys and values.
        final Map<Pair<T, Long>, Double> cache = new ConcurrentHashMap<>();
        final Object lock = new Object();
        // The intersection cast tags the lambda with the Memoized marker interface,
        // which is exactly what the isMemoized() check above looks for.
        return (ObjLongToDoubleFunction<T> & Memoized) (t, value) -> {
            final double returnValue;
            // NOTE(review): the explicit lock serializes all lookups, presumably to avoid
            // recursive computeIfAbsent updates on the same map — confirm before removing.
            synchronized (lock) {
                returnValue = cache.computeIfAbsent(Pair.of(t, value),
                        key -> applyAsDouble(key.getLeft(), key.getRight()));
            }
            return returnValue;
        };
    }
}
/**
 * Returns a composed {@link BiFunction2} which represents this {@link ObjLongToDoubleFunction}. Thereby the
 * primitive input argument for this function is autoboxed. This method provides the possibility to use this
 * {@code ObjLongToDoubleFunction} with methods provided by the {@code JDK}.
 *
 * @return A composed {@code BiFunction2} which represents this {@code ObjLongToDoubleFunction}.
 */
@Nonnull
default BiFunction2<T, Long, Double> boxed() {
    // Explicit lambda instead of a method reference; boxing of the long argument and
    // the double result is inserted by the compiler.
    return (t, value) -> applyAsDouble(t, value);
}
} | apache-2.0 |
Spieldichein/ShoprX | Android/ShoprX/src/main/java/de/tum/in/schlichter/shoprx/stereotype/algorithm/StereotypeFiltering.java | 3994 | package de.tum.in.schlichter.shoprx.stereotype.algorithm;
import android.util.Log;
import java.util.List;
import java.util.Map;
import de.tum.in.schlichter.shoprx.algorithm.model.Item;
import de.tum.in.schlichter.shoprx.algorithm.model.Label;
import de.tum.in.schlichter.shoprx.stereotype.stereotypes.AbstractStereotype;
/**
* Created by Yannick on 09.02.15.
*
* This class does the Stereotype based filtering. It searches within the stereotype settings for
* colors and brands that this person might like. Brands are more important than colours in this
* classification.
* The user's stereotype was determined via {@link de.tum.in.schlichter.shoprx.stereotype.controller.StereotypeDeterminator}
* and is now exploited in order to pre-filter the case base.
*/
public class StereotypeFiltering {

    /**
     * Computes the proximity of a list of clothing items to a certain stereotype and stores the
     * value on each item via {@code setProximityToStereotype}.
     * <p>
     * NOTE(review): despite earlier documentation claiming a result "sorted in descending order",
     * this method does NOT sort; the list is returned in its original order with only the
     * proximity values populated. Confirm whether callers sort afterwards.
     *
     * @param stereotype
     *            the stereotype to which the proximity is to be computed
     * @param clothingItems
     *            the list of clothing items for which the proximity is to be
     *            computed
     * @return the same list instance, each item annotated with its proximity to the stereotype
     */
    public List<Item> computeStereotypeProximity(AbstractStereotype stereotype, List<Item> clothingItems) {
        for (Item item : clothingItems) {
            item.setProximityToStereotype(computeStereotypeProximity(stereotype, item));
            //Log.i("Putting proximity " + item.getProximityToStereotype(), item.toString());
        }
        //logTop10Items(clothingItems);
        return clothingItems;
    }

    /**
     * Logs the first 10 items of the given list onto the console (debug helper; only meaningful
     * if the list was sorted by proximity beforehand).
     *
     * @param sortedClothingItems the list whose first 10 clothing items are logged.
     */
    @SuppressWarnings("UnusedDeclaration")
    private void logTop10Items(List<Item> sortedClothingItems) {
        int i = 1;
        for (Item item : sortedClothingItems) {
            if (i > 10)
                break;
            Log.i(i + ": " + item.toString(), " Proximity: " + item.getProximityToStereotype());
            i++;
        }
    }

    /**
     * Computes the proximity of a single clothing item to a stereotype as a weighted average of
     * the stereotype's attribute weights (matched against the item's name parts) and its brand
     * weight. Brand matches are weighted more heavily the more attribute hits were found.
     *
     * @param stereotype
     *            the stereotype to which the proximity is to be computed
     * @param item
     *            the item of interest
     * @return the averaged weight of all matches, or 0.0 if nothing matched
     *         (the absolute scale depends on the stereotype's configured weights)
     */
    public double computeStereotypeProximity(AbstractStereotype stereotype, Item item) {
        double proximity = 0.0;
        int hits = 0;
        // compute attribute proximity first
        String[] haystack = item.getNameParts();
        // get all relevant attributes for the active stereotype
        Map<String, Integer> attributeProbabilityMap = stereotype.getAttributeProbabilityMap();
        // check whether these attributes appear in the item, if so add their
        // weight to the proximity measure
        for (String needle : haystack) {
            if (attributeProbabilityMap.containsKey(needle)) {
                // accumulate the attribute's weight; the division by the number
                // of hits happens once at the end
                int weight = attributeProbabilityMap.get(needle);
                proximity += weight;
                hits++;
            }
        }
        // depending on number of attribute hits set weight for brand impact
        int brandImpact = (hits > 2 ? hits / 2 : 1);
        Map<Label.Value, Integer> brandProbabilityMap = stereotype.getBrandProbabilityMap();
        Label.Value label = (Label.Value) item.attributes().getAttributeById(Label.ID).currentValue();
        if (brandProbabilityMap.containsKey(label)) {
            // brand weight counts brandImpact times in both numerator and denominator
            proximity += brandProbabilityMap.get(label) * brandImpact;
            hits += brandImpact;
        }
        return hits > 0 ? proximity / hits : 0.0;
    }
}
| apache-2.0 |
KernelHaven/KernelHaven | test/net/ssehub/kernel_haven/util/io/TableRowMetadataTest.java | 7164 | /*
* Copyright 2017-2019 University of Hildesheim, Software Systems Engineering
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.ssehub.kernel_haven.util.io;
import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.assertThat;
import org.junit.Test;
import net.ssehub.kernel_haven.util.FormatException;
import net.ssehub.kernel_haven.util.io.ITableReader.Factory;
import net.ssehub.kernel_haven.util.null_checks.NonNull;
/**
* Tests the {@link TableRowMetadata} class.
*
* @author Adam
*/
public class TableRowMetadataTest {

    /**
     * A simple table row structure to be written.
     */
    @TableRow
    public static class Simple {

        /**
         * A factory for reading a {@link Simple} object.
         */
        public static class SimpleFactory implements Factory<@NonNull Simple> {

            @Override
            public Simple create(@NonNull String @NonNull [] fields) throws FormatException {
                // Expect exactly two columns: an integer and a string.
                if (fields.length != 2) {
                    throw new FormatException();
                }
                int integer;
                try {
                    integer = Integer.parseInt(fields[0]);
                } catch (NumberFormatException e) {
                    throw new FormatException(e);
                }
                String str = fields[1];
                return new Simple(integer, str);
            }
        }

        private int integer;

        private String str;

        /**
         * Creates an instance.
         *
         * @param integer The integer value.
         * @param str The string value.
         */
        public Simple(int integer, String str) {
            this.integer = integer;
            this.str = str;
        }

        /**
         * The integer value.
         *
         * @return Integer value.
         */
        @TableElement(name = "Integer", index = 0)
        public int getInteger() {
            return integer;
        }

        /**
         * The string value.
         *
         * @return The String value.
         */
        @TableElement(name = "String", index = 1)
        public String getStr() {
            return str;
        }
    }

    /**
     * A simple table row structure to be written, without annotations.
     */
    public static class NoAnnotations {

        private char character;

        private String str;

        /**
         * Creates an instance.
         *
         * @param character The character value.
         * @param str The string value.
         */
        public NoAnnotations(char character, String str) {
            this.character = character;
            this.str = str;
        }

        @Override
        public String toString() {
            return character + " " + str;
        }
    }

    /**
     * Small class with a {@link TableElement} which is private.
     */
    @TableRow
    private static class PrivateTableRow {

        /**
         * Test method.
         *
         * @return "a".
         */
        @TableElement(index = 0, name = "A")
        private String getA() {
            return "a";
        }

        /**
         * Test method.
         *
         * @return "b".
         */
        @TableElement(index = 1, name = "B")
        public String getB() {
            return "b";
        }
    }

    /**
     * Tests the isTableRow() method.
     */
    @Test
    public void testIsTableRow() {
        assertThat(TableRowMetadata.isTableRow(Simple.class), is(true));
        assertThat(TableRowMetadata.isTableRow(NoAnnotations.class), is(false));
    }

    /**
     * Tests whether the constructor throws an exception for non-annotated classes.
     */
    @Test(expected = IllegalArgumentException.class)
    public void testInvalid() {
        new TableRowMetadata(NoAnnotations.class);
    }

    /**
     * Tests whether the header names are read correctly.
     */
    @Test
    public void testHeaders() {
        TableRowMetadata metadata = new TableRowMetadata(Simple.class);
        assertThat(metadata.getHeaders().length, is(2));
        assertThat(metadata.getHeaders()[0], is("Integer"));
        assertThat(metadata.getHeaders()[1], is("String"));
    }

    /**
     * Tests the isSameClass() method.
     */
    @Test
    public void testIsSameClass() {
        TableRowMetadata metadata = new TableRowMetadata(Simple.class);
        assertThat(metadata.isSameClass(new Simple(1, "")), is(true));
        assertThat(metadata.isSameClass(new NoAnnotations('a', "")), is(false));
    }

    /**
     * Tests the getContent() method.
     *
     * @throws ReflectiveOperationException unwanted.
     */
    @Test
    public void testGetContent() throws ReflectiveOperationException {
        TableRowMetadata metadata = new TableRowMetadata(Simple.class);
        Object[] content = metadata.getContent(new Simple(34, "thirtythree"));
        assertThat(content.length, is(2));
        assertThat(content[0], is(34));
        assertThat(content[1], is("thirtythree"));
    }

    /**
     * Tests the getContent() method with an invalid instance.
     *
     * @throws ReflectiveOperationException wanted.
     */
    @Test(expected = ReflectiveOperationException.class)
    public void testGetContentInvalid() throws ReflectiveOperationException {
        TableRowMetadata metadata = new TableRowMetadata(Simple.class);
        metadata.getContent(new NoAnnotations('b', "bee"));
    }

    /**
     * Tests that a private method annotated with {@link TableElement} is correctly ignored.
     *
     * @throws ReflectiveOperationException unwanted.
     */
    @Test
    public void testIgnorePrivateMethod() throws ReflectiveOperationException {
        TableRowMetadata metadata = new TableRowMetadata(PrivateTableRow.class);
        Object[] content = metadata.getContent(new PrivateTableRow());
        // "a" should be ignored, because its private
        assertThat(content, is(new String[] {"b"}));
    }

    /**
     * Tests that a <code>null</code> value is correctly turned into an empty string.
     *
     * @throws ReflectiveOperationException unwanted.
     */
    @Test
    public void testNullValue() throws ReflectiveOperationException {
        TableRowMetadata metadata = new TableRowMetadata(Simple.class);
        Object[] content = metadata.getContent(new Simple(0, null));
        assertThat(content, is(new Object[] {0, null}));
    }
}
| apache-2.0 |
ChoicesWang/DesignDemo | app/src/main/java/com/support/design/fragment/CustomPagerAdapter.java | 1030 | package com.support.design.fragment;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentManager;
import android.support.v4.app.FragmentPagerAdapter;
import java.util.ArrayList;
import java.util.List;
/**
 * {@link FragmentPagerAdapter} backed by two parallel lists: the fragments that make
 * up the pages and the titles shown for their tabs. Pages are registered up front via
 * {@link #addFragment(Fragment, String)}.
 */
public class CustomPagerAdapter extends FragmentPagerAdapter {

    private final List<Fragment> pages = new ArrayList<>();
    private final List<String> pageTitles = new ArrayList<>();

    public CustomPagerAdapter(FragmentManager fm) {
        super(fm);
    }

    /** Registers a fragment together with the title displayed for its page. */
    public void addFragment(Fragment fragment, String title) {
        pages.add(fragment);
        pageTitles.add(title);
    }

    @Override
    public Fragment getItem(int position) {
        return pages.get(position);
    }

    @Override
    public int getCount() {
        return pages.size();
    }

    @Override
    public CharSequence getPageTitle(int position) {
        return pageTitles.get(position);
    }
}
| apache-2.0 |
manstis/drools | drools-test-coverage/test-compiler-integration/src/test/java/org/drools/mvel/compiler/compiler/QueryBuilderTest.java | 7271 | /*
* Copyright 2015 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.mvel.compiler.compiler;
import java.util.Arrays;
import org.drools.compiler.builder.impl.KnowledgeBuilderImpl;
import org.drools.compiler.lang.descr.AndDescr;
import org.drools.compiler.lang.descr.BindingDescr;
import org.drools.compiler.lang.descr.ExprConstraintDescr;
import org.drools.compiler.lang.descr.ExprConstraintDescr.Type;
import org.drools.compiler.lang.descr.PackageDescr;
import org.drools.compiler.lang.descr.PatternDescr;
import org.drools.compiler.lang.descr.QueryDescr;
import org.drools.compiler.lang.descr.RuleDescr;
import org.drools.kiesession.rulebase.InternalKnowledgeBase;
import org.drools.core.test.model.DroolsTestCase;
import org.drools.kiesession.rulebase.KnowledgeBaseFactory;
import org.drools.mvel.compiler.Cheese;
import org.drools.mvel.compiler.Person;
import org.junit.Test;
import org.kie.api.definition.KiePackage;
import org.kie.api.runtime.KieSession;
import org.kie.api.runtime.rule.QueryResults;
import static org.junit.Assert.assertEquals;
/**
 * Tests that queries assembled programmatically via {@link QueryDescr} are built into a
 * package correctly and can be invoked both from a rule (query pattern with positional
 * arguments) and directly through the session API.
 */
public class QueryBuilderTest extends DroolsTestCase {

    /**
     * Builds a package containing a three-parameter query plus a rule whose LHS calls
     * that query with positional arguments, then fires the rule against a set of facts.
     */
    @Test
    public void testRuleWithQuery() throws Exception {
        final KnowledgeBuilderImpl builder = new KnowledgeBuilderImpl();
        final PackageDescr packageDescr = new PackageDescr( "p1" );
        // query1($name : String, $age : int, $likes : String)
        QueryDescr queryDescr = new QueryDescr( "query1" );
        queryDescr.addParameter( "String",
                                 "$name" );
        queryDescr.addParameter( "int",
                                 "$age" );
        queryDescr.addParameter( "String",
                                 "$likes" );
        packageDescr.addRule( queryDescr );
        AndDescr lhs = new AndDescr();
        queryDescr.setLhs( lhs );
        // query LHS: Person($name : name, $age : age, $likes : likes)
        PatternDescr pattern = new PatternDescr( Person.class.getName() );
        lhs.addDescr( pattern );
        pattern.addConstraint( new BindingDescr( "$name", "name", true ) );
        pattern.addConstraint( new BindingDescr( "$age", "age", true ) );
        pattern.addConstraint( new BindingDescr( "$likes", "likes", true ) );
        // rule-1 binds $type from a Cheese fact and calls query1('bobba', $age, $type)
        RuleDescr ruleDescr = new RuleDescr( "rule-1" );
        packageDescr.addRule( ruleDescr );
        lhs = new AndDescr();
        ruleDescr.setLhs( lhs );
        pattern = new PatternDescr( Cheese.class.getName() );
        lhs.addDescr( pattern );
        pattern.addConstraint( new BindingDescr( "$type",
                                                 "type" ) );
        pattern = new PatternDescr( "query1" );
        pattern.setQuery( true );
        lhs.addDescr( pattern );
        // positional query arguments: literal, then the two bound variables
        ExprConstraintDescr expr = new ExprConstraintDescr("'bobba'");
        expr.setPosition( 0 );
        expr.setType( Type.POSITIONAL );
        pattern.addConstraint(expr);
        expr = new ExprConstraintDescr("$age");
        expr.setPosition( 1 );
        expr.setType( Type.POSITIONAL );
        pattern.addConstraint( expr );
        expr = new ExprConstraintDescr("$type");
        expr.setPosition( 2 );
        expr.setType( Type.POSITIONAL );
        pattern.addConstraint( expr );
        ruleDescr.setConsequence( "System.out.println(\"age: \" + $age);" );
        builder.addPackage( packageDescr );
        // the package must build without errors before it is deployed
        assertLength( 0,
                      builder.getErrors().getErrors() );
        InternalKnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
        kbase.addPackages(Arrays.asList( new KiePackage[] { builder.getPackage(packageDescr.getName()) } ));
        final KieSession session = kbase.newKieSession();
        session.insert( new Person( "bobba",
                                    "stilton",
                                    90 ) );
        session.insert( new Person( "bobba",
                                    "brie",
                                    80 ) );
        session.insert( new Person( "bobba",
                                    "brie",
                                    75 ) );
        session.insert( new Person( "darth",
                                    "brie",
                                    100 ) );
        session.insert( new Person( "luke",
                                    "brie",
                                    25 ) );
        session.insert( new Cheese( "brie",
                                    25 ) );
        session.fireAllRules();
    }

    /**
     * Builds two queries (one parametrized, one without parameters) and verifies the
     * result counts and contents returned by {@code getQueryResults}.
     */
    @Test
    public void testQuery() throws Exception {
        final KnowledgeBuilderImpl builder = new KnowledgeBuilderImpl();
        final PackageDescr packageDescr = new PackageDescr( "p1" );
        // query1($type : String): stilton : Cheese(type == $type)
        final QueryDescr queryDescr = new QueryDescr( "query1" );
        queryDescr.addParameter( "String",
                                 "$type" );
        packageDescr.addRule( queryDescr );
        final AndDescr lhs = new AndDescr();
        queryDescr.setLhs( lhs );
        final PatternDescr pattern = new PatternDescr( Cheese.class.getName(),
                                                       "stilton" );
        lhs.addDescr( pattern );
        pattern.addConstraint( new ExprConstraintDescr("type == $type") );
        // Another query, no parameters
        QueryDescr queryDescr2 = new QueryDescr( "query2" );
        packageDescr.addRule( queryDescr2 );
        AndDescr lhs2 = new AndDescr();
        queryDescr2.setLhs( lhs2 );
        PatternDescr pattern2 = new PatternDescr( Cheese.class.getName() );
        lhs2.addDescr( pattern2 );
        builder.addPackage( packageDescr );
        assertLength( 0,
                      builder.getErrors().getErrors() );
        InternalKnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
        kbase.addPackages(Arrays.asList( new KiePackage[] { builder.getPackage(packageDescr.getName()) } ) );
        final KieSession session = kbase.newKieSession();
        session.insert( new Cheese( "stilton",
                                    15 ) );
        // matching parameter: exactly the inserted stilton must be returned
        QueryResults results = session.getQueryResults( "query1",
                                                        "stilton" );
        assertEquals( 1,
                      results.size() );
        Object object = results.iterator().next().get("stilton");
        assertEquals( new Cheese( "stilton",
                                  15 ),
                      object );
        // non-matching parameter: empty result
        results = session.getQueryResults( "query1",
                                           new Object[]{"cheddar"} );
        assertEquals( 0,
                      results.size() );
        session.insert(new Cheese("dolcelatte",
                                  20));
        // unparametrized query matches every Cheese fact in the session
        results = session.getQueryResults( "query2",
                                           new Object[]{} );
        assertEquals( 2,
                      results.size() );
    }
}
| apache-2.0 |
slinstaedt/ddd4j-common-values | src/main/java/org/ddd4j/spi/ServiceFactory.java | 825 | package org.ddd4j.spi;
import org.ddd4j.util.Require;
import org.ddd4j.util.Throwing;
import org.ddd4j.util.Throwing.TConsumer;
/**
 * Factory for creating service instances from a {@link Context}, with an optional
 * tear-down hook. As a functional interface it is typically supplied as a lambda for
 * {@link #create(Context)}; destruction is a no-op unless overridden or attached via
 * {@link #withDestructor(TConsumer)}.
 *
 * @param <T> the type of service created by this factory
 */
@FunctionalInterface
public interface ServiceFactory<T> {

    /**
     * Creates a service instance using the given context.
     *
     * @param context the context the service is created from
     * @return the created service
     * @throws Exception if creation fails
     */
    T create(Context context) throws Exception;

    /**
     * Like {@link #create(Context)}, but without a checked exception: failures are
     * delegated to {@link Throwing#unchecked(Exception)} (presumably rethrown
     * unchecked — see Throwing).
     *
     * @param context the context the service is created from
     * @return the created service
     */
    default T createUnchecked(Context context) {
        try {
            return create(context);
        } catch (Exception e) {
            return Throwing.unchecked(e);
        }
    }

    /**
     * Destroys a previously created service. The default implementation does nothing.
     *
     * @param service the service to destroy
     * @throws Exception if destruction fails
     */
    default void destroy(T service) throws Exception {
    }

    /**
     * Returns a new factory that delegates creation to this one and runs the given
     * destructor when {@link #destroy(Object)} is called.
     *
     * @param destructor the operation invoked with the service on destroy; must not be null
     * @return a factory combining this factory's creation with the given destructor
     */
    default ServiceFactory<T> withDestructor(TConsumer<? super T> destructor) {
        Require.nonNull(destructor);
        return new ServiceFactory<T>() {

            @Override
            public T create(Context context) throws Exception {
                return ServiceFactory.this.create(context);
            }

            @Override
            public void destroy(T service) throws Exception {
                destructor.acceptChecked(service);
            }
        };
    }
}
| apache-2.0 |
bobmcwhirter/drools | drools-solver/drools-solver-core/src/main/java/org/drools/solver/core/localsearch/decider/selector/CompositeSelector.java | 2648 | package org.drools.solver.core.localsearch.decider.selector;
import java.util.ArrayList;
import java.util.List;
import org.drools.solver.core.localsearch.LocalSearchSolver;
import org.drools.solver.core.localsearch.LocalSearchSolverScope;
import org.drools.solver.core.localsearch.StepScope;
import org.drools.solver.core.move.Move;
/**
* @author Geoffrey De Smet
*/
public class CompositeSelector extends AbstractSelector {
protected List<Selector> selectorList;
/** Injects the child selectors this composite delegates to (configuration setter). */
public void setSelectorList(List<Selector> selectorList) {
    this.selectorList = selectorList;
}
@Override
public void setLocalSearchSolver(LocalSearchSolver localSearchSolver) {
    super.setLocalSearchSolver(localSearchSolver);
    // Propagate the solver reference to every child selector as well.
    for (Selector selector : selectorList) {
        selector.setLocalSearchSolver(localSearchSolver);
    }
}
// ************************************************************************
// Worker methods
// ************************************************************************
@Override
public void solvingStarted(LocalSearchSolverScope localSearchSolverScope) {
    // Fan the solving-started lifecycle event out to all child selectors.
    for (Selector selector : selectorList) {
        selector.solvingStarted(localSearchSolverScope);
    }
}
@Override
public void beforeDeciding(StepScope stepScope) {
    // Fan the before-deciding lifecycle event out to all child selectors.
    for (Selector selector : selectorList) {
        selector.beforeDeciding(stepScope);
    }
}
/**
 * Collects the candidate moves of every child selector into one combined list.
 * The combined list is pre-sized to the summed sub-list sizes to avoid re-allocation.
 */
public List<Move> selectMoveList(StepScope stepScope) {
    int totalSize = 0;
    List<List<Move>> subMoveLists = new ArrayList<List<Move>>(selectorList.size());
    for (Selector selector : selectorList) {
        List<Move> subMoveList = selector.selectMoveList(stepScope);
        totalSize += subMoveList.size();
        subMoveLists.add(subMoveList);
    }
    // Concatenate the sub-lists in selector order (no interleaving).
    List<Move> moveList = new ArrayList<Move>(totalSize);
    for (List<Move> subMoveList : subMoveLists) {
        moveList.addAll(subMoveList);
    }
    // TODO support overall shuffling
    return moveList;
}
@Override
public void stepDecided(StepScope stepScope) {
    // Fan the step-decided lifecycle event out to all child selectors.
    for (Selector selector : selectorList) {
        selector.stepDecided(stepScope);
    }
}
@Override
public void stepTaken(StepScope stepScope) {
    // Fan the step-taken lifecycle event out to all child selectors.
    for (Selector selector : selectorList) {
        selector.stepTaken(stepScope);
    }
}
@Override
public void solvingEnded(LocalSearchSolverScope localSearchSolverScope) {
    // Fan the solving-ended lifecycle event out to all child selectors.
    for (Selector selector : selectorList) {
        selector.solvingEnded(localSearchSolverScope);
    }
}
} | apache-2.0 |
garyhu1/collapselrecycler | app/src/main/java/com/garyhu/citypickerdemo/widget/behaviordemo/ToolBarIconBehavior.java | 2411 | package com.garyhu.citypickerdemo.widget.behaviordemo;
import android.content.Context;
import android.support.design.widget.CoordinatorLayout;
import android.util.AttributeSet;
import android.view.View;
import android.widget.ImageView;
import com.garyhu.citypickerdemo.R;
/**
 * {@link CoordinatorLayout.Behavior} that cross-fades the toolbar back/share icons
 * between a white and a black variant while the dependent content view is translated
 * vertically.
 * <p>
 * Created by cqll on 2016/12/13.
 */
public class ToolBarIconBehavior extends CoordinatorLayout.Behavior {

    // Toolbar height, maximum dependency translation for a complete transition,
    // and the starting icon size — all in pixels.
    private float mHeightToolbar, mDependencyMaxTranslation, mIconSizeStart;
    private ImageView mImgBack, mImgShare;

    public ToolBarIconBehavior() {
    }

    public ToolBarIconBehavior(Context context, AttributeSet attrs) {
        super(context, attrs);
        mHeightToolbar = context.getResources().getDimensionPixelOffset(R.dimen.toolbar_height);
        mIconSizeStart = context.getResources().getDimensionPixelOffset(R.dimen.img_icon_height_start);
    }

    @Override
    public boolean onLayoutChild(CoordinatorLayout parent, View child, int layoutDirection) {
        parent.onLayoutChild(child, layoutDirection);
        mImgBack = (ImageView) parent.findViewById(R.id.img_back);
        mImgShare = (ImageView) parent.findViewById(R.id.img_share);
        // The transition completes once the dependency has moved two toolbar heights
        // plus half the starting icon size.
        mDependencyMaxTranslation = 2*mHeightToolbar+mIconSizeStart/2;
        return true;
    }

    // Depends on the white content background view.
    @Override
    public boolean layoutDependsOn(CoordinatorLayout parent, View child, View dependency) {
        return dependency.getId() == R.id.fyt_content;
    }

    @Override
    public boolean onDependentViewChanged(CoordinatorLayout parent, View child, View dependency) {
        float fraction = Math.abs(dependency.getTranslationY()) / mDependencyMaxTranslation;
        if (fraction < 0.2) {// white-icon phase
            fraction = 1 - fraction / 0.2f;// alpha fades from 1 down to 0
            setWhiteIcon();
        } else {// black-icon phase
            fraction = (fraction - 0.2f) / 0.8f;// alpha fades from 0 up to 1
            setBlackIcon();
        }
        mImgBack.setAlpha(fraction);
        mImgShare.setAlpha(fraction);
        return true;
    }

    // Swap both icons to their white drawable variants.
    private void setWhiteIcon() {
        mImgBack.setImageResource(R.drawable.ic_arrow_back_white_24dp);
        mImgShare.setImageResource(R.drawable.ic_share_white_24dp);
    }

    // Swap both icons to their black drawable variants.
    private void setBlackIcon() {
        mImgBack.setImageResource(R.drawable.ic_arrow_back_black_24dp);
        mImgShare.setImageResource(R.drawable.ic_share_black_24dp);
    }
}
| apache-2.0 |
pjungermann/config-validator | src/test/java/com/github/pjungermann/config/specification/constraint/InvalidConstraintConfigErrorTest.java | 3392 | /*
* Copyright 2015-2016 Patrick Jungermann
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.pjungermann.config.specification.constraint;
import com.github.pjungermann.config.Config;
import com.github.pjungermann.config.ConfigError;
import com.github.pjungermann.config.reference.SourceLine;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.junit.Test;
import org.springframework.context.MessageSourceResolvable;
import org.springframework.context.support.DefaultMessageSourceResolvable;
import java.io.File;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
/**
 * Tests for {@link InvalidConstraintConfigError}.
 *
 * @author Patrick Jungermann
 */
public class InvalidConstraintConfigErrorTest {

    @Test
    public void toString_always_messageContainingConstraintNameAndConfigKeyAndUsedConfig() {
        InvalidConstraintConfigError underTest =
                new InvalidConstraintConfigError(new FakeConstraint(), null);

        // The textual form mentions the constraint name, the config key and the used config.
        String expected = "Illegal config for constraint \"fake\" for config key \"fake-key\": null";
        assertEquals(expected, underTest.toString());
    }

    @Test
    public void getMessage_always_resolvableWithCorrectCodeAndArguments() {
        InvalidConstraintConfigError underTest =
                new InvalidConstraintConfigError(new FakeConstraint(), null);

        MessageSourceResolvable message = underTest.getMessage();

        // Exactly one message code, which is also used as the default message.
        String[] codes = message.getCodes();
        assertEquals(1, codes.length);
        assertEquals("errors.constraints.config.invalid", codes[0]);

        // Arguments: config key, constraint name, the config itself and the source line.
        Object[] expectedArguments = {
                "fake-key", "fake", null, new SourceLine(new File("fake"), -1)
        };
        assertArrayEquals(expectedArguments, message.getArguments());
        assertEquals(codes[0], message.getDefaultMessage());
    }

    /** Minimal {@link Constraint} stub with a fixed key, fixed source line and identity equality. */
    static class FakeConstraint implements Constraint {

        @NotNull
        @Override
        public SourceLine definedAt() {
            return new SourceLine(new File("fake"), -1);
        }

        @NotNull
        @Override
        public String getKey() {
            return "fake-key";
        }

        @Override
        public boolean supports(Class type) {
            return false;
        }

        @Nullable
        @Override
        public ConfigError validate(@NotNull Config config) {
            return null;
        }

        @NotNull
        @Override
        public MessageSourceResolvable getMessage(@Nullable Object value) {
            if (value == null) {
                return new DefaultMessageSourceResolvable("<null>");
            }
            return new DefaultMessageSourceResolvable(value.toString());
        }

        @Override
        public int compareTo(@NotNull Constraint other) {
            if (this.equals(other)) {
                return 0;
            }
            return -1;
        }

        @Override
        public boolean equals(Object other) {
            return this == other;
        }

        @Override
        public int hashCode() {
            return 1;
        }
    }
}
| apache-2.0 |
Malamut54/dbobrov | chapter_008/src/main/java/ru/job4j/sqljob/Init.java | 1781 | package ru.job4j.sqljob;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.Properties;
/**
 * Final task. Parse sql.ru.
 *
 * <p>Loads the scheduler start frequency and database credentials from the
 * {@code sqljob.properties} file located in the working directory.</p>
 *
 * @author Dmitriy Bobrov (bobrov.dmitriy@gmail.com)
 * @since 11.12.2017
 */
public class Init {
    /**
     * Start frequency (property "PeriodicityValue").
     */
    private int periodicity;
    /**
     * URL to DB (property "Database.DataURL").
     */
    private String urlToDB;
    /**
     * User for DB (property "Database.Prop.user").
     */
    private String user;
    /**
     * Password to DB (property "Database.Prop.password").
     */
    private String password;

    /**
     * Constructor. Eagerly reads all settings from the properties file.
     */
    public Init() {
        this.getCredentials();
    }

    /**
     * Getter for startup frequency.
     *
     * @return the configured periodicity value.
     */
    int getPeriodicity() {
        return periodicity;
    }

    /**
     * Getter for URL.
     *
     * @return the database URL.
     */
    public String getUrlToDB() {
        return urlToDB;
    }

    /**
     * Getter for user.
     *
     * @return the database user name.
     */
    public String getUser() {
        return user;
    }

    /**
     * Getter for password.
     *
     * @return the database password.
     */
    public String getPassword() {
        return password;
    }

    /**
     * Get credentials from the property file.
     *
     * <p>Bug fix: the input stream is now opened with try-with-resources —
     * the previous version leaked the {@link FileInputStream}.</p>
     */
    private void getCredentials() {
        Properties properties = new Properties();
        try (FileInputStream input = new FileInputStream(new File("sqljob.properties"))) {
            properties.load(input);
        } catch (IOException e) {
            // Best-effort, as before: log and continue.
            // NOTE(review): a missing file still causes an NPE in parseInt below —
            // confirm whether fail-fast behaviour is intended here.
            e.printStackTrace();
        }
        periodicity = Integer.parseInt(properties.getProperty("PeriodicityValue"));
        urlToDB = properties.getProperty("Database.DataURL");
        user = properties.getProperty("Database.Prop.user");
        password = properties.getProperty("Database.Prop.password");
    }
}
| apache-2.0 |
sahb1239/PLC- | PLC++/src/dk/aau/sw402F15/TypeChecker/ExpressionTypeEvaluator.java | 16292 | package dk.aau.sw402F15.TypeChecker;
import dk.aau.sw402F15.Exception.TypeChecker.*;
import dk.aau.sw402F15.Helper;
import dk.aau.sw402F15.Symboltable.Scope;
import dk.aau.sw402F15.Symboltable.Symbol;
import dk.aau.sw402F15.Symboltable.SymbolArray;
import dk.aau.sw402F15.Symboltable.SymbolFunction;
import dk.aau.sw402F15.Symboltable.Type.SymbolType;
import dk.aau.sw402F15.parser.analysis.DepthFirstAdapter;
import dk.aau.sw402F15.parser.node.*;
import sun.reflect.generics.reflectiveObjects.NotImplementedException;
import java.util.ArrayList;
import java.util.List;
import java.util.Stack;
/**
 * Computes the static {@link SymbolType} of an expression subtree.
 *
 * <p>Works as a bottom-up evaluator: each visited sub-expression pushes its
 * type onto a work stack; unary/binary handlers pop their operand types,
 * check compatibility and push the result type. Where implicit promotion is
 * allowed (int to decimal, port to bool) the AST is rewritten in place with
 * an explicit cast node via {@code replaceBy}.</p>
 *
 * Created by sahb on 24/04/15.
 */
public class ExpressionTypeEvaluator extends DepthFirstAdapter {

    // Types of the already-evaluated sub-expressions (operand stack).
    private Stack<SymbolType> stack;

    // Scope used to resolve identifiers and function symbols.
    private Scope scope;

    public ExpressionTypeEvaluator(Scope scope) {
        this.stack = new Stack<SymbolType>();
        this.scope = scope;
    }

    /**
     * Returns the type of the evaluated expression.
     *
     * @return the single remaining type on the work stack.
     * @throws IllegalStateException if evaluation did not leave exactly one
     *         type behind (malformed tree or misused visitor).
     */
    public SymbolType getResult() {
        // Fix: previously threw sun.reflect.generics.reflectiveObjects.NotImplementedException,
        // a JDK-internal class that is inaccessible on modern JDKs; a standard
        // IllegalStateException with a message is the right signal for this sanity check.
        if (stack.size() != 1) {
            throw new IllegalStateException(
                    "expected exactly 1 type on the stack after evaluation, found " + stack.size());
        }
        return stack.pop();
    }

    // Assignment: delegate to the dedicated checker and push the resulting type.
    @Override
    public void caseAAssignmentExpr(AAssignmentExpr node) {
        AssignmentTypeChecker assignmentChecker = new AssignmentTypeChecker(scope);
        node.apply(assignmentChecker);
        stack.push(assignmentChecker.getResult());
    }

    // Member expression: delegate to the member-expression evaluator.
    @Override
    public void caseAMemberExpr(AMemberExpr node) {
        MemberExpressionEvaluator memberExpressionEvaluator = new MemberExpressionEvaluator(scope);
        node.apply(memberExpressionEvaluator);
        stack.push(memberExpressionEvaluator.getSymbol());
    }

    /**
     * Function call: resolve the symbol, verify it is a function, check the
     * argument count and each argument's type, then push the return type.
     */
    @Override
    public void caseAFunctionCallExpr(AFunctionCallExpr node) {
        Symbol symbol = scope.getSymbolOrThrow(node.getName().getText(), node);
        // Check if symbol is a function - if not throw an exception.
        if (symbol.getClass() != SymbolFunction.class) {
            // Fix: the exception now carries a message instead of being empty.
            throw new RuntimeException("symbol is not a function: " + node.getName().getText());
        }
        // Cast to SymbolFunction
        SymbolFunction func = (SymbolFunction) symbol;
        // Check arguments
        {
            ExpressionTypeEvaluator expressionEvaluator = new ExpressionTypeEvaluator(scope);
            List<PExpr> copy = new ArrayList<PExpr>(node.getArgs());
            // Check number of parameters
            if (copy.size() != func.getFormalParameters().size())
                throw new WrongNumberOfParameters(node, func.getFormalParameters().size(), copy.size());
            // Check each argument expression against its formal parameter type
            for (int i = 0; i < copy.size(); i++) {
                PExpr e = copy.get(i);
                // Get expression type
                e.apply(expressionEvaluator);
                SymbolType type = expressionEvaluator.getResult();
                if (!type.equals(func.getFormalParameters().get(i)))
                    throw new WrongParameterTypeException(node, func.getFormalParameters().get(i), type);
            }
        }
        // Push return type to stack
        stack.push(func.getReturnType());
    }

    // Identifier (variables): resolve via the scope.
    @Override
    public void outAIdentifierExpr(AIdentifierExpr node) {
        super.outAIdentifierExpr(node);
        stack.push(scope.getSymbolOrThrow(node.getName().getText(), node).getType());
    }

    // Constants
    @Override
    public void outAIntegerExpr(AIntegerExpr node) {
        super.outAIntegerExpr(node);
        stack.push(SymbolType.Int());
    }

    @Override
    public void outADecimalExpr(ADecimalExpr node) {
        super.outADecimalExpr(node);
        stack.push(SymbolType.Decimal());
    }

    @Override
    public void outATrueExpr(ATrueExpr node) {
        super.outATrueExpr(node);
        stack.push(SymbolType.Boolean());
    }

    @Override
    public void outAFalseExpr(AFalseExpr node) {
        super.outAFalseExpr(node);
        stack.push(SymbolType.Boolean());
    }

    // Port expressions: the port address expression must have the expected type.
    @Override
    public void outAPortInputExpr(APortInputExpr node) {
        super.outAPortInputExpr(node);
        if (stack.pop().equals(SymbolType.Type.Decimal)) {
            stack.push(SymbolType.PortInput());
        } else {
            throw new InvalidPortException(node);
        }
    }

    @Override
    public void outAPortOutputExpr(APortOutputExpr node) {
        super.outAPortOutputExpr(node);
        if (stack.pop().equals(SymbolType.Type.Decimal)) {
            // (sic: "PortOuput" is the spelling of the project API method)
            stack.push(SymbolType.PortOuput());
        } else {
            throw new InvalidPortException(node);
        }
    }

    @Override
    public void outAPortMemoryExpr(APortMemoryExpr node) {
        super.outAPortMemoryExpr(node);
        if (stack.pop().equals(SymbolType.Type.Int)) {
            stack.push(SymbolType.PortMemory());
        } else {
            throw new InvalidPortException(node);
        }
    }

    @Override
    public void outAPortAnalogInputExpr(APortAnalogInputExpr node) {
        super.outAPortAnalogInputExpr(node);
        // Fix: UnsupportedOperationException replaces the JDK-internal NotImplementedException.
        throw new UnsupportedOperationException("analog input ports are not supported yet");
    }

    @Override
    public void outAPortAnalogOutputExpr(APortAnalogOutputExpr node) {
        super.outAPortAnalogOutputExpr(node);
        // Fix: UnsupportedOperationException replaces the JDK-internal NotImplementedException.
        throw new UnsupportedOperationException("analog output ports are not supported yet");
    }

    // Casts: no type check is performed; the target type simply replaces the operand type.
    @Override
    public void outATypeCastExpr(ATypeCastExpr node) {
        super.outATypeCastExpr(node);
        stack.pop();
        stack.push(Helper.getSymbolTypeFromTypeSpecifier(node.getTargetType()));
    }

    // Increment and decrement: push the identifier's type on entry, verify it is numeric on exit.
    @Override
    public void inAIncrementExpr(AIncrementExpr node) {
        super.inAIncrementExpr(node);
        // Push identifier type on stack
        stack.push(scope.getSymbolOrThrow(node.getName().getText(), node).getType());
    }

    @Override
    public void outAIncrementExpr(AIncrementExpr node) {
        super.outAIncrementExpr(node);
        checkUnary(node);
    }

    @Override
    public void inADecrementExpr(ADecrementExpr node) {
        super.inADecrementExpr(node);
        // Push identifier type on stack
        stack.push(scope.getSymbolOrThrow(node.getName().getText(), node).getType());
    }

    @Override
    public void outADecrementExpr(ADecrementExpr node) {
        super.outADecrementExpr(node);
        checkUnary(node);
    }

    // Unary plus and minus
    @Override
    public void outAUnaryPlusExpr(AUnaryPlusExpr node) {
        super.outAUnaryPlusExpr(node);
        checkUnary(node);
    }

    @Override
    public void outAUnaryMinusExpr(AUnaryMinusExpr node) {
        super.outAUnaryMinusExpr(node);
        checkUnary(node);
    }

    // Negation (logical not)
    @Override
    public void outANegationExpr(ANegationExpr node) {
        super.outANegationExpr(node);
        checkUnaryBool(node);
    }

    // Logic comparison (&& ||)
    @Override
    public void outACompareAndExpr(ACompareAndExpr node) {
        super.outACompareAndExpr(node);
        checkLogicComparison(node, node.getLeft(), node.getRight());
    }

    @Override
    public void outACompareOrExpr(ACompareOrExpr node) {
        super.outACompareOrExpr(node);
        checkLogicComparison(node, node.getLeft(), node.getRight());
    }

    // Comparison (> < >= <= == !=)
    @Override
    public void outACompareGreaterExpr(ACompareGreaterExpr node) {
        super.outACompareGreaterExpr(node);
        checkComparison(node, node.getLeft(), node.getRight());
    }

    @Override
    public void outACompareLessExpr(ACompareLessExpr node) {
        super.outACompareLessExpr(node);
        checkComparison(node, node.getLeft(), node.getRight());
    }

    @Override
    public void outACompareLessOrEqualExpr(ACompareLessOrEqualExpr node) {
        super.outACompareLessOrEqualExpr(node);
        checkComparison(node, node.getLeft(), node.getRight());
    }

    @Override
    public void outACompareGreaterOrEqualExpr(ACompareGreaterOrEqualExpr node) {
        super.outACompareGreaterOrEqualExpr(node);
        checkComparison(node, node.getLeft(), node.getRight());
    }

    @Override
    public void outACompareEqualExpr(ACompareEqualExpr node) {
        super.outACompareEqualExpr(node);
        checkComparisonEquality(node, node.getLeft(), node.getRight());
    }

    @Override
    public void outACompareNotEqualExpr(ACompareNotEqualExpr node) {
        super.outACompareNotEqualExpr(node);
        checkComparisonEquality(node, node.getLeft(), node.getRight());
    }

    // Math operations (+ - * / %)
    @Override
    public void outAAddExpr(AAddExpr node) {
        super.outAAddExpr(node);
        checkExpression(node, node.getLeft(), node.getRight());
    }

    @Override
    public void outASubExpr(ASubExpr node) {
        super.outASubExpr(node);
        checkExpression(node, node.getLeft(), node.getRight());
    }

    @Override
    public void outAMultiExpr(AMultiExpr node) {
        super.outAMultiExpr(node);
        checkExpression(node, node.getLeft(), node.getRight());
    }

    @Override
    public void outADivExpr(ADivExpr node) {
        super.outADivExpr(node);
        checkExpression(node, node.getLeft(), node.getRight());
        // Statically reject division by a literal zero (int or decimal).
        if (node.getRight().getClass() == AIntegerExpr.class) {
            if (Integer.parseInt(((AIntegerExpr) node.getRight()).getIntegerLiteral().getText()) == 0)
                throw new DivisionByZeroException(node);
        } else if (node.getRight().getClass() == ADecimalExpr.class) {
            if (Float.parseFloat(((ADecimalExpr) node.getRight()).getDecimalLiteral().getText()) == 0.0)
                throw new DivisionByZeroException(node);
        }
    }

    @Override
    public void outAModExpr(AModExpr node) {
        super.outAModExpr(node);
        checkExpression(node, node.getLeft(), node.getRight());
    }

    // Ternary expression: cond ? a : b — all three operand types must be identical.
    @Override
    public void outATernaryExpr(ATernaryExpr node) {
        super.outATernaryExpr(node);
        SymbolType arg3 = stack.pop(), arg2 = stack.pop(), arg1 = stack.pop();
        if (!arg1.equals(arg2) || !arg2.equals(arg3)) {
            throw new IncompaitbleTypesException(node, arg1, arg2, arg3);
        }
    }

    // Array indexing: index must be int, target must be an array; pushes the element type.
    @Override
    public void outAArrayExpr(AArrayExpr node) {
        super.outAArrayExpr(node);
        // Index must be an int (no floating-point indices).
        SymbolType arg1 = stack.pop();
        if (!arg1.equals(SymbolType.Int())) {
            throw new ArrayIndexIsIntException(node);
        }
        // The identifier must resolve to an array symbol.
        Symbol symbol = scope.getSymbolOrThrow(node.getName().getText(), node);
        if (!symbol.getType().equals(SymbolType.Array())) {
            throw new ArrayIndexOfNonArrayException(node);
        }
        // Push the array's element type.
        SymbolArray array = (SymbolArray) symbol;
        stack.push(array.getContainedType());
    }

    // == != : operands must be equal types; ports are implicitly cast to bool.
    private void checkComparisonEquality(Node node, PExpr left, PExpr right) {
        SymbolType arg2 = stack.pop(), arg1 = stack.pop();
        if (arg1.equals(SymbolType.Type.PortInput) && arg2.equals(SymbolType.Type.PortInput)) {
            // Cast left
            left.replaceBy(new ATypeCastExpr(new ABoolTypeSpecifier(), (PExpr) left.clone()));
            // Cast right
            right.replaceBy(new ATypeCastExpr(new ABoolTypeSpecifier(), (PExpr) right.clone()));
            stack.push(SymbolType.Boolean());
        } else if (arg1.equals(arg2)) {
            stack.push(SymbolType.Boolean());
        } else if (arg1.equals(SymbolType.Type.PortInput) && arg2.equals(SymbolType.Type.Boolean)) {
            // Cast left
            left.replaceBy(new ATypeCastExpr(new ABoolTypeSpecifier(), (PExpr) left.clone()));
            stack.push(SymbolType.Boolean());
        } else if (arg2.equals(SymbolType.Type.PortInput) && arg1.equals(SymbolType.Type.Boolean)) {
            // Cast right
            right.replaceBy(new ATypeCastExpr(new ABoolTypeSpecifier(), (PExpr) right.clone()));
            stack.push(SymbolType.Boolean());
        } else {
            throw new IncompaitbleTypesException(node, arg1, arg2);
        }
    }

    // > >= < <= : numeric operands only; int is promoted to decimal when mixed.
    private void checkComparison(Node node, PExpr left, PExpr right) {
        SymbolType arg2 = stack.pop(), arg1 = stack.pop();
        if (arg1.equals(SymbolType.Type.Int) && arg2.equals(SymbolType.Type.Int)) {
            stack.push(SymbolType.Boolean());
        } else if (arg1.equals(SymbolType.Type.Decimal) && arg2.equals(SymbolType.Type.Decimal)) {
            stack.push(SymbolType.Boolean());
        } else if (arg1.equals(SymbolType.Type.Decimal) && arg2.equals(SymbolType.Type.Int)) {
            // Promote right
            right.replaceBy(new ATypeCastExpr(new ADoubleTypeSpecifier(), (PExpr) right.clone()));
            stack.push(SymbolType.Boolean());
        } else if (arg1.equals(SymbolType.Type.Int) && arg2.equals(SymbolType.Type.Decimal)) {
            // Promote left
            left.replaceBy(new ATypeCastExpr(new ADoubleTypeSpecifier(), (PExpr) left.clone()));
            stack.push(SymbolType.Boolean());
        } else {
            throw new IncompaitbleTypesException(node, arg1, arg2);
        }
    }

    // && || : boolean operands; input ports are implicitly cast to bool.
    // (Fix: renamed from the misspelled "checkLocicComparison"; private, so no callers break.)
    private void checkLogicComparison(Node node, PExpr left, PExpr right) {
        SymbolType arg2 = stack.pop(), arg1 = stack.pop();
        if ((arg1.equals(SymbolType.Type.Boolean) && arg2.equals(SymbolType.Type.Boolean))) {
            stack.push(SymbolType.Boolean());
        } else if (arg1.equals(SymbolType.Type.PortInput) && arg2.equals(SymbolType.Type.Boolean)) {
            // Cast left
            left.replaceBy(new ATypeCastExpr(new ABoolTypeSpecifier(), (PExpr) left.clone()));
            stack.push(SymbolType.Boolean());
        } else if (arg2.equals(SymbolType.Type.PortInput) && arg1.equals(SymbolType.Type.Boolean)) {
            // Cast right
            right.replaceBy(new ATypeCastExpr(new ABoolTypeSpecifier(), (PExpr) right.clone()));
            stack.push(SymbolType.Boolean());
        } else if (arg1.equals(SymbolType.Type.PortInput) && arg2.equals(SymbolType.Type.PortInput)) {
            // Cast left
            left.replaceBy(new ATypeCastExpr(new ABoolTypeSpecifier(), (PExpr) left.clone()));
            // Cast right
            right.replaceBy(new ATypeCastExpr(new ABoolTypeSpecifier(), (PExpr) right.clone()));
            stack.push(SymbolType.Boolean());
        } else {
            throw new IncompaitbleTypesException(node, arg1, arg2);
        }
    }

    // + - * / % : numeric operands; int is promoted to decimal when mixed.
    private void checkExpression(Node node, PExpr left, PExpr right) {
        SymbolType arg2 = stack.pop(), arg1 = stack.pop();
        if (arg1.equals(SymbolType.Type.Int) && arg2.equals(SymbolType.Type.Int)) {
            stack.push(SymbolType.Int());
        } else if (arg1.equals(SymbolType.Type.Decimal) && arg2.equals(SymbolType.Type.Decimal)) {
            stack.push(SymbolType.Decimal());
        } else if ((arg1.equals(SymbolType.Type.Decimal) && arg2.equals(SymbolType.Type.Int))) {
            // Promote right
            right.replaceBy(new ATypeCastExpr(new ADoubleTypeSpecifier(), (PExpr) right.clone()));
            stack.push(SymbolType.Decimal());
        } else if ((arg1.equals(SymbolType.Type.Int) && arg2.equals(SymbolType.Type.Decimal))) {
            // Promote left
            left.replaceBy(new ATypeCastExpr(new ADoubleTypeSpecifier(), (PExpr) left.clone()));
            stack.push(SymbolType.Decimal());
        } else {
            throw new IncompaitbleTypesException(node, arg1, arg2);
        }
    }

    // Unary numeric check: the operand (left on the stack) must be int or decimal.
    private void checkUnary(Node node) {
        // Don't pop - the type is unchanged by the operation.
        SymbolType type = stack.peek();
        if (!type.equals(SymbolType.Type.Int) && !type.equals(SymbolType.Type.Decimal)) {
            throw new ExpectingIntOrDecimalException(node, type);
        }
    }

    // Unary boolean check: the operand (left on the stack) must be boolean.
    private void checkUnaryBool(Node node) {
        // Don't pop - the type is unchanged by the operation.
        SymbolType type = stack.peek();
        if (!type.equals(SymbolType.Type.Boolean)) {
            throw new ExpectingBoolException(node, type);
        }
    }
}
| apache-2.0 |
nataraj06/Components | app/src/main/java/com/android/components/activity/BaseActivity.java | 148 | package com.android.components.activity;
import android.support.v7.app.AppCompatActivity;
/**
 * Common base class for the app's activities.
 * Currently adds no behaviour on top of {@link AppCompatActivity} —
 * presumably kept as a single extension point for app-wide activity logic.
 */
public class BaseActivity extends AppCompatActivity {
}
| apache-2.0 |
OleksandrProshak/Alexandr_Proshak | Level_Trainee/Part_001_Base_Syntax/4_Conditional_Operator/src/main/java/ru/job4j/task2/Max.java | 679 | package ru.job4j.task2;
/**
 * Maximum.
 *
 * @author Alex Proshak (olexandr_proshak@ukr.net)
 */
public class Max {
    /**
     * Maximum of two numbers.
     *
     * @param first - first param.
     * @param second - second param.
     * @return the larger of the two values.
     */
    public int max(int first, int second) {
        // Idiom: delegate to the standard library instead of a hand-rolled ternary.
        return Math.max(first, second);
    }

    /**
     * Maximum of three numbers.
     *
     * @param first - first number.
     * @param second - second number.
     * @param third - third number.
     * @return the largest of the three values.
     */
    public int max(int first, int second, int third) {
        return max(first, max(second, third));
    }
}
| apache-2.0 |
chunyang-wen/orc | java/tools/src/java/org/apache/orc/tools/JsonFileDump.java | 17136 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.orc.tools;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.orc.CompressionKind;
import org.apache.orc.OrcFile;
import org.apache.orc.Reader;
import org.apache.orc.TypeDescription;
import org.apache.orc.impl.AcidStats;
import org.apache.orc.impl.OrcAcidUtils;
import org.apache.orc.impl.RecordReaderImpl;
import org.apache.orc.util.BloomFilter;
import org.codehaus.jettison.json.JSONArray;
import org.apache.orc.util.BloomFilterIO;
import org.apache.orc.BinaryColumnStatistics;
import org.apache.orc.BooleanColumnStatistics;
import org.apache.orc.ColumnStatistics;
import org.apache.orc.impl.ColumnStatisticsImpl;
import org.apache.orc.DateColumnStatistics;
import org.apache.orc.DecimalColumnStatistics;
import org.apache.orc.DoubleColumnStatistics;
import org.apache.orc.IntegerColumnStatistics;
import org.apache.orc.impl.OrcIndex;
import org.apache.orc.OrcProto;
import org.apache.orc.StringColumnStatistics;
import org.apache.orc.StripeInformation;
import org.apache.orc.StripeStatistics;
import org.apache.orc.TimestampColumnStatistics;
import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject;
import org.codehaus.jettison.json.JSONStringer;
import org.codehaus.jettison.json.JSONWriter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* File dump tool with json formatted output.
*/
public class JsonFileDump {
private static final Logger LOG = LoggerFactory.getLogger(JsonFileDump.class);
public static void printJsonMetaData(List<String> files,
Configuration conf,
List<Integer> rowIndexCols, boolean prettyPrint, boolean printTimeZone)
throws JSONException, IOException {
if (files.isEmpty()) {
return;
}
JSONStringer writer = new JSONStringer();
boolean multiFile = files.size() > 1;
if (multiFile) {
writer.array();
} else {
writer.object();
}
for (String filename : files) {
try {
if (multiFile) {
writer.object();
}
writer.key("fileName").value(filename);
Path path = new Path(filename);
Reader reader = FileDump.getReader(path, conf, null);
if (reader == null) {
writer.key("status").value("FAILED");
continue;
}
writer.key("fileVersion").value(reader.getFileVersion().getName());
writer.key("writerVersion").value(reader.getWriterVersion());
RecordReaderImpl rows = (RecordReaderImpl) reader.rows();
writer.key("numberOfRows").value(reader.getNumberOfRows());
writer.key("compression").value(reader.getCompressionKind());
if (reader.getCompressionKind() != CompressionKind.NONE) {
writer.key("compressionBufferSize").value(reader.getCompressionSize());
}
writer.key("schemaString").value(reader.getSchema().toString());
writer.key("schema").array();
writeSchema(writer, reader.getTypes());
writer.endArray();
writer.key("stripeStatistics").array();
List<StripeStatistics> stripeStatistics = reader.getStripeStatistics();
for (int n = 0; n < stripeStatistics.size(); n++) {
writer.object();
writer.key("stripeNumber").value(n + 1);
StripeStatistics ss = stripeStatistics.get(n);
writer.key("columnStatistics").array();
for (int i = 0; i < ss.getColumnStatistics().length; i++) {
writer.object();
writer.key("columnId").value(i);
writeColumnStatistics(writer, ss.getColumnStatistics()[i]);
writer.endObject();
}
writer.endArray();
writer.endObject();
}
writer.endArray();
ColumnStatistics[] stats = reader.getStatistics();
int colCount = stats.length;
if (rowIndexCols == null) {
rowIndexCols = new ArrayList<>(colCount);
for (int i = 0; i < colCount; ++i) {
rowIndexCols.add(i);
}
}
writer.key("fileStatistics").array();
for (int i = 0; i < stats.length; ++i) {
writer.object();
writer.key("columnId").value(i);
writeColumnStatistics(writer, stats[i]);
writer.endObject();
}
writer.endArray();
writer.key("stripes").array();
int stripeIx = -1;
for (StripeInformation stripe : reader.getStripes()) {
++stripeIx;
long stripeStart = stripe.getOffset();
OrcProto.StripeFooter footer = rows.readStripeFooter(stripe);
writer.object(); // start of stripe information
writer.key("stripeNumber").value(stripeIx + 1);
writer.key("stripeInformation");
writeStripeInformation(writer, stripe);
if (printTimeZone) {
writer.key("writerTimezone").value(
footer.hasWriterTimezone() ? footer.getWriterTimezone() : FileDump.UNKNOWN);
}
long sectionStart = stripeStart;
writer.key("streams").array();
for (OrcProto.Stream section : footer.getStreamsList()) {
writer.object();
String kind = section.hasKind() ? section.getKind().name() : FileDump.UNKNOWN;
writer.key("columnId").value(section.getColumn());
writer.key("section").value(kind);
writer.key("startOffset").value(sectionStart);
writer.key("length").value(section.getLength());
sectionStart += section.getLength();
writer.endObject();
}
writer.endArray();
writer.key("encodings").array();
for (int i = 0; i < footer.getColumnsCount(); ++i) {
writer.object();
OrcProto.ColumnEncoding encoding = footer.getColumns(i);
writer.key("columnId").value(i);
writer.key("kind").value(encoding.getKind());
if (encoding.getKind() == OrcProto.ColumnEncoding.Kind.DICTIONARY ||
encoding.getKind() == OrcProto.ColumnEncoding.Kind.DICTIONARY_V2) {
writer.key("dictionarySize").value(encoding.getDictionarySize());
}
writer.endObject();
}
writer.endArray();
if (!rowIndexCols.isEmpty()) {
// include the columns that are specified, only if the columns are included, bloom filter
// will be read
boolean[] sargColumns = new boolean[colCount];
for (int colIdx : rowIndexCols) {
sargColumns[colIdx] = true;
}
OrcIndex indices = rows.readRowIndex(stripeIx, null, sargColumns);
writer.key("indexes").array();
for (int col : rowIndexCols) {
writer.object();
writer.key("columnId").value(col);
writeRowGroupIndexes(writer, col, indices.getRowGroupIndex());
writeBloomFilterIndexes(writer, col, indices,
reader.getWriterVersion(),
reader.getSchema().findSubtype(col).getCategory());
writer.endObject();
}
writer.endArray();
}
writer.endObject(); // end of stripe information
}
writer.endArray();
FileSystem fs = path.getFileSystem(conf);
long fileLen = fs.getContentSummary(path).getLength();
long paddedBytes = FileDump.getTotalPaddingSize(reader);
// empty ORC file is ~45 bytes. Assumption here is file length always >0
double percentPadding = ((double) paddedBytes / (double) fileLen) * 100;
writer.key("fileLength").value(fileLen);
writer.key("paddingLength").value(paddedBytes);
writer.key("paddingRatio").value(percentPadding);
AcidStats acidStats = OrcAcidUtils.parseAcidStats(reader);
if (acidStats != null) {
writer.key("numInserts").value(acidStats.inserts);
writer.key("numDeletes").value(acidStats.deletes);
writer.key("numUpdates").value(acidStats.updates);
}
writer.key("status").value("OK");
rows.close();
writer.endObject();
} catch (Exception e) {
writer.key("status").value("FAILED");
throw e;
}
}
if (multiFile) {
writer.endArray();
}
if (prettyPrint) {
final String prettyJson;
if (multiFile) {
JSONArray jsonArray = new JSONArray(writer.toString());
prettyJson = jsonArray.toString(2);
} else {
JSONObject jsonObject = new JSONObject(writer.toString());
prettyJson = jsonObject.toString(2);
}
System.out.println(prettyJson);
} else {
System.out.println(writer.toString());
}
}
private static void writeSchema(JSONStringer writer, List<OrcProto.Type> types)
throws JSONException {
int i = 0;
for(OrcProto.Type type : types) {
writer.object();
writer.key("columnId").value(i++);
writer.key("columnType").value(type.getKind());
if (type.getFieldNamesCount() > 0) {
writer.key("childColumnNames").array();
for (String field : type.getFieldNamesList()) {
writer.value(field);
}
writer.endArray();
writer.key("childColumnIds").array();
for (Integer colId : type.getSubtypesList()) {
writer.value(colId);
}
writer.endArray();
}
if (type.hasPrecision()) {
writer.key("precision").value(type.getPrecision());
}
if (type.hasScale()) {
writer.key("scale").value(type.getScale());
}
if (type.hasMaximumLength()) {
writer.key("maxLength").value(type.getMaximumLength());
}
writer.endObject();
}
}
private static void writeStripeInformation(JSONWriter writer, StripeInformation stripe)
throws JSONException {
writer.object();
writer.key("offset").value(stripe.getOffset());
writer.key("indexLength").value(stripe.getIndexLength());
writer.key("dataLength").value(stripe.getDataLength());
writer.key("footerLength").value(stripe.getFooterLength());
writer.key("rowCount").value(stripe.getNumberOfRows());
writer.endObject();
}
private static void writeColumnStatistics(JSONWriter writer, ColumnStatistics cs)
throws JSONException {
if (cs != null) {
writer.key("count").value(cs.getNumberOfValues());
writer.key("hasNull").value(cs.hasNull());
if (cs instanceof BinaryColumnStatistics) {
writer.key("totalLength").value(((BinaryColumnStatistics) cs).getSum());
writer.key("type").value(OrcProto.Type.Kind.BINARY);
} else if (cs instanceof BooleanColumnStatistics) {
writer.key("trueCount").value(((BooleanColumnStatistics) cs).getTrueCount());
writer.key("falseCount").value(((BooleanColumnStatistics) cs).getFalseCount());
writer.key("type").value(OrcProto.Type.Kind.BOOLEAN);
} else if (cs instanceof IntegerColumnStatistics) {
writer.key("min").value(((IntegerColumnStatistics) cs).getMinimum());
writer.key("max").value(((IntegerColumnStatistics) cs).getMaximum());
if (((IntegerColumnStatistics) cs).isSumDefined()) {
writer.key("sum").value(((IntegerColumnStatistics) cs).getSum());
}
writer.key("type").value(OrcProto.Type.Kind.LONG);
} else if (cs instanceof DoubleColumnStatistics) {
writer.key("min").value(((DoubleColumnStatistics) cs).getMinimum());
writer.key("max").value(((DoubleColumnStatistics) cs).getMaximum());
writer.key("sum").value(((DoubleColumnStatistics) cs).getSum());
writer.key("type").value(OrcProto.Type.Kind.DOUBLE);
} else if (cs instanceof StringColumnStatistics) {
writer.key("min").value(((StringColumnStatistics) cs).getMinimum());
writer.key("max").value(((StringColumnStatistics) cs).getMaximum());
writer.key("totalLength").value(((StringColumnStatistics) cs).getSum());
writer.key("type").value(OrcProto.Type.Kind.STRING);
} else if (cs instanceof DateColumnStatistics) {
if (((DateColumnStatistics) cs).getMaximum() != null) {
writer.key("min").value(((DateColumnStatistics) cs).getMinimum());
writer.key("max").value(((DateColumnStatistics) cs).getMaximum());
}
writer.key("type").value(OrcProto.Type.Kind.DATE);
} else if (cs instanceof TimestampColumnStatistics) {
if (((TimestampColumnStatistics) cs).getMaximum() != null) {
writer.key("min").value(((TimestampColumnStatistics) cs).getMinimum());
writer.key("max").value(((TimestampColumnStatistics) cs).getMaximum());
}
writer.key("type").value(OrcProto.Type.Kind.TIMESTAMP);
} else if (cs instanceof DecimalColumnStatistics) {
if (((DecimalColumnStatistics) cs).getMaximum() != null) {
writer.key("min").value(((DecimalColumnStatistics) cs).getMinimum());
writer.key("max").value(((DecimalColumnStatistics) cs).getMaximum());
writer.key("sum").value(((DecimalColumnStatistics) cs).getSum());
}
writer.key("type").value(OrcProto.Type.Kind.DECIMAL);
}
}
}
private static void writeBloomFilterIndexes(JSONWriter writer, int col,
OrcIndex index,
OrcFile.WriterVersion version,
TypeDescription.Category type
) throws JSONException {
BloomFilter stripeLevelBF = null;
OrcProto.BloomFilterIndex[] bloomFilterIndex = index.getBloomFilterIndex();
if (bloomFilterIndex != null && bloomFilterIndex[col] != null) {
int entryIx = 0;
writer.key("bloomFilterIndexes").array();
for (OrcProto.BloomFilter bf : bloomFilterIndex[col].getBloomFilterList()) {
writer.object();
writer.key("entryId").value(entryIx++);
BloomFilter toMerge = BloomFilterIO.deserialize(
index.getBloomFilterKinds()[col], version, type, bf);
writeBloomFilterStats(writer, toMerge);
if (stripeLevelBF == null) {
stripeLevelBF = toMerge;
} else {
stripeLevelBF.merge(toMerge);
}
writer.endObject();
}
writer.endArray();
}
if (stripeLevelBF != null) {
writer.key("stripeLevelBloomFilter");
writer.object();
writeBloomFilterStats(writer, stripeLevelBF);
writer.endObject();
}
}
private static void writeBloomFilterStats(JSONWriter writer, BloomFilter bf)
throws JSONException {
int bitCount = bf.getBitSize();
int popCount = 0;
for (long l : bf.getBitSet()) {
popCount += Long.bitCount(l);
}
int k = bf.getNumHashFunctions();
float loadFactor = (float) popCount / (float) bitCount;
float expectedFpp = (float) Math.pow(loadFactor, k);
writer.key("numHashFunctions").value(k);
writer.key("bitCount").value(bitCount);
writer.key("popCount").value(popCount);
writer.key("loadFactor").value(loadFactor);
writer.key("expectedFpp").value(expectedFpp);
}
/**
 * Writes the row-group index entries of one column as JSON, including each
 * entry's column statistics and stream positions.
 *
 * @param writer        destination JSON writer
 * @param col           column id whose index is dumped
 * @param rowGroupIndex per-column row indexes; may be null or shorter than col
 * @throws JSONException if the writer fails
 */
private static void writeRowGroupIndexes(JSONWriter writer, int col,
    OrcProto.RowIndex[] rowGroupIndex)
    throws JSONException {
  OrcProto.RowIndex index;
  if (rowGroupIndex == null || (col >= rowGroupIndex.length) ||
      ((index = rowGroupIndex[col]) == null)) {
    return;
  }
  writer.key("rowGroupIndexes").array();
  for (int entryIx = 0; entryIx < index.getEntryCount(); ++entryIx) {
    OrcProto.RowIndexEntry entry = index.getEntry(entryIx);
    // BUG FIX: the null check used to come AFTER writer.object() and the
    // "entryId" key, so a missing entry left an unterminated JSON object
    // in the output. Skip the entry before opening its object.
    if (entry == null) {
      continue;
    }
    writer.object();
    writer.key("entryId").value(entryIx);
    OrcProto.ColumnStatistics colStats = entry.getStatistics();
    writeColumnStatistics(writer, ColumnStatisticsImpl.deserialize(colStats));
    writer.key("positions").array();
    for (int posIx = 0; posIx < entry.getPositionsCount(); ++posIx) {
      writer.value(entry.getPositions(posIx));
    }
    writer.endArray();
    writer.endObject();
  }
  writer.endArray();
}
}
| apache-2.0 |
y12studio/y12java | beyes/src/main/java/tw/y12/beyes/googl/GgService.java | 250 | package tw.y12.beyes.googl;
import retrofit.Callback;
import retrofit.http.Body;
import retrofit.http.POST;
/**
 * Retrofit service definition for the Google URL Shortener API.
 */
public interface GgService {

    /**
     * POSTs {@code requestBody} to the urlshortener endpoint and delivers
     * the result asynchronously through {@code callback}.
     */
    @POST("/urlshortener/v1/url")
    void shortenUrl(@Body GgRequest requestBody, Callback<GgResponse> callback);
}
| apache-2.0 |
yvoswillens/flowable-engine | modules/flowable-engine/src/test/java/org/flowable/engine/test/history/CompositeHistoryManagerTest.java | 21202 | /* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.flowable.engine.test.history;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.ArgumentMatchers.same;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import org.flowable.bpmn.model.FlowElement;
import org.flowable.bpmn.model.SequenceFlow;
import org.flowable.common.engine.impl.history.HistoryLevel;
import org.flowable.engine.impl.history.CompositeHistoryManager;
import org.flowable.engine.impl.history.HistoryManager;
import org.flowable.engine.impl.persistence.entity.ActivityInstanceEntity;
import org.flowable.engine.impl.persistence.entity.ActivityInstanceEntityImpl;
import org.flowable.engine.impl.persistence.entity.ExecutionEntity;
import org.flowable.engine.impl.persistence.entity.ExecutionEntityImpl;
import org.flowable.engine.impl.persistence.entity.HistoricActivityInstanceEntity;
import org.flowable.engine.impl.persistence.entity.HistoricActivityInstanceEntityImpl;
import org.flowable.engine.impl.persistence.entity.ProcessDefinitionEntity;
import org.flowable.engine.impl.persistence.entity.ProcessDefinitionEntityImpl;
import org.flowable.engine.runtime.ActivityInstance;
import org.flowable.entitylink.service.impl.persistence.entity.EntityLinkEntity;
import org.flowable.entitylink.service.impl.persistence.entity.EntityLinkEntityImpl;
import org.flowable.identitylink.service.impl.persistence.entity.IdentityLinkEntity;
import org.flowable.identitylink.service.impl.persistence.entity.IdentityLinkEntityImpl;
import org.flowable.task.api.history.HistoricTaskLogEntryBuilder;
import org.flowable.task.service.impl.BaseHistoricTaskLogEntryBuilderImpl;
import org.flowable.task.service.impl.persistence.entity.TaskEntity;
import org.flowable.task.service.impl.persistence.entity.TaskEntityImpl;
import org.flowable.variable.service.impl.persistence.entity.VariableInstanceEntity;
import org.flowable.variable.service.impl.persistence.entity.VariableInstanceEntityImpl;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoSettings;
/**
* @author Filip Hrisafov
*/
@MockitoSettings
class CompositeHistoryManagerTest {

    @Mock
    protected HistoryManager historyManager1;

    @Mock
    protected HistoryManager historyManager2;

    // Under test: composite that delegates to historyManager1 then historyManager2.
    protected HistoryManager compositeHistoryManager;

    @BeforeEach
    void setUp() {
        compositeHistoryManager = new CompositeHistoryManager(Arrays.asList(historyManager1, historyManager2));
    }

    @Test
    void isHistoryLevelAtLeastOnlyLevelNoneSayTrue() {
        assertThat(compositeHistoryManager.isHistoryLevelAtLeast(HistoryLevel.FULL)).isFalse();
        verify(historyManager1).isHistoryLevelAtLeast(HistoryLevel.FULL);
        verify(historyManager2).isHistoryLevelAtLeast(HistoryLevel.FULL);
    }

    @Test
    void isHistoryLevelAtLeastOnlyLevelFirstSaysTrue() {
        when(historyManager1.isHistoryLevelAtLeast(HistoryLevel.FULL)).thenReturn(true);
        assertThat(compositeHistoryManager.isHistoryLevelAtLeast(HistoryLevel.FULL)).isTrue();
    }

    @Test
    void isHistoryLevelAtLeastWithDefinitionIdNoneSayTrue() {
        assertThat(compositeHistoryManager.isHistoryLevelAtLeast(HistoryLevel.AUDIT, "def-1")).isFalse();
        verify(historyManager1).isHistoryLevelAtLeast(HistoryLevel.AUDIT, "def-1");
        // BUG FIX: both verifications previously targeted historyManager1,
        // leaving the delegation to historyManager2 unasserted.
        verify(historyManager2).isHistoryLevelAtLeast(HistoryLevel.AUDIT, "def-1");
    }

    @Test
    void isHistoryLevelAtLeastWithDefinitionIdLastSaysTrue() {
        when(historyManager2.isHistoryLevelAtLeast(HistoryLevel.ACTIVITY, "def-2")).thenReturn(true);
        assertThat(compositeHistoryManager.isHistoryLevelAtLeast(HistoryLevel.ACTIVITY, "def-2")).isTrue();
        verify(historyManager1).isHistoryLevelAtLeast(HistoryLevel.ACTIVITY, "def-2");
    }

    @Test
    void isHistoryEnabledNoneSayTrue() {
        assertThat(compositeHistoryManager.isHistoryEnabled()).isFalse();
        verify(historyManager1).isHistoryEnabled();
        verify(historyManager2).isHistoryEnabled();
    }

    @Test
    void isHistoryEnabledLastSayTrue() {
        when(historyManager2.isHistoryEnabled()).thenReturn(true);
        assertThat(compositeHistoryManager.isHistoryEnabled()).isTrue();
        verify(historyManager1).isHistoryEnabled();
    }

    @Test
    void isHistoryEnabledWithDefinitionNoneSayTrue() {
        assertThat(compositeHistoryManager.isHistoryEnabled("def-1")).isFalse();
        verify(historyManager1).isHistoryEnabled("def-1");
        verify(historyManager2).isHistoryEnabled("def-1");
    }

    @Test
    void isHistoryEnabledWithDefinitionFirstSaysTrue() {
        when(historyManager1.isHistoryEnabled("def-2")).thenReturn(true);
        assertThat(compositeHistoryManager.isHistoryEnabled("def-2")).isTrue();
    }

    @Test
    void recordProcessInstanceEnd() {
        ExecutionEntity instance = new ExecutionEntityImpl();
        compositeHistoryManager.recordProcessInstanceEnd(instance, "reason", "activity-id");

        verify(historyManager1).recordProcessInstanceEnd(same(instance), eq("reason"), eq("activity-id"));
        verify(historyManager2).recordProcessInstanceEnd(same(instance), eq("reason"), eq("activity-id"));
    }

    @Test
    void recordProcessInstanceStart() {
        ExecutionEntity instance = new ExecutionEntityImpl();
        compositeHistoryManager.recordProcessInstanceStart(instance);

        verify(historyManager1).recordProcessInstanceStart(same(instance));
        verify(historyManager2).recordProcessInstanceStart(same(instance));
    }

    @Test
    void recordProcessInstanceNameChange() {
        ExecutionEntity instance = new ExecutionEntityImpl();
        compositeHistoryManager.recordProcessInstanceNameChange(instance, "new name");

        verify(historyManager1).recordProcessInstanceNameChange(same(instance), eq("new name"));
        verify(historyManager2).recordProcessInstanceNameChange(same(instance), eq("new name"));
    }

    @Test
    void recordProcessInstanceDeleted() {
        compositeHistoryManager.recordProcessInstanceDeleted("instance-id", "def-id");

        verify(historyManager1).recordProcessInstanceDeleted("instance-id", "def-id");
        verify(historyManager2).recordProcessInstanceDeleted("instance-id", "def-id");
    }

    @Test
    void recordDeleteHistoricProcessInstancesByProcessDefinitionId() {
        compositeHistoryManager.recordDeleteHistoricProcessInstancesByProcessDefinitionId("def-4");

        verify(historyManager1).recordDeleteHistoricProcessInstancesByProcessDefinitionId("def-4");
        verify(historyManager2).recordDeleteHistoricProcessInstancesByProcessDefinitionId("def-4");
    }

    @Test
    void recordActivityStart() {
        ActivityInstance instance = new ActivityInstanceEntityImpl();
        compositeHistoryManager.recordActivityStart(instance);

        verify(historyManager1).recordActivityStart(same(instance));
        verify(historyManager2).recordActivityStart(same(instance));
    }

    @Test
    void recordActivityEnd() {
        ActivityInstance instance = new ActivityInstanceEntityImpl();
        compositeHistoryManager.recordActivityEnd(instance);

        verify(historyManager1).recordActivityEnd(same(instance));
        verify(historyManager2).recordActivityEnd(same(instance));
    }

    @Test
    void recordActivityEndWithExecutionEntity() {
        ExecutionEntity instance = new ExecutionEntityImpl();
        compositeHistoryManager.recordActivityEnd(instance, "reason");

        verify(historyManager1).recordActivityEnd(same(instance), eq("reason"));
        verify(historyManager2).recordActivityEnd(same(instance), eq("reason"));
    }

    @Test
    void findHistoricActivityInstanceNoneReturn() {
        ExecutionEntity instance = new ExecutionEntityImpl();
        assertThat(compositeHistoryManager.findHistoricActivityInstance(instance, true)).isNull();

        verify(historyManager1).findHistoricActivityInstance(same(instance), eq(true));
        verify(historyManager2).findHistoricActivityInstance(same(instance), eq(true));
    }

    @Test
    void findHistoricActivityInstanceFirstReturns() {
        ExecutionEntity instance = new ExecutionEntityImpl();
        HistoricActivityInstanceEntity historicActivityInstance = new HistoricActivityInstanceEntityImpl();
        when(historyManager1.findHistoricActivityInstance(same(instance), eq(true))).thenReturn(historicActivityInstance);
        assertThat(compositeHistoryManager.findHistoricActivityInstance(instance, true)).isSameAs(historicActivityInstance);
    }

    @Test
    void recordProcessDefinitionChange() {
        compositeHistoryManager.recordProcessDefinitionChange("instance-id", "def-change");

        verify(historyManager1).recordProcessDefinitionChange("instance-id", "def-change");
        verify(historyManager2).recordProcessDefinitionChange("instance-id", "def-change");
    }

    @Test
    void recordTaskCreated() {
        TaskEntity task = new TaskEntityImpl();
        ExecutionEntity instance = new ExecutionEntityImpl();
        compositeHistoryManager.recordTaskCreated(task, instance);

        verify(historyManager1).recordTaskCreated(same(task), same(instance));
        verify(historyManager2).recordTaskCreated(same(task), same(instance));
    }

    @Test
    void recordTaskEnd() {
        TaskEntity task = new TaskEntityImpl();
        ExecutionEntity instance = new ExecutionEntityImpl();
        compositeHistoryManager.recordTaskEnd(task, instance, "test");

        verify(historyManager1).recordTaskEnd(same(task), same(instance), eq("test"));
        verify(historyManager2).recordTaskEnd(same(task), same(instance), eq("test"));
    }

    @Test
    void recordTaskInfoChange() {
        TaskEntity task = new TaskEntityImpl();
        compositeHistoryManager.recordTaskInfoChange(task, "activity");

        verify(historyManager1).recordTaskInfoChange(same(task), eq("activity"));
        verify(historyManager2).recordTaskInfoChange(same(task), eq("activity"));
    }

    @Test
    void recordVariableCreate() {
        VariableInstanceEntity variable = new VariableInstanceEntityImpl();
        compositeHistoryManager.recordVariableCreate(variable);

        verify(historyManager1).recordVariableCreate(same(variable));
        verify(historyManager2).recordVariableCreate(same(variable));
    }

    @Test
    void recordHistoricDetailVariableCreate() {
        VariableInstanceEntity variable = new VariableInstanceEntityImpl();
        ExecutionEntity execution = new ExecutionEntityImpl();
        compositeHistoryManager.recordHistoricDetailVariableCreate(variable, execution, true, "id");

        verify(historyManager1).recordHistoricDetailVariableCreate(same(variable), same(execution), eq(true), eq("id"));
        verify(historyManager2).recordHistoricDetailVariableCreate(same(variable), same(execution), eq(true), eq("id"));
    }

    @Test
    void recordVariableUpdate() {
        VariableInstanceEntity variable = new VariableInstanceEntityImpl();
        compositeHistoryManager.recordVariableUpdate(variable);

        verify(historyManager1).recordVariableUpdate(same(variable));
        verify(historyManager2).recordVariableUpdate(same(variable));
    }

    @Test
    void recordVariableRemoved() {
        VariableInstanceEntity variable = new VariableInstanceEntityImpl();
        compositeHistoryManager.recordVariableRemoved(variable);

        verify(historyManager1).recordVariableRemoved(same(variable));
        verify(historyManager2).recordVariableRemoved(same(variable));
    }

    @Test
    void createIdentityLinkComment() {
        TaskEntity task = new TaskEntityImpl();
        compositeHistoryManager.createIdentityLinkComment(task, "user-1", "group-1", "type-1", true);

        verify(historyManager1).createIdentityLinkComment(same(task), eq("user-1"), eq("group-1"), eq("type-1"), eq(true));
        verify(historyManager2).createIdentityLinkComment(same(task), eq("user-1"), eq("group-1"), eq("type-1"), eq(true));
    }

    @Test
    void createUserIdentityLinkComment() {
        TaskEntity task = new TaskEntityImpl();
        compositeHistoryManager.createUserIdentityLinkComment(task, "user-1", "type-1", true);

        verify(historyManager1).createUserIdentityLinkComment(same(task), eq("user-1"), eq("type-1"), eq(true));
        verify(historyManager2).createUserIdentityLinkComment(same(task), eq("user-1"), eq("type-1"), eq(true));
    }

    @Test
    void createGroupIdentityLinkComment() {
        TaskEntity task = new TaskEntityImpl();
        compositeHistoryManager.createGroupIdentityLinkComment(task, "group-1", "type-1", true);

        verify(historyManager1).createGroupIdentityLinkComment(same(task), eq("group-1"), eq("type-1"), eq(true));
        verify(historyManager2).createGroupIdentityLinkComment(same(task), eq("group-1"), eq("type-1"), eq(true));
    }

    @Test
    void createIdentityLinkCommentWithForceNullUser() {
        TaskEntity task = new TaskEntityImpl();
        compositeHistoryManager.createIdentityLinkComment(task, "user-2", "group-2", "type-2", false, false);

        verify(historyManager1).createIdentityLinkComment(same(task), eq("user-2"), eq("group-2"), eq("type-2"), eq(false), eq(false));
        verify(historyManager2).createIdentityLinkComment(same(task), eq("user-2"), eq("group-2"), eq("type-2"), eq(false), eq(false));
    }

    @Test
    void createUserIdentityLinkCommentWithForceNullUser() {
        TaskEntity task = new TaskEntityImpl();
        compositeHistoryManager.createUserIdentityLinkComment(task, "user-1", "type-1", true, true);

        verify(historyManager1).createUserIdentityLinkComment(same(task), eq("user-1"), eq("type-1"), eq(true), eq(true));
        verify(historyManager2).createUserIdentityLinkComment(same(task), eq("user-1"), eq("type-1"), eq(true), eq(true));
    }

    @Test
    void createProcessInstanceIdentityLinkComment() {
        ExecutionEntity processInstance = new ExecutionEntityImpl();
        compositeHistoryManager.createProcessInstanceIdentityLinkComment(processInstance, "user-1", "group-1", "type-1", true);

        verify(historyManager1).createProcessInstanceIdentityLinkComment(same(processInstance), eq("user-1"), eq("group-1"), eq("type-1"), eq(true));
        verify(historyManager2).createProcessInstanceIdentityLinkComment(same(processInstance), eq("user-1"), eq("group-1"), eq("type-1"), eq(true));
    }

    @Test
    void createProcessInstanceIdentityLinkCommentWithForceNullUser() {
        ExecutionEntity processInstance = new ExecutionEntityImpl();
        compositeHistoryManager.createProcessInstanceIdentityLinkComment(processInstance, "user-2", "group-2", "type-2", false, true);

        verify(historyManager1).createProcessInstanceIdentityLinkComment(same(processInstance), eq("user-2"), eq("group-2"), eq("type-2"), eq(false), eq(true));
        verify(historyManager2).createProcessInstanceIdentityLinkComment(same(processInstance), eq("user-2"), eq("group-2"), eq("type-2"), eq(false), eq(true));
    }

    @Test
    void createAttachmentComment() {
        TaskEntity task = new TaskEntityImpl();
        ExecutionEntity processInstance = new ExecutionEntityImpl();
        compositeHistoryManager.createAttachmentComment(task, processInstance, "name", true);

        verify(historyManager1).createAttachmentComment(same(task), same(processInstance), eq("name"), eq(true));
        verify(historyManager2).createAttachmentComment(same(task), same(processInstance), eq("name"), eq(true));
    }

    @Test
    void recordFormPropertiesSubmitted() {
        ExecutionEntity processInstance = new ExecutionEntityImpl();
        Map<String, String> properties = new HashMap<>();
        properties.put("key", "value");
        compositeHistoryManager.recordFormPropertiesSubmitted(processInstance, properties, "task-1");

        verify(historyManager1).recordFormPropertiesSubmitted(same(processInstance), eq(properties), eq("task-1"));
        verify(historyManager2).recordFormPropertiesSubmitted(same(processInstance), eq(properties), eq("task-1"));
    }

    @Test
    void recordIdentityLinkCreated() {
        IdentityLinkEntity identityLink = new IdentityLinkEntityImpl();
        compositeHistoryManager.recordIdentityLinkCreated(identityLink);

        verify(historyManager1).recordIdentityLinkCreated(same(identityLink));
        verify(historyManager2).recordIdentityLinkCreated(same(identityLink));
    }

    @Test
    void recordIdentityLinkDeleted() {
        IdentityLinkEntity identityLink = new IdentityLinkEntityImpl();
        compositeHistoryManager.recordIdentityLinkDeleted(identityLink);

        verify(historyManager1).recordIdentityLinkDeleted(same(identityLink));
        verify(historyManager2).recordIdentityLinkDeleted(same(identityLink));
    }

    @Test
    void recordEntityLinkCreated() {
        EntityLinkEntity entityLink = new EntityLinkEntityImpl();
        compositeHistoryManager.recordEntityLinkCreated(entityLink);

        verify(historyManager1).recordEntityLinkCreated(same(entityLink));
        verify(historyManager2).recordEntityLinkCreated(same(entityLink));
    }

    @Test
    void recordEntityLinkDeleted() {
        EntityLinkEntity entityLink = new EntityLinkEntityImpl();
        compositeHistoryManager.recordEntityLinkDeleted(entityLink);

        verify(historyManager1).recordEntityLinkDeleted(same(entityLink));
        verify(historyManager2).recordEntityLinkDeleted(same(entityLink));
    }

    @Test
    void updateProcessBusinessKeyInHistory() {
        ExecutionEntity processInstance = new ExecutionEntityImpl();
        compositeHistoryManager.updateProcessBusinessKeyInHistory(processInstance);

        verify(historyManager1).updateProcessBusinessKeyInHistory(same(processInstance));
        verify(historyManager2).updateProcessBusinessKeyInHistory(same(processInstance));
    }

    @Test
    void updateProcessDefinitionIdInHistory() {
        ProcessDefinitionEntity processDefinition = new ProcessDefinitionEntityImpl();
        ExecutionEntity processInstance = new ExecutionEntityImpl();
        compositeHistoryManager.updateProcessDefinitionIdInHistory(processDefinition, processInstance);

        verify(historyManager1).updateProcessDefinitionIdInHistory(same(processDefinition), same(processInstance));
        verify(historyManager2).updateProcessDefinitionIdInHistory(same(processDefinition), same(processInstance));
    }

    @Test
    void updateActivity() {
        ExecutionEntity execution = new ExecutionEntityImpl();
        FlowElement flowElement = new SequenceFlow();
        TaskEntity task = new TaskEntityImpl();
        compositeHistoryManager.updateActivity(execution, "old-id", flowElement, task);

        verify(historyManager1).updateActivity(same(execution), eq("old-id"), same(flowElement), same(task));
        verify(historyManager2).updateActivity(same(execution), eq("old-id"), same(flowElement), same(task));
    }

    @Test
    void updateHistoricActivityInstance() {
        ActivityInstanceEntity activityInstance = new ActivityInstanceEntityImpl();
        compositeHistoryManager.updateHistoricActivityInstance(activityInstance);

        verify(historyManager1).updateHistoricActivityInstance(same(activityInstance));
        verify(historyManager2).updateHistoricActivityInstance(same(activityInstance));
    }

    @Test
    void createHistoricActivityInstance() {
        ActivityInstanceEntity activityInstance = new ActivityInstanceEntityImpl();
        compositeHistoryManager.createHistoricActivityInstance(activityInstance);

        verify(historyManager1).createHistoricActivityInstance(same(activityInstance));
        verify(historyManager2).createHistoricActivityInstance(same(activityInstance));
    }

    @Test
    void recordHistoricUserTaskLogEntry() {
        HistoricTaskLogEntryBuilder taskLogEntryBuilder = new BaseHistoricTaskLogEntryBuilderImpl();
        compositeHistoryManager.recordHistoricUserTaskLogEntry(taskLogEntryBuilder);

        verify(historyManager1).recordHistoricUserTaskLogEntry(same(taskLogEntryBuilder));
        verify(historyManager2).recordHistoricUserTaskLogEntry(same(taskLogEntryBuilder));
    }

    @Test
    void deleteHistoricUserTaskLogEntry() {
        compositeHistoryManager.deleteHistoryUserTaskLog(10L);

        verify(historyManager1).deleteHistoryUserTaskLog(10L);
        verify(historyManager2).deleteHistoryUserTaskLog(10L);
    }

}
| apache-2.0 |
zhenggf/config.server | config.developer/src/main/java/cn/orgid/funny/config/web/developer/system/SystemAction.java | 323 | package cn.orgid.funny.config.web.developer.system;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
/**
 * Spring MVC controller serving the /system endpoints.
 */
@Controller
@RequestMapping("/system")
public class SystemAction {

    /**
     * Handles /system/do_register.
     * NOTE(review): returns an empty view name — presumably a stub; confirm
     * the intended view before relying on this mapping.
     */
    @RequestMapping("do_register")
    public String register(){
        return "";
    }
}
| apache-2.0 |
trustathsh/visitmeta | dataservice/src/main/java/de/hshannover/f4/trust/visitmeta/persistence/neo4j/LinkTypes.java | 1867 | /*
* #%L
* =====================================================
* _____ _ ____ _ _ _ _
* |_ _|_ __ _ _ ___| |_ / __ \| | | | ___ | | | |
* | | | '__| | | / __| __|/ / _` | |_| |/ __|| |_| |
* | | | | | |_| \__ \ |_| | (_| | _ |\__ \| _ |
* |_| |_| \__,_|___/\__|\ \__,_|_| |_||___/|_| |_|
* \____/
*
* =====================================================
*
* Hochschule Hannover
* (University of Applied Sciences and Arts, Hannover)
* Faculty IV, Dept. of Computer Science
* Ricklinger Stadtweg 118, 30459 Hannover, Germany
*
* Email: trust@f4-i.fh-hannover.de
* Website: http://trust.f4.hs-hannover.de/
*
* This file is part of visitmeta-dataservice, version 0.6.0,
* implemented by the Trust@HsH research group at the Hochschule Hannover.
* %%
* Copyright (C) 2012 - 2016 Trust@HsH
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package de.hshannover.f4.trust.visitmeta.persistence.neo4j;
import org.neo4j.graphdb.RelationshipType;
/**
 * Enumerates the different types of relationships in the Neo4J database.
 * <ul>
 * <li>Link: link between a dummy node and an identifier.</li>
 * <li>Meta: link to a metadata.</li>
 * </ul>
 *
 * @author ben
 *
 */
public enum LinkTypes implements RelationshipType {
    /** Relationship between a dummy node and an identifier node. */
    Link,
    /** Relationship pointing to a metadata node. */
    Meta
}
| apache-2.0 |
liangdas/showapp | appshow/src/com/xianv/apkshow/util/ACache.java | 21984 | package com.xianv.apkshow.util;
/**
* Copyright (c) 2012-2013, Michael Yang 杨福海 (www.yangfuhai.com).
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.RandomAccessFile;
import java.io.Serializable;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import org.json.JSONArray;
import org.json.JSONObject;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.PixelFormat;
import android.graphics.drawable.BitmapDrawable;
import android.graphics.drawable.Drawable;
/**
* @author Michael Yang(www.yangfuhai.com) update at 2013.08.07
*/
public class ACache {
public static final int TIME_HOUR = 60 * 60; // one hour, in seconds
public static final int TIME_DAY = TIME_HOUR * 24; // one day, in seconds
private static final int MAX_SIZE = 1000 * 1000 * 50; // 50 mb
private static final int MAX_COUNT = Integer.MAX_VALUE; // no limit on the number of cached entries
// One shared ACache instance per (cache directory, process id) pair.
private static Map<String, ACache> mInstanceMap = new HashMap<String, ACache>();
// Enforces the size/count limits and maps keys to files on disk.
private ACacheManager mCache;
/**
 * Returns the default cache ("ACache") for this application context.
 */
public static ACache get(Context ctx) {
    return get(ctx, "ACache");
}
/**
 * Returns a named cache stored under the application's cache directory,
 * using the default size and count limits.
 */
public static ACache get(Context ctx, String cacheName) {
    return get(new File(ctx.getCacheDir(), cacheName), MAX_SIZE, MAX_COUNT);
}
/**
 * Returns the cache rooted at {@code cacheDir} with default limits.
 */
public static ACache get(File cacheDir) {
    return get(cacheDir, MAX_SIZE, MAX_COUNT);
}
/**
 * Returns the "seach_ACache" cache under the app cache directory with
 * custom size/count limits.
 */
public static ACache get(Context ctx, long max_zise, int max_count) {
    return get(new File(ctx.getCacheDir(), "seach_ACache"), max_zise, max_count);
}
/**
 * Returns the singleton cache for {@code cacheDir} in this process,
 * creating it on first use.
 *
 * CONSISTENCY FIX: the lookup previously built its key from
 * getAbsoluteFile() while the insertion used getAbsolutePath(); the two
 * String values are equal, but computing the key once removes the
 * duplication and any chance of future divergence.
 * NOTE(review): mInstanceMap is a plain HashMap — this method is not
 * thread-safe; confirm callers only use it from one thread.
 */
public static ACache get(File cacheDir, long max_zise, int max_count) {
    String cacheKey = cacheDir.getAbsolutePath() + myPid();
    ACache manager = mInstanceMap.get(cacheKey);
    if (manager == null) {
        manager = new ACache(cacheDir, max_zise, max_count);
        mInstanceMap.put(cacheKey, manager);
    }
    return manager;
}
/**
 * Returns a per-process suffix ("_<pid>") used to scope cache instances.
 */
private static String myPid() {
    return "_" + android.os.Process.myPid();
}
/**
 * Creates a cache rooted at {@code cacheDir}, failing fast when the
 * directory cannot be created.
 */
private ACache(File cacheDir, long max_size, int max_count) {
    if (!cacheDir.exists() && !cacheDir.mkdirs()) {
        throw new RuntimeException("can't make dirs in "
                + cacheDir.getAbsolutePath());
    }
    mCache = new ACacheManager(cacheDir, max_size, max_count);
}
// =======================================
// ============ String数据 读写 ==============
// =======================================
/**
 * Save a String value into the cache.
 *
 * NOTE(review): FileWriter uses the platform default charset — presumably
 * acceptable because getAsString reads with FileReader (same charset), but
 * cached files are not portable across devices with different defaults.
 *
 * @param key
 *            the cache key
 * @param value
 *            the String to store
 */
public void put(String key, String value) {
    File file = mCache.newFile(key);
    BufferedWriter out = null;
    try {
        out = new BufferedWriter(new FileWriter(file), 1024);
        out.write(value);
    } catch (IOException e) {
        e.printStackTrace();
    } finally {
        if (out != null) {
            try {
                out.flush();
                out.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
        // Register the new file with the size/count manager.
        mCache.put(file);
    }
}
/**
 * Save a String value into the cache with an expiry time.
 *
 * @param key
 *            the cache key
 * @param value
 *            the String to store
 * @param saveTime
 *            time-to-live, in seconds
 */
public void put(String key, String value, int saveTime) {
    // The TTL is encoded into the stored payload as a date-info prefix.
    put(key, Utils.newStringWithDateInfo(saveTime, value));
}
/**
 * Read a cached String value.
 *
 * BUG FIX: the old implementation concatenated BufferedReader.readLine()
 * results, which silently dropped every line separator and corrupted any
 * multi-line value that put(String, String) had written. The content is
 * now read verbatim through a char buffer.
 *
 * @param key the cache key
 * @return the cached String, or null when missing or expired (expired
 *         entries are deleted as a side effect)
 */
public String getAsString(String key) {
    File file = mCache.get(key);
    if (!file.exists())
        return null;
    boolean removeFile = false;
    BufferedReader in = null;
    try {
        in = new BufferedReader(new FileReader(file));
        StringBuilder content = new StringBuilder();
        char[] buffer = new char[1024];
        int read;
        while ((read = in.read(buffer)) != -1) {
            content.append(buffer, 0, read);
        }
        String readString = content.toString();
        if (!Utils.isDue(readString)) {
            return Utils.clearDateInfo(readString);
        } else {
            // Entry expired: schedule deletion and report a miss.
            removeFile = true;
            return null;
        }
    } catch (IOException e) {
        e.printStackTrace();
        return null;
    } finally {
        if (in != null) {
            try {
                in.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
        if (removeFile)
            remove(key);
    }
}
// =======================================
// ============= JSONObject 数据 读写 ==============
// =======================================
/**
 * Save a JSONObject into the cache (stored as its String form).
 *
 * @param key
 *            the cache key
 * @param value
 *            the JSONObject to store
 */
public void put(String key, JSONObject value) {
    put(key, value.toString());
}
/**
 * Save a JSONObject into the cache with an expiry time.
 *
 * @param key
 *            the cache key
 * @param value
 *            the JSONObject to store
 * @param saveTime
 *            time-to-live, in seconds
 */
public void put(String key, JSONObject value, int saveTime) {
    put(key, value.toString(), saveTime);
}
/**
 * Read a cached JSONObject.
 *
 * IMPROVEMENT: a cache miss used to feed null into the JSONObject
 * constructor, relying on a caught NullPointerException (and a noisy stack
 * trace) to return null. The miss is now handled explicitly.
 *
 * @param key the cache key
 * @return the cached JSONObject, or null when missing, expired, or not
 *         parseable as JSON
 */
public JSONObject getAsJSONObject(String key) {
    String JSONString = getAsString(key);
    if (JSONString == null) {
        return null;
    }
    try {
        return new JSONObject(JSONString);
    } catch (Exception e) {
        e.printStackTrace();
        return null;
    }
}
// =======================================
// ============ JSONArray 数据 读写 =============
// =======================================
/**
 * Save a JSONArray into the cache (stored as its String form).
 *
 * @param key
 *            the cache key
 * @param value
 *            the JSONArray to store
 */
public void put(String key, JSONArray value) {
    put(key, value.toString());
}
/**
 * Save a JSONArray into the cache with an expiry time.
 *
 * @param key
 *            the cache key
 * @param value
 *            the JSONArray to store
 * @param saveTime
 *            time-to-live, in seconds
 */
public void put(String key, JSONArray value, int saveTime) {
    put(key, value.toString(), saveTime);
}
/**
 * Read a cached JSONArray.
 *
 * IMPROVEMENT: a cache miss used to feed null into the JSONArray
 * constructor and rely on the resulting exception; the miss is now
 * handled explicitly.
 *
 * @param key the cache key
 * @return the cached JSONArray, or null when missing, expired, or not
 *         parseable as JSON
 */
public JSONArray getAsJSONArray(String key) {
    String JSONString = getAsString(key);
    if (JSONString == null) {
        return null;
    }
    try {
        return new JSONArray(JSONString);
    } catch (Exception e) {
        e.printStackTrace();
        return null;
    }
}
// =======================================
// ============== byte 数据 读写 =============
// =======================================
/**
 * Save a byte array into the cache.
 *
 * @param key
 *            the cache key
 * @param value
 *            the bytes to store
 */
public void put(String key, byte[] value) {
    File file = mCache.newFile(key);
    FileOutputStream out = null;
    try {
        out = new FileOutputStream(file);
        out.write(value);
    } catch (Exception e) {
        e.printStackTrace();
    } finally {
        if (out != null) {
            try {
                out.flush();
                out.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
        // Register the new file with the size/count manager.
        mCache.put(file);
    }
}
/**
 * Save a byte array into the cache with an expiry time.
 *
 * @param key
 *            the cache key
 * @param value
 *            the bytes to store
 * @param saveTime
 *            time-to-live, in seconds
 */
public void put(String key, byte[] value, int saveTime) {
    // The TTL is encoded into the stored payload as a date-info prefix.
    put(key, Utils.newByteArrayWithDateInfo(saveTime, value));
}
/**
 * Read cached bytes.
 *
 * BUG FIX: a single RandomAccessFile.read(byte[]) call is not guaranteed
 * to fill the array, which could leave the tail of a large entry zeroed.
 * readFully() blocks until the whole array is populated (or throws).
 *
 * @param key the cache key
 * @return the cached bytes, or null when missing or expired (expired
 *         entries are deleted as a side effect)
 */
public byte[] getAsBinary(String key) {
    RandomAccessFile RAFile = null;
    boolean removeFile = false;
    try {
        File file = mCache.get(key);
        if (!file.exists())
            return null;
        RAFile = new RandomAccessFile(file, "r");
        byte[] byteArray = new byte[(int) RAFile.length()];
        RAFile.readFully(byteArray);
        if (!Utils.isDue(byteArray)) {
            return Utils.clearDateInfo(byteArray);
        } else {
            // Entry expired: schedule deletion and report a miss.
            removeFile = true;
            return null;
        }
    } catch (Exception e) {
        e.printStackTrace();
        return null;
    } finally {
        if (RAFile != null) {
            try {
                RAFile.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
        if (removeFile)
            remove(key);
    }
}
// =======================================
// ============= 序列化 数据 读写 ===============
// =======================================
/**
 * Save a Serializable object into the cache (no expiry).
 *
 * @param key
 *            the cache key
 * @param value
 *            the object to store
 */
public void put(String key, Serializable value) {
    // -1 means "no expiry" in the (key, value, saveTime) overload.
    put(key, value, -1);
}
/**
 * Save a Serializable object into the cache, optionally with an expiry.
 *
 * BUG FIX: the finally block closed {@code oos} unconditionally; when
 * ObjectOutputStream construction failed, {@code oos} was still null and
 * the cleanup itself threw a NullPointerException.
 *
 * @param key
 *            the cache key
 * @param value
 *            the object to store
 * @param saveTime
 *            time-to-live in seconds, or -1 for no expiry
 */
public void put(String key, Serializable value, int saveTime) {
    ByteArrayOutputStream baos = null;
    ObjectOutputStream oos = null;
    try {
        baos = new ByteArrayOutputStream();
        oos = new ObjectOutputStream(baos);
        oos.writeObject(value);
        byte[] data = baos.toByteArray();
        if (saveTime != -1) {
            put(key, data, saveTime);
        } else {
            put(key, data);
        }
    } catch (Exception e) {
        e.printStackTrace();
    } finally {
        if (oos != null) {
            try {
                oos.close();
            } catch (IOException e) {
            }
        }
    }
}
/**
 * Read a cached Serializable object.
 *
 * BUG FIX: the catch/cleanup blocks called e.fillInStackTrace(), which
 * only rewrites the trace and prints nothing, so deserialization failures
 * were silently swallowed; printStackTrace() is used instead.
 * NOTE(review): this uses native Java deserialization on cache contents —
 * presumably fine for an app-private cache dir, but do not point it at
 * untrusted data.
 *
 * @param key the cache key
 * @return the cached object, or null when missing, expired, or unreadable
 */
public Object getAsObject(String key) {
    byte[] data = getAsBinary(key);
    if (data != null) {
        ByteArrayInputStream bais = null;
        ObjectInputStream ois = null;
        try {
            bais = new ByteArrayInputStream(data);
            ois = new ObjectInputStream(bais);
            return ois.readObject();
        } catch (Exception e) {
            e.printStackTrace();
            return null;
        } finally {
            try {
                if (bais != null)
                    bais.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
            try {
                if (ois != null)
                    ois.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }
    return null;
}
// =======================================
// ============== bitmap 数据 读写 =============
// =======================================
/**
 * Stores a bitmap in the cache with no expiry.
 *
 * @param key
 *            cache key
 * @param value
 *            bitmap to store
 */
public void put(String key, Bitmap value) {
    // Serialized as PNG bytes by the Utils helper.
    put(key, Utils.Bitmap2Bytes(value));
}
/**
 * Stores a bitmap in the cache with an expiry.
 *
 * @param key
 *            cache key
 * @param value
 *            bitmap to store
 * @param saveTime
 *            time-to-live in seconds
 */
public void put(String key, Bitmap value, int saveTime) {
    put(key, Utils.Bitmap2Bytes(value), saveTime);
}
/**
 * Reads a cached bitmap.
 *
 * @param key
 *            cache key
 * @return the decoded bitmap, or null if absent
 */
public Bitmap getAsBitmap(String key) {
    // Bug fix: the entry was previously read twice (null check + decode),
    // doubling the disk I/O and racing with concurrent eviction.
    byte[] data = getAsBinary(key);
    if (data == null) {
        return null;
    }
    return Utils.Bytes2Bimap(data);
}
// =======================================
// ============= drawable 数据 读写 =============
// =======================================
/**
 * Stores a drawable in the cache with no expiry.
 *
 * @param key
 *            cache key
 * @param value
 *            drawable to store (converted to a bitmap first)
 */
public void put(String key, Drawable value) {
    put(key, Utils.drawable2Bitmap(value));
}
/**
 * Stores a drawable in the cache with an expiry.
 *
 * @param key
 *            cache key
 * @param value
 *            drawable to store (converted to a bitmap first)
 * @param saveTime
 *            time-to-live in seconds
 */
public void put(String key, Drawable value, int saveTime) {
    put(key, Utils.drawable2Bitmap(value), saveTime);
}
/**
 * Reads a cached drawable.
 *
 * @param key
 *            cache key
 * @return the drawable, or null if absent
 */
public Drawable getAsDrawable(String key) {
    // Bug fix: same double-read defect as getAsBitmap — fetch the bytes once.
    byte[] data = getAsBinary(key);
    if (data == null) {
        return null;
    }
    return Utils.bitmap2Drawable(Utils.Bytes2Bimap(data));
}
/**
 * Returns the on-disk file backing a cache entry.
 *
 * @param key
 *            cache key
 * @return the cached file, or null if it does not exist
 */
public File file(String key) {
    File f = mCache.newFile(key);
    if (f.exists())
        return f;
    return null;
}
/**
 * Removes a single cache entry.
 *
 * @param key
 *            cache key
 * @return true if the entry was removed
 */
public boolean remove(String key) {
    return mCache.remove(key);
}
/**
 * Deletes every cached entry.
 */
public void clear() {
    mCache.clear();
}
/**
 * Cache manager: tracks the total byte size and file count of the on-disk
 * cache and evicts least-recently-used files once either configured limit
 * is exceeded. (Original author: Michael Yang, www.yangfuhai.com)
 */
public class ACacheManager {
    // Running totals of cached bytes and cached file count.
    private final AtomicLong cacheSize;
    private final AtomicInteger cacheCount;
    // Hard limits for total bytes and number of files.
    private final long sizeLimit;
    private final int countLimit;
    // Last-access timestamps driving LRU eviction.
    private final Map<File, Long> lastUsageDates = Collections
            .synchronizedMap(new HashMap<File, Long>());
    protected File cacheDir;
    private ACacheManager(File cacheDir, long sizeLimit, int countLimit) {
        this.cacheDir = cacheDir;
        this.sizeLimit = sizeLimit;
        this.countLimit = countLimit;
        cacheSize = new AtomicLong();
        cacheCount = new AtomicInteger();
        // Counters are rebuilt asynchronously from the files on disk.
        calculateCacheSizeAndCacheCount();
    }
    /**
     * Recomputes cacheSize and cacheCount from the files currently on disk.
     * Runs on a background thread, so the counters may briefly lag reality.
     */
    private void calculateCacheSizeAndCacheCount() {
        new Thread(new Runnable() {
            @Override
            public void run() {
                // NOTE(review): 'size' is an int; a cache larger than 2 GB
                // would overflow this accumulator — confirm intended limits.
                int size = 0;
                int count = 0;
                File[] cachedFiles = cacheDir.listFiles();
                if (cachedFiles != null) {
                    for (File cachedFile : cachedFiles) {
                        size += calculateSize(cachedFile);
                        count += 1;
                        lastUsageDates.put(cachedFile,
                                cachedFile.lastModified());
                    }
                    cacheSize.set(size);
                    cacheCount.set(count);
                }
            }
        }).start();
    }
    /**
     * Registers a newly written file: evicts LRU entries until both the
     * count and size limits can accommodate it, then stamps its access time.
     */
    private void put(File file) {
        int curCacheCount = cacheCount.get();
        while (curCacheCount + 1 > countLimit) {
            long freedSize = removeNext();
            cacheSize.addAndGet(-freedSize);
            curCacheCount = cacheCount.addAndGet(-1);
        }
        cacheCount.addAndGet(1);
        long valueSize = calculateSize(file);
        long curCacheSize = cacheSize.get();
        while (curCacheSize + valueSize > sizeLimit) {
            long freedSize = removeNext();
            curCacheSize = cacheSize.addAndGet(-freedSize);
        }
        cacheSize.addAndGet(valueSize);
        // LRU timestamp is kept both on the file and in the in-memory map.
        Long currentTime = System.currentTimeMillis();
        file.setLastModified(currentTime);
        lastUsageDates.put(file, currentTime);
    }
    // Looks up the file for a key and refreshes its LRU timestamp.
    private File get(String key) {
        File file = newFile(key);
        Long currentTime = System.currentTimeMillis();
        file.setLastModified(currentTime);
        lastUsageDates.put(file, currentTime);
        return file;
    }
    // Maps a key to its on-disk file name (the key's hash code as a string).
    private File newFile(String key) {
        return new File(cacheDir, key.hashCode() + "");
    }
    // Deletes the file for a key; returns whether the delete succeeded.
    private boolean remove(String key) {
        File image = get(key);
        return image.delete();
    }
    // Deletes every cached file and resets the bookkeeping.
    private void clear() {
        lastUsageDates.clear();
        cacheSize.set(0);
        File[] files = cacheDir.listFiles();
        if (files != null) {
            for (File f : files) {
                f.delete();
            }
        }
    }
    /**
     * Evicts the least-recently-used file.
     *
     * @return the number of bytes freed, or 0 if the cache is empty
     */
    private long removeNext() {
        if (lastUsageDates.isEmpty()) {
            return 0;
        }
        Long oldestUsage = null;
        File mostLongUsedFile = null;
        Set<Entry<File, Long>> entries = lastUsageDates.entrySet();
        // Iterating a synchronizedMap's views requires manual locking.
        synchronized (lastUsageDates) {
            for (Entry<File, Long> entry : entries) {
                if (mostLongUsedFile == null) {
                    mostLongUsedFile = entry.getKey();
                    oldestUsage = entry.getValue();
                } else {
                    Long lastValueUsage = entry.getValue();
                    if (lastValueUsage < oldestUsage) {
                        oldestUsage = lastValueUsage;
                        mostLongUsedFile = entry.getKey();
                    }
                }
            }
        }
        long fileSize = calculateSize(mostLongUsedFile);
        // Only drop the bookkeeping entry if the delete actually succeeded.
        if (mostLongUsedFile.delete()) {
            lastUsageDates.remove(mostLongUsedFile);
        }
        return fileSize;
    }
    // Size in bytes of a single cache file.
    private long calculateSize(File file) {
        return file.length();
    }
}
/**
 * Static helpers for the cache: expiry ("date info") header handling plus
 * Bitmap/Drawable/byte[] conversions.
 * (Original author: Michael Yang, www.yangfuhai.com)
 */
private static class Utils {
    /**
     * Checks whether a cached String value has expired.
     *
     * @param str cached value including its date-info header
     * @return true if expired, false otherwise
     */
    private static boolean isDue(String str) {
        // NOTE(review): uses the platform default charset; assumed consistent
        // with how the value was written — confirm for non-ASCII payloads.
        return isDue(str.getBytes());
    }
    /**
     * Checks whether cached byte data has expired.
     *
     * @param data cached bytes including the date-info header
     * @return true if expired, false otherwise
     */
    private static boolean isDue(byte[] data) {
        String[] strs = getDateInfoFromDate(data);
        if (strs != null && strs.length == 2) {
            String saveTimeStr = strs[0];
            // Strip the zero padding added by createDateInfo().
            while (saveTimeStr.startsWith("0")) {
                saveTimeStr = saveTimeStr
                        .substring(1, saveTimeStr.length());
            }
            long saveTime = Long.valueOf(saveTimeStr);
            long deleteAfter = Long.valueOf(strs[1]);
            // deleteAfter is a TTL in seconds.
            if (System.currentTimeMillis() > saveTime + deleteAfter * 1000) {
                return true;
            }
        }
        return false;
    }
    // Prepends the expiry header to a String value.
    private static String newStringWithDateInfo(int second, String strInfo) {
        return createDateInfo(second) + strInfo;
    }
    // Prepends the expiry header to binary data.
    private static byte[] newByteArrayWithDateInfo(int second, byte[] data2) {
        byte[] data1 = createDateInfo(second).getBytes();
        byte[] retdata = new byte[data1.length + data2.length];
        System.arraycopy(data1, 0, retdata, 0, data1.length);
        System.arraycopy(data2, 0, retdata, data1.length, data2.length);
        return retdata;
    }
    // Strips the expiry header from a String value, if present.
    private static String clearDateInfo(String strInfo) {
        if (strInfo != null && hasDateInfo(strInfo.getBytes())) {
            strInfo = strInfo.substring(strInfo.indexOf(mSeparator) + 1,
                    strInfo.length());
        }
        return strInfo;
    }
    // Strips the expiry header from binary data, if present.
    private static byte[] clearDateInfo(byte[] data) {
        if (hasDateInfo(data)) {
            return copyOfRange(data, indexOf(data, mSeparator) + 1,
                    data.length);
        }
        return data;
    }
    // Header layout: 13-digit millis timestamp, '-', TTL seconds, separator.
    private static boolean hasDateInfo(byte[] data) {
        return data != null && data.length > 15 && data[13] == '-'
                && indexOf(data, mSeparator) > 14;
    }
    // Returns {saveTimeMillis, ttlSeconds} parsed from the header, or null.
    private static String[] getDateInfoFromDate(byte[] data) {
        if (hasDateInfo(data)) {
            String saveDate = new String(copyOfRange(data, 0, 13));
            String deleteAfter = new String(copyOfRange(data, 14,
                    indexOf(data, mSeparator)));
            return new String[] { saveDate, deleteAfter };
        }
        return null;
    }
    // First index of byte value c in data, or -1 if absent.
    private static int indexOf(byte[] data, char c) {
        for (int i = 0; i < data.length; i++) {
            if (data[i] == c) {
                return i;
            }
        }
        return -1;
    }
    // Local Arrays.copyOfRange equivalent (kept for old Android API levels).
    private static byte[] copyOfRange(byte[] original, int from, int to) {
        int newLength = to - from;
        if (newLength < 0)
            throw new IllegalArgumentException(from + " > " + to);
        byte[] copy = new byte[newLength];
        System.arraycopy(original, from, copy, 0,
                Math.min(original.length - from, newLength));
        return copy;
    }
    // Separator byte terminating the expiry header.
    private static final char mSeparator = ' ';
    // Builds the header: zero-padded 13-digit millis + '-' + TTL + separator.
    private static String createDateInfo(int second) {
        String currentTime = System.currentTimeMillis() + "";
        while (currentTime.length() < 13) {
            currentTime = "0" + currentTime;
        }
        return currentTime + "-" + second + mSeparator;
    }
    /*
     * Bitmap → byte[]
     */
    private static byte[] Bitmap2Bytes(Bitmap bm) {
        if (bm == null) {
            return null;
        }
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        // PNG is lossless, so the quality argument (100) is ignored.
        bm.compress(Bitmap.CompressFormat.PNG, 100, baos);
        return baos.toByteArray();
    }
    /*
     * byte[] → Bitmap
     */
    private static Bitmap Bytes2Bimap(byte[] b) {
        if (b.length == 0) {
            return null;
        }
        return BitmapFactory.decodeByteArray(b, 0, b.length);
    }
    /*
     * Drawable → Bitmap
     */
    private static Bitmap drawable2Bitmap(Drawable drawable) {
        if (drawable == null) {
            return null;
        }
        // Intrinsic size of the drawable.
        int w = drawable.getIntrinsicWidth();
        int h = drawable.getIntrinsicHeight();
        // Pick a pixel format matching the drawable's opacity.
        Bitmap.Config config = drawable.getOpacity() != PixelFormat.OPAQUE ? Bitmap.Config.ARGB_8888
                : Bitmap.Config.RGB_565;
        // Create the target bitmap...
        Bitmap bitmap = Bitmap.createBitmap(w, h, config);
        // ...and a canvas that renders into it.
        Canvas canvas = new Canvas(bitmap);
        drawable.setBounds(0, 0, w, h);
        // Paint the drawable's content into the bitmap.
        drawable.draw(canvas);
        return bitmap;
    }
    /*
     * Bitmap → Drawable
     */
    @SuppressWarnings("deprecation")
    private static Drawable bitmap2Drawable(Bitmap bm) {
        if (bm == null) {
            return null;
        }
        // Deprecated single-arg constructor kept for API compatibility.
        return new BitmapDrawable(bm);
    }
}
}
| apache-2.0 |
Shelley132/java-review | design-pattern/visitor/Keyboard.java | 285 | package visitor;
/**
 * Keyboard part of a computer — a concrete element in the Visitor pattern.
 *
 * @author JUANJUAN
 * @version 2017-07-19
 */
public class Keyboard implements ComputerPart {
    /**
     * Double-dispatches to the visitor's keyboard-specific
     * {@code visit} overload.
     */
    @Override
    public void accept(ComputerPartVisitor computerPartVisitor) {
        // Fixed: removed stale "TODO Auto-generated method stub" comment —
        // the method is fully implemented.
        computerPartVisitor.visit(this);
    }
}
| apache-2.0 |
aws/aws-sdk-java | aws-java-sdk-securityhub/src/main/java/com/amazonaws/services/securityhub/model/CustomDataIdentifiersResult.java | 7149 | /*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.securityhub.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;
/**
* <p>
* Contains an instance of sensitive data that was detected by a customer-defined identifier.
* </p>
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/securityhub-2018-10-26/CustomDataIdentifiersResult"
* target="_top">AWS API Documentation</a>
*/
// NOTE: generated by the AWS Java SDK code generator — change the generator
// templates rather than hand-editing this file.
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class CustomDataIdentifiersResult implements Serializable, Cloneable, StructuredPojo {
    /**
     * <p>
     * The list of detected instances of sensitive data.
     * </p>
     */
    private java.util.List<CustomDataIdentifiersDetections> detections;
    /**
     * <p>
     * The total number of occurrences of sensitive data.
     * </p>
     */
    private Long totalCount;
    /**
     * <p>
     * The list of detected instances of sensitive data.
     * </p>
     *
     * @return The list of detected instances of sensitive data.
     */
    public java.util.List<CustomDataIdentifiersDetections> getDetections() {
        return detections;
    }
    /**
     * <p>
     * The list of detected instances of sensitive data.
     * </p>
     *
     * @param detections
     *        The list of detected instances of sensitive data.
     */
    public void setDetections(java.util.Collection<CustomDataIdentifiersDetections> detections) {
        if (detections == null) {
            this.detections = null;
            return;
        }
        // Defensive copy: later mutation of the caller's collection must not
        // affect this object.
        this.detections = new java.util.ArrayList<CustomDataIdentifiersDetections>(detections);
    }
    /**
     * <p>
     * The list of detected instances of sensitive data.
     * </p>
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setDetections(java.util.Collection)} or {@link #withDetections(java.util.Collection)} if you want to
     * override the existing values.
     * </p>
     *
     * @param detections
     *        The list of detected instances of sensitive data.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CustomDataIdentifiersResult withDetections(CustomDataIdentifiersDetections... detections) {
        if (this.detections == null) {
            setDetections(new java.util.ArrayList<CustomDataIdentifiersDetections>(detections.length));
        }
        for (CustomDataIdentifiersDetections ele : detections) {
            this.detections.add(ele);
        }
        return this;
    }
    /**
     * <p>
     * The list of detected instances of sensitive data.
     * </p>
     *
     * @param detections
     *        The list of detected instances of sensitive data.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CustomDataIdentifiersResult withDetections(java.util.Collection<CustomDataIdentifiersDetections> detections) {
        setDetections(detections);
        return this;
    }
    /**
     * <p>
     * The total number of occurrences of sensitive data.
     * </p>
     *
     * @param totalCount
     *        The total number of occurrences of sensitive data.
     */
    public void setTotalCount(Long totalCount) {
        this.totalCount = totalCount;
    }
    /**
     * <p>
     * The total number of occurrences of sensitive data.
     * </p>
     *
     * @return The total number of occurrences of sensitive data.
     */
    public Long getTotalCount() {
        return this.totalCount;
    }
    /**
     * <p>
     * The total number of occurrences of sensitive data.
     * </p>
     *
     * @param totalCount
     *        The total number of occurrences of sensitive data.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CustomDataIdentifiersResult withTotalCount(Long totalCount) {
        setTotalCount(totalCount);
        return this;
    }
    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getDetections() != null)
            sb.append("Detections: ").append(getDetections()).append(",");
        if (getTotalCount() != null)
            sb.append("TotalCount: ").append(getTotalCount());
        sb.append("}");
        return sb.toString();
    }
    // Generated field-by-field equality; kept consistent with hashCode below.
    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (obj instanceof CustomDataIdentifiersResult == false)
            return false;
        CustomDataIdentifiersResult other = (CustomDataIdentifiersResult) obj;
        if (other.getDetections() == null ^ this.getDetections() == null)
            return false;
        if (other.getDetections() != null && other.getDetections().equals(this.getDetections()) == false)
            return false;
        if (other.getTotalCount() == null ^ this.getTotalCount() == null)
            return false;
        if (other.getTotalCount() != null && other.getTotalCount().equals(this.getTotalCount()) == false)
            return false;
        return true;
    }
    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;
        hashCode = prime * hashCode + ((getDetections() == null) ? 0 : getDetections().hashCode());
        hashCode = prime * hashCode + ((getTotalCount() == null) ? 0 : getTotalCount().hashCode());
        return hashCode;
    }
    @Override
    public CustomDataIdentifiersResult clone() {
        try {
            return (CustomDataIdentifiersResult) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }
    @com.amazonaws.annotation.SdkInternalApi
    @Override
    public void marshall(ProtocolMarshaller protocolMarshaller) {
        com.amazonaws.services.securityhub.model.transform.CustomDataIdentifiersResultMarshaller.getInstance().marshall(this, protocolMarshaller);
    }
}
| apache-2.0 |
FasterXML/jackson-databind | src/test/java/com/fasterxml/jackson/databind/jsontype/TestVisibleTypeId.java | 8419 | package com.fasterxml.jackson.databind.jsontype;
import com.fasterxml.jackson.annotation.*;
import com.fasterxml.jackson.annotation.JsonTypeInfo.As;
import com.fasterxml.jackson.annotation.JsonTypeInfo.Id;
import com.fasterxml.jackson.databind.*;
import com.fasterxml.jackson.databind.exc.InvalidDefinitionException;
/**
* Tests to verify that Type Id may be exposed during deserialization,
*/
public class TestVisibleTypeId extends BaseMapTest
{
    // type id as property, exposed
    @JsonTypeInfo(use=JsonTypeInfo.Id.NAME, include=JsonTypeInfo.As.PROPERTY,
            property="type", visible=true)
    @JsonTypeName("BaseType")
    static class PropertyBean {
        public int a = 3;
        protected String type;
        public void setType(String t) { type = t; }
    }
    // as wrapper-array
    @JsonTypeInfo(use=JsonTypeInfo.Id.NAME, include=JsonTypeInfo.As.WRAPPER_ARRAY,
            property="type", visible=true)
    @JsonTypeName("ArrayType")
    static class WrapperArrayBean {
        public int a = 1;
        protected String type;
        public void setType(String t) { type = t; }
    }
    // as wrapper-object
    @JsonTypeInfo(use=JsonTypeInfo.Id.NAME, include=JsonTypeInfo.As.WRAPPER_OBJECT,
            property="type", visible=true)
    @JsonTypeName("ObjectType")
    static class WrapperObjectBean {
        public int a = 2;
        protected String type;
        public void setType(String t) { type = t; }
    }
    @JsonTypeName("ExternalType")
    static class ExternalIdBean {
        public int a = 2;
        protected String type;
        public void setType(String t) { type = t; }
    }
    // // // [JACKSON-762]: type id from property
    @JsonTypeInfo(use=JsonTypeInfo.Id.NAME, include=JsonTypeInfo.As.PROPERTY,
            property="type")
    static class TypeIdFromFieldProperty {
        public int a = 3;
        @JsonTypeId
        public String type = "SomeType";
    }
    @JsonTypeInfo(use=JsonTypeInfo.Id.NAME, include=JsonTypeInfo.As.WRAPPER_ARRAY,
            property="type")
    static class TypeIdFromFieldArray {
        public int a = 3;
        @JsonTypeId
        public String type = "SomeType";
    }
    @JsonTypeInfo(use=JsonTypeInfo.Id.NAME, include=JsonTypeInfo.As.WRAPPER_OBJECT,
            property="type")
    static class TypeIdFromMethodObject {
        public int a = 3;
        @JsonTypeId
        public String getType() { return "SomeType"; }
    }
    static class ExternalIdWrapper2 {
        @JsonTypeInfo(use=JsonTypeInfo.Id.NAME, include=JsonTypeInfo.As.EXTERNAL_PROPERTY,
                property="type", visible=true)
        public ExternalIdBean2 bean = new ExternalIdBean2();
    }
    static class ExternalIdBean2 {
        public int a = 2;
        /* Type id property itself cannot be external, as it is conceptually
         * part of the bean for which info is written:
         */
        @JsonTypeId
        public String getType() { return "SomeType"; }
    }
    // Invalid definition: multiple type ids
    static class MultipleIds {
        @JsonTypeId
        public String type1 = "type1";
        @JsonTypeId
        public String getType2() { return "type2"; };
    }
    @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "name")
    @JsonSubTypes({ @JsonSubTypes.Type(value=I263Impl.class) })
    public static abstract class I263Base {
        @JsonTypeId
        public abstract String getName();
    }
    @JsonPropertyOrder({ "age", "name" })
    @JsonTypeName("bob")
    public static class I263Impl extends I263Base
    {
        @Override
        public String getName() { return "bob"; }
        public int age = 41;
    }
    // [databind#408]
    static class ExternalBeanWithId
    {
        protected String _type;
        @JsonTypeInfo(use=Id.NAME, include=As.EXTERNAL_PROPERTY, property="type", visible=true)
        public ValueBean bean;
        public ExternalBeanWithId() { }
        public ExternalBeanWithId(int v) {
            bean = new ValueBean(v);
        }
        public void setType(String t) {
            _type = t;
        }
    }
    @JsonTypeName("vbean")
    static class ValueBean {
        public int value;
        public ValueBean() { }
        public ValueBean(int v) { value = v; }
    }
    /*
    /**********************************************************
    /* Unit tests, success
    /**********************************************************
     */
    private final ObjectMapper MAPPER = new ObjectMapper();
    // Round-trips a PROPERTY-inclusion type id and checks it reaches the setter.
    public void testVisibleWithProperty() throws Exception
    {
        String json = MAPPER.writeValueAsString(new PropertyBean());
        // just default behavior:
        assertEquals("{\"type\":\"BaseType\",\"a\":3}", json);
        // but then expect to read it back
        PropertyBean result = MAPPER.readValue(json, PropertyBean.class);
        assertEquals("BaseType", result.type);
        // also, should work with order reversed
        result = MAPPER.readValue("{\"a\":7, \"type\":\"BaseType\"}", PropertyBean.class);
        assertEquals(7, result.a);
        assertEquals("BaseType", result.type);
    }
    // Same round-trip for WRAPPER_ARRAY inclusion.
    public void testVisibleWithWrapperArray() throws Exception
    {
        String json = MAPPER.writeValueAsString(new WrapperArrayBean());
        // just default behavior:
        assertEquals("[\"ArrayType\",{\"a\":1}]", json);
        // but then expect to read it back
        WrapperArrayBean result = MAPPER.readValue(json, WrapperArrayBean.class);
        assertEquals("ArrayType", result.type);
        assertEquals(1, result.a);
    }
    // Same round-trip for WRAPPER_OBJECT inclusion.
    public void testVisibleWithWrapperObject() throws Exception
    {
        String json = MAPPER.writeValueAsString(new WrapperObjectBean());
        assertEquals("{\"ObjectType\":{\"a\":2}}", json);
        // but then expect to read it back
        WrapperObjectBean result = MAPPER.readValue(json, WrapperObjectBean.class);
        assertEquals("ObjectType", result.type);
    }
    // @JsonTypeId on a field supplies the id for PROPERTY inclusion.
    public void testTypeIdFromProperty() throws Exception
    {
        assertEquals("{\"type\":\"SomeType\",\"a\":3}",
                MAPPER.writeValueAsString(new TypeIdFromFieldProperty()));
    }
    public void testTypeIdFromArray() throws Exception
    {
        assertEquals("[\"SomeType\",{\"a\":3}]",
                MAPPER.writeValueAsString(new TypeIdFromFieldArray()));
    }
    public void testTypeIdFromObject() throws Exception
    {
        assertEquals("{\"SomeType\":{\"a\":3}}",
                MAPPER.writeValueAsString(new TypeIdFromMethodObject()));
    }
    public void testTypeIdFromExternal() throws Exception
    {
        String json = MAPPER.writeValueAsString(new ExternalIdWrapper2());
        // Implementation detail: type id written AFTER value, due to constraints
        assertEquals("{\"bean\":{\"a\":2},\"type\":\"SomeType\"}", json);
    }
    public void testIssue263() throws Exception
    {
        // first, serialize:
        assertEquals("{\"name\":\"bob\",\"age\":41}", MAPPER.writeValueAsString(new I263Impl()));
        // then bring back:
        I263Base result = MAPPER.readValue("{\"age\":19,\"name\":\"bob\"}", I263Base.class);
        assertTrue(result instanceof I263Impl);
        assertEquals(19, ((I263Impl) result).age);
    }
    // [databind#408]
    /* NOTE: Handling changed between 2.4 and 2.5; earlier, type id was 'injected'
     * inside POJO; but with 2.5 this was fixed so it would remain outside, similar
     * to how JSON structure is.
     */
    public void testVisibleTypeId408() throws Exception
    {
        String json = MAPPER.writeValueAsString(new ExternalBeanWithId(3));
        ExternalBeanWithId result = MAPPER.readValue(json, ExternalBeanWithId.class);
        assertNotNull(result);
        assertNotNull(result.bean);
        assertEquals(3, result.bean.value);
        assertEquals("vbean", result._type);
    }
    /*
    /**********************************************************
    /* Unit tests, fails
    /**********************************************************
     */
    // Declaring two @JsonTypeId members must be rejected at serialization time.
    public void testInvalidMultipleTypeIds() throws Exception
    {
        try {
            MAPPER.writeValueAsString(new MultipleIds());
            fail("Should have failed");
        } catch (InvalidDefinitionException e) {
            verifyException(e, "multiple type ids");
        }
    }
}
| apache-2.0 |
prowide/prowide-core | src/test/java/com/prowidesoftware/swift/model/field/Field53BTest.java | 3170 | /*
* Copyright 2006-2021 Prowide
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.prowidesoftware.swift.model.field;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;
import org.junit.jupiter.api.Test;
public class Field53BTest extends AbstractFieldTest {
@Override
@Test
public void testSerialization() {
testSerializationImpl("53B",
"/00010001380002000114",
"/D/1234/56",
"/D\nabcd"
);
}
@Test
public void test53B() {
Field53B f = new Field53B((String) null);
assertNull(f.getDCMark());
assertNull(f.getAccount());
assertNull(f.getComponent3());
f = new Field53B("");
assertNull(f.getDCMark());
assertNull(f.getAccount());
assertNull(f.getComponent3());
f = new Field53B("/D");
assertNull(f.getDCMark());
assertEquals("D", f.getAccount());
assertNull(f.getComponent3());
f = new Field53B("/D/");
assertEquals("D", f.getDCMark());
assertNull(f.getAccount());
assertNull(f.getComponent3());
f = new Field53B("/D/1234");
assertEquals("D", f.getDCMark());
assertEquals("1234", f.getAccount());
assertNull(f.getComponent3());
f = new Field53B("/D/1234/56");
assertEquals("D", f.getDCMark());
assertEquals("1234/56", f.getAccount());
assertNull(f.getComponent3());
f = new Field53B("abcd");
assertNull(f.getDCMark());
assertNull(f.getAccount());
assertEquals("abcd", f.getComponent3());
f = new Field53B("/D/1234\nabcd");
assertEquals("D", f.getDCMark());
assertEquals("1234", f.getAccount());
assertEquals("abcd", f.getComponent3());
f = new Field53B("/D\nabcd");
assertNull(f.getDCMark());
assertEquals("D", f.getAccount());
assertEquals("abcd", f.getComponent3());
f = new Field53B("/1234\nabcd");
assertNull(f.getDCMark());
assertEquals("1234", f.getAccount());
assertEquals("abcd", f.getComponent3());
f = new Field53B("/00010001380002000114");
assertEquals("00010001380002000114", f.getAccount());
assertNull(f.getDCMark());
assertNull(f.getComponent3());
}
@Test
public void testGetValueDisplay() {
Field53B f = new Field53B("/00010001380002000114");
assertEquals("00010001380002000114", f.getAccount());
assertNull(f.getDCMark());
assertNull(f.getComponent3());
}
} | apache-2.0 |
opetrovski/development | oscm-portal-stubbed/javasrc/org/oscm/ui/common/CustomServiceAccess2.java | 1595 | /*******************************************************************************
*
* Copyright FUJITSU LIMITED 2017
*
* Author: pock
*
* Creation Date: 09.12.2010
*
*******************************************************************************/
package org.oscm.ui.common;
import javax.naming.CommunicationException;
import javax.security.auth.login.LoginException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.oscm.ui.services.MockService;
import org.oscm.internal.vo.VOUser;
/**
* Implementation to access the mock service
*/
/**
 * {@link ServiceAccess} variant backed entirely by the mock service;
 * login and session creation are no-ops for the stubbed portal.
 */
public class CustomServiceAccess2 extends ServiceAccess {
    protected CustomServiceAccess2() {
    }

    /** Resolves every service lookup against the shared mock instance. */
    @Override
    public <T> T getService(Class<T> clazz) {
        return clazz.cast(MockService.getInstance());
    }

    /** Intentionally empty: the stub performs no real authentication. */
    @Override
    public void doLogin(VOUser userObject, String password,
            HttpServletRequest request, HttpServletResponse response)
            throws CommunicationException, LoginException {
        // do nothing
    }

    /** Always reports a successfully created session. */
    @Override
    protected boolean createSession() {
        return true;
    }
}
| apache-2.0 |
crate/crate | server/src/main/java/io/crate/expression/eval/NullEliminator.java | 4775 | /*
* Licensed to Crate.io GmbH ("Crate") under one or more contributor
* license agreements. See the NOTICE file distributed with this work for
* additional information regarding copyright ownership. Crate licenses
* this file to you under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License. You may
* obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*
* However, if you have executed another commercial license agreement
* with Crate these terms will supersede the license and you may use the
* software solely pursuant to the terms of the relevant commercial agreement.
*/
package io.crate.expression.eval;
import io.crate.expression.symbol.Function;
import io.crate.expression.symbol.FunctionCopyVisitor;
import io.crate.expression.symbol.Literal;
import io.crate.expression.symbol.Symbol;
import io.crate.expression.operator.Operators;
import io.crate.expression.predicate.NotPredicate;
/**
* Inside a query, NULL values as logical operator arguments can be treated like boolean FALSE.
* As a result, the query can mostly be optimized and turned into a lucene query.
* Without this optimization chances are high that a genericFunctionFilter is used instead of a lucene query which
* results in poor performance (table scan -> filter).
* <p>
* <pre>
* Example:
*
* NULL AND col1 = 1
*
* can be handled as:
*
* FALSE AND col1 = 1
*
* which can be immediately normalized to:
*
* FALSE -> NO-MATCH
* </pre>
*
* @implNote If a NOT predicate is encountered inside the tree, the current boolean to replace a NULL must be inverted
* for that leaf. Also traversing must be stopped if a conditional function is encountered as they can handle NULL
* values in a concrete way.
*/
public final class NullEliminator {
    // Stateless and therefore safe to share across all invocations.
    private static final Visitor VISITOR = new Visitor();
    /**
     * Eliminates NULLs inside the given query symbol if possible.
     * Also see {@link NullEliminator} class documentation for details.
     *
     * @param symbol The query symbol to operate on.
     * @param postProcessor A function applied only on function symbols which changed due to NULL replacement.
     */
    public static Symbol eliminateNullsIfPossible(Symbol symbol,
                                                  java.util.function.Function<Symbol, Symbol> postProcessor) {
        return symbol.accept(VISITOR, new Context(postProcessor));
    }
    // Mutable traversal state threaded through the visitor.
    private static class Context {
        private final java.util.function.Function<Symbol, Symbol> postProcessor;
        // True once the traversal has entered at least one logical operator;
        // NULL literals are only replaced inside logical operators.
        boolean insideLogicalOperator = false;
        // Boolean that a NULL literal is currently replaced with; flipped
        // under each NOT so the overall truth value is preserved.
        boolean nullReplacement = false;
        public Context(java.util.function.Function<Symbol, Symbol> postProcessor) {
            this.postProcessor = postProcessor;
        }
    }
    private static class Visitor extends FunctionCopyVisitor<Context> {
        @Override
        public Symbol visitFunction(Function func, Context context) {
            String functionName = func.name();
            // only operate inside logical operators
            if (Operators.LOGICAL_OPERATORS.contains(functionName)) {
                // Save state so sibling branches see the outer values again.
                final boolean currentNullReplacement = context.nullReplacement;
                final boolean currentInsideLogicalOperator = context.insideLogicalOperator;
                context.insideLogicalOperator = true;
                if (NotPredicate.NAME.equals(functionName)) {
                    // not(null) -> not(false) would evaluate to true, so replacement boolean must be flipped
                    context.nullReplacement = !currentNullReplacement;
                }
                Symbol newFunc = super.visitFunction(func, context);
                if (newFunc != func) {
                    // Only post-process functions actually changed by a NULL replacement.
                    newFunc = context.postProcessor.apply(newFunc);
                }
                // reset context
                context.insideLogicalOperator = currentInsideLogicalOperator;
                context.nullReplacement = currentNullReplacement;
                return newFunc;
            }
            // Non-logical functions (e.g. conditionals) handle NULL themselves;
            // stop descending here.
            return func;
        }
        @Override
        public Symbol visitLiteral(Literal symbol, Context context) {
            // Replace a NULL literal with the context's current boolean, but
            // only when nested inside a logical operator.
            if (context.insideLogicalOperator && symbol.value() == null) {
                return Literal.of(context.nullReplacement);
            }
            return symbol;
        }
    }
    private NullEliminator() {
    }
}
| apache-2.0 |
b3log/b3log-solo | src/test/java/org/b3log/solo/processor/console/CommentConsoleTestCase.java | 3799 | /*
* Solo - A small and beautiful blogging system written in Java.
* Copyright (c) 2010-present, b3log.org
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
package org.b3log.solo.processor.console;
import org.apache.commons.lang.StringUtils;
import org.b3log.latke.Keys;
import org.b3log.solo.AbstractTestCase;
import org.b3log.solo.MockHttpServletRequest;
import org.b3log.solo.MockHttpServletResponse;
import org.json.JSONObject;
import org.testng.Assert;
import org.testng.annotations.Test;
import java.util.List;
/**
* {@link CommentConsole} test case.
*
* @author <a href="http://88250.b3log.org">Liang Ding</a>
* @version 1.1.0.0, Dec 11, 2018
* @since 2.9.8
*/
@Test(suiteName = "processor")
public class CommentConsoleTestCase extends AbstractTestCase {
    /**
     * Init.
     *
     * @throws Exception exception
     */
    @Test
    public void init() throws Exception {
        super.init();
    }
    /**
     * getComments.
     *
     * @throws Exception exception
     */
    @Test(dependsOnMethods = "init")
    public void getComments() throws Exception {
        final MockHttpServletRequest request = mockRequest();
        // path segments presumably page/pageSize/windowSize — TODO confirm
        // against CommentConsole's route definition.
        request.setRequestURI("/console/comments/1/10/20");
        mockAdminLogin(request);
        final MockHttpServletResponse response = mockResponse();
        mockDispatcherServletService(request, response);
        final String content = response.body();
        // Console responses report success via "sc":true in the JSON body.
        Assert.assertTrue(StringUtils.contains(content, "sc\":true"));
    }
    /**
     * getArticleComments.
     *
     * @throws Exception exception
     */
    @Test(dependsOnMethods = "init")
    public void getArticleComments() throws Exception {
        // Use the newest article as the fixture for its comment listing.
        final List<JSONObject> recentArticles = getArticleQueryService().getRecentArticles(1);
        final JSONObject article = recentArticles.get(0);
        final String articleId = article.optString(Keys.OBJECT_ID);
        final MockHttpServletRequest request = mockRequest();
        request.setRequestURI("/console/comments/article/" + articleId);
        mockAdminLogin(request);
        final MockHttpServletResponse response = mockResponse();
        mockDispatcherServletService(request, response);
        final String content = response.body();
        Assert.assertTrue(StringUtils.contains(content, "sc\":true"));
    }
    /**
     * removeArticleComment.
     *
     * @throws Exception exception
     */
    @Test(dependsOnMethods = "init")
    public void removeArticleComment() throws Exception {
        // Delete the newest comment and expect a success response.
        final List<JSONObject> recentComments = getCommentRepository().getRecentComments(1);
        final JSONObject comment = recentComments.get(0);
        final String commentId = comment.optString(Keys.OBJECT_ID);
        final MockHttpServletRequest request = mockRequest();
        request.setRequestURI("/console/article/comment/" + commentId);
        request.setMethod("DELETE");
        mockAdminLogin(request);
        final MockHttpServletResponse response = mockResponse();
        mockDispatcherServletService(request, response);
        final String content = response.body();
        Assert.assertTrue(StringUtils.contains(content, "sc\":true"));
    }
}
| apache-2.0 |
DomDerrien/amazon-fps-gaej | src/com/amazonaws/fps/model/GetTokenByCallerResult.java | 5450 |
package com.amazonaws.fps.model;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlType;
/**
* <p>Java class for anonymous complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType>
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <element name="Token" type="{http://fps.amazonaws.com/doc/2008-09-17/}Token" minOccurs="0"/>
* </sequence>
* </restriction>
* </complexContent>
* </complexType>
* </pre>
* Generated by AWS Code Generator
* <p/>
* Tue Sep 29 03:25:23 PDT 2009
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "", propOrder = {
"token"
})
@XmlRootElement(name = "GetTokenByCallerResult")
public class GetTokenByCallerResult {
@XmlElement(name = "Token")
protected Token token;
/**
* Default constructor
*
*/
public GetTokenByCallerResult() {
super();
}
/**
* Value constructor
*
*/
public GetTokenByCallerResult(final Token token) {
this.token = token;
}
/**
* Gets the value of the token property.
*
* @return
* possible object is
* {@link Token }
*
*/
public Token getToken() {
return token;
}
/**
* Sets the value of the token property.
*
* @param value
* allowed object is
* {@link Token }
*
*/
public void setToken(Token value) {
this.token = value;
}
public boolean isSetToken() {
return (this.token!= null);
}
/**
* Sets the value of the Token property.
*
* @param value
* @return
* this instance
*/
public GetTokenByCallerResult withToken(Token value) {
setToken(value);
return this;
}
/**
*
* XML fragment representation of this object
*
* @return XML fragment for this object. Name for outer
* tag expected to be set by calling method. This fragment
* returns inner properties representation only
*/
protected String toXMLFragment() {
StringBuffer xml = new StringBuffer();
if (isSetToken()) {
Token token = getToken();
xml.append("<Token>");
xml.append(token.toXMLFragment());
xml.append("</Token>");
}
return xml.toString();
}
/**
*
* Escape XML special characters
*/
private String escapeXML(String string) {
StringBuffer sb = new StringBuffer();
int length = string.length();
for (int i = 0; i < length; ++i) {
char c = string.charAt(i);
switch (c) {
case '&':
sb.append("&");
break;
case '<':
sb.append("<");
break;
case '>':
sb.append(">");
break;
case '\'':
sb.append("'");
break;
case '"':
sb.append(""");
break;
default:
sb.append(c);
}
}
return sb.toString();
}
/**
*
* JSON fragment representation of this object
*
* @return JSON fragment for this object. Name for outer
* object expected to be set by calling method. This fragment
* returns inner properties representation only
*
*/
protected String toJSONFragment() {
StringBuffer json = new StringBuffer();
boolean first = true;
if (isSetToken()) {
if (!first) json.append(", ");
json.append("\"Token\" : {");
Token token = getToken();
json.append(token.toJSONFragment());
json.append("}");
first = false;
}
return json.toString();
}
/**
*
* Quote JSON string
*/
private String quoteJSON(String string) {
StringBuffer sb = new StringBuffer();
sb.append("\"");
int length = string.length();
for (int i = 0; i < length; ++i) {
char c = string.charAt(i);
switch (c) {
case '"':
sb.append("\\\"");
break;
case '\\':
sb.append("\\\\");
break;
case '/':
sb.append("\\/");
break;
case '\b':
sb.append("\\b");
break;
case '\f':
sb.append("\\f");
break;
case '\n':
sb.append("\\n");
break;
case '\r':
sb.append("\\r");
break;
case '\t':
sb.append("\\t");
break;
default:
if (c < ' ') {
sb.append("\\u" + String.format("%03x", Integer.valueOf(c)));
} else {
sb.append(c);
}
}
}
sb.append("\"");
return sb.toString();
}
}
| apache-2.0 |
prateek1306/presto | presto-main/src/main/java/com/facebook/presto/sql/planner/RelationPlanner.java | 42595 | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.sql.planner;
import com.facebook.presto.Session;
import com.facebook.presto.SystemSessionProperties;
import com.facebook.presto.metadata.Metadata;
import com.facebook.presto.metadata.TableHandle;
import com.facebook.presto.spi.ColumnHandle;
import com.facebook.presto.spi.predicate.TupleDomain;
import com.facebook.presto.spi.type.ArrayType;
import com.facebook.presto.spi.type.MapType;
import com.facebook.presto.spi.type.Type;
import com.facebook.presto.sql.ExpressionUtils;
import com.facebook.presto.sql.analyzer.Analysis;
import com.facebook.presto.sql.analyzer.Field;
import com.facebook.presto.sql.analyzer.RelationId;
import com.facebook.presto.sql.analyzer.RelationType;
import com.facebook.presto.sql.analyzer.Scope;
import com.facebook.presto.sql.planner.plan.AggregationNode;
import com.facebook.presto.sql.planner.plan.Assignments;
import com.facebook.presto.sql.planner.plan.ExceptNode;
import com.facebook.presto.sql.planner.plan.FilterNode;
import com.facebook.presto.sql.planner.plan.IntersectNode;
import com.facebook.presto.sql.planner.plan.JoinNode;
import com.facebook.presto.sql.planner.plan.PlanNode;
import com.facebook.presto.sql.planner.plan.ProjectNode;
import com.facebook.presto.sql.planner.plan.SampleNode;
import com.facebook.presto.sql.planner.plan.TableScanNode;
import com.facebook.presto.sql.planner.plan.UnionNode;
import com.facebook.presto.sql.planner.plan.UnnestNode;
import com.facebook.presto.sql.planner.plan.ValuesNode;
import com.facebook.presto.sql.tree.AliasedRelation;
import com.facebook.presto.sql.tree.Cast;
import com.facebook.presto.sql.tree.CoalesceExpression;
import com.facebook.presto.sql.tree.ComparisonExpression;
import com.facebook.presto.sql.tree.ComparisonExpressionType;
import com.facebook.presto.sql.tree.DefaultTraversalVisitor;
import com.facebook.presto.sql.tree.Except;
import com.facebook.presto.sql.tree.Expression;
import com.facebook.presto.sql.tree.ExpressionTreeRewriter;
import com.facebook.presto.sql.tree.Identifier;
import com.facebook.presto.sql.tree.InPredicate;
import com.facebook.presto.sql.tree.Intersect;
import com.facebook.presto.sql.tree.Join;
import com.facebook.presto.sql.tree.JoinUsing;
import com.facebook.presto.sql.tree.LambdaArgumentDeclaration;
import com.facebook.presto.sql.tree.Lateral;
import com.facebook.presto.sql.tree.NodeRef;
import com.facebook.presto.sql.tree.QualifiedName;
import com.facebook.presto.sql.tree.Query;
import com.facebook.presto.sql.tree.QuerySpecification;
import com.facebook.presto.sql.tree.Relation;
import com.facebook.presto.sql.tree.Row;
import com.facebook.presto.sql.tree.SampledRelation;
import com.facebook.presto.sql.tree.SetOperation;
import com.facebook.presto.sql.tree.SymbolReference;
import com.facebook.presto.sql.tree.Table;
import com.facebook.presto.sql.tree.TableSubquery;
import com.facebook.presto.sql.tree.Union;
import com.facebook.presto.sql.tree.Unnest;
import com.facebook.presto.sql.tree.Values;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableListMultimap;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;
import com.google.common.collect.ListMultimap;
import com.google.common.collect.UnmodifiableIterator;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import static com.facebook.presto.sql.analyzer.SemanticExceptions.notSupportedException;
import static com.facebook.presto.sql.planner.ExpressionInterpreter.evaluateConstantExpression;
import static com.facebook.presto.sql.tree.Join.Type.INNER;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkState;
import static com.google.common.base.Verify.verify;
import static com.google.common.collect.ImmutableList.toImmutableList;
import static java.util.Objects.requireNonNull;
class RelationPlanner
extends DefaultTraversalVisitor<RelationPlan, Void>
{
private final Analysis analysis;
private final SymbolAllocator symbolAllocator;
private final PlanNodeIdAllocator idAllocator;
private final Map<NodeRef<LambdaArgumentDeclaration>, Symbol> lambdaDeclarationToSymbolMap;
private final Metadata metadata;
private final Session session;
private final SubqueryPlanner subqueryPlanner;
RelationPlanner(
Analysis analysis,
SymbolAllocator symbolAllocator,
PlanNodeIdAllocator idAllocator,
Map<NodeRef<LambdaArgumentDeclaration>, Symbol> lambdaDeclarationToSymbolMap,
Metadata metadata,
Session session)
{
requireNonNull(analysis, "analysis is null");
requireNonNull(symbolAllocator, "symbolAllocator is null");
requireNonNull(idAllocator, "idAllocator is null");
requireNonNull(lambdaDeclarationToSymbolMap, "lambdaDeclarationToSymbolMap is null");
requireNonNull(metadata, "metadata is null");
requireNonNull(session, "session is null");
this.analysis = analysis;
this.symbolAllocator = symbolAllocator;
this.idAllocator = idAllocator;
this.lambdaDeclarationToSymbolMap = lambdaDeclarationToSymbolMap;
this.metadata = metadata;
this.session = session;
this.subqueryPlanner = new SubqueryPlanner(analysis, symbolAllocator, idAllocator, lambdaDeclarationToSymbolMap, metadata, session, analysis.getParameters());
}
    /**
     * Plans a table reference. Named queries (views / WITH-clause queries) are planned
     * as their underlying query plus coercions to the declared output types; plain
     * tables become a {@link TableScanNode} with one symbol per field.
     */
    @Override
    protected RelationPlan visitTable(Table node, Void context)
    {
        Query namedQuery = analysis.getNamedQuery(node);
        Scope scope = analysis.getScope(node);
        if (namedQuery != null) {
            RelationPlan subPlan = process(namedQuery, null);
            // Add implicit coercions if view query produces types that don't match the declared output types
            // of the view (e.g., if the underlying tables referenced by the view changed)
            Type[] types = scope.getRelationType().getAllFields().stream().map(Field::getType).toArray(Type[]::new);
            RelationPlan withCoercions = addCoercions(subPlan, types);
            return new RelationPlan(withCoercions.getRoot(), scope, withCoercions.getFieldMappings());
        }
        TableHandle handle = analysis.getTableHandle(node);
        // Allocate an output symbol per field and remember which column backs it
        ImmutableList.Builder<Symbol> outputSymbolsBuilder = ImmutableList.builder();
        ImmutableMap.Builder<Symbol, ColumnHandle> columns = ImmutableMap.builder();
        for (Field field : scope.getRelationType().getAllFields()) {
            Symbol symbol = symbolAllocator.newSymbol(field.getName().get(), field.getType());
            outputSymbolsBuilder.add(symbol);
            columns.put(symbol, analysis.getColumn(field));
        }
        List<Symbol> outputSymbols = outputSymbolsBuilder.build();
        PlanNode root = new TableScanNode(idAllocator.getNextId(), handle, outputSymbols, columns.build(), Optional.empty(), TupleDomain.all(), null);
        return new RelationPlan(root, scope, outputSymbols);
    }
@Override
protected RelationPlan visitAliasedRelation(AliasedRelation node, Void context)
{
RelationPlan subPlan = process(node.getRelation(), context);
return new RelationPlan(subPlan.getRoot(), analysis.getScope(node), subPlan.getFieldMappings());
}
@Override
protected RelationPlan visitSampledRelation(SampledRelation node, Void context)
{
RelationPlan subPlan = process(node.getRelation(), context);
double ratio = analysis.getSampleRatio(node);
PlanNode planNode = new SampleNode(idAllocator.getNextId(),
subPlan.getRoot(),
ratio,
SampleNode.Type.fromType(node.getType()));
return new RelationPlan(planNode, analysis.getScope(node), subPlan.getFieldMappings());
}
    /**
     * Plans a JOIN. Handles several shapes:
     * <ul>
     *   <li>UNNEST / LATERAL on the right side of a (possibly implicit) cross join,
     *       delegated to dedicated planners</li>
     *   <li>JOIN ... USING under the non-legacy semantics, delegated to planJoinUsing</li>
     *   <li>general ON criteria: each conjunct is classified as an equi-clause, a
     *       "complex" expression, or a post-join filter, depending on which side(s)
     *       of the join it references and on the join type</li>
     * </ul>
     */
    @Override
    protected RelationPlan visitJoin(Join node, Void context)
    {
        // TODO: translate the RIGHT join into a mirrored LEFT join when we refactor (@martint)
        RelationPlan leftPlan = process(node.getLeft(), context);
        // UNNEST and LATERAL are only legal as the right side of a cross join
        Optional<Unnest> unnest = getUnnest(node.getRight());
        if (unnest.isPresent()) {
            if (node.getType() != Join.Type.CROSS && node.getType() != Join.Type.IMPLICIT) {
                throw notSupportedException(unnest.get(), "UNNEST on other than the right side of CROSS JOIN");
            }
            return planCrossJoinUnnest(leftPlan, node, unnest.get());
        }
        Optional<Lateral> lateral = getLateral(node.getRight());
        if (lateral.isPresent()) {
            if (node.getType() != Join.Type.CROSS && node.getType() != Join.Type.IMPLICIT) {
                throw notSupportedException(lateral.get(), "LATERAL on other than the right side of CROSS JOIN");
            }
            return planLateralJoin(node, leftPlan, lateral.get());
        }
        RelationPlan rightPlan = process(node.getRight(), context);
        if (!SystemSessionProperties.isLegacyJoinUsingEnabled(session) &&
                node.getCriteria().isPresent() && node.getCriteria().get() instanceof JoinUsing) {
            return planJoinUsing(node, leftPlan, rightPlan);
        }
        PlanBuilder leftPlanBuilder = initializePlanBuilder(leftPlan);
        PlanBuilder rightPlanBuilder = initializePlanBuilder(rightPlan);
        // NOTE: symbols must be in the same order as the outputDescriptor
        List<Symbol> outputSymbols = ImmutableList.<Symbol>builder()
                .addAll(leftPlan.getFieldMappings())
                .addAll(rightPlan.getFieldMappings())
                .build();
        ImmutableList.Builder<JoinNode.EquiJoinClause> equiClauses = ImmutableList.builder();
        List<Expression> complexJoinExpressions = new ArrayList<>();
        List<Expression> postInnerJoinConditions = new ArrayList<>();
        if (node.getType() != Join.Type.CROSS && node.getType() != Join.Type.IMPLICIT) {
            Expression criteria = analysis.getJoinCriteria(node);
            RelationType left = analysis.getOutputDescriptor(node.getLeft());
            RelationType right = analysis.getOutputDescriptor(node.getRight());
            List<Expression> leftComparisonExpressions = new ArrayList<>();
            List<Expression> rightComparisonExpressions = new ArrayList<>();
            List<ComparisonExpressionType> joinConditionComparisonTypes = new ArrayList<>();
            // Classify each conjunct of the join criteria
            for (Expression conjunct : ExpressionUtils.extractConjuncts(criteria)) {
                conjunct = ExpressionUtils.normalize(conjunct);
                if (!isEqualComparisonExpression(conjunct) && node.getType() != INNER) {
                    // Outer joins keep non-equality conditions as "complex" join expressions
                    complexJoinExpressions.add(conjunct);
                    continue;
                }
                Set<QualifiedName> dependencies = SymbolsExtractor.extractNames(conjunct, analysis.getColumnReferences());
                boolean isJoinUsing = node.getCriteria().filter(JoinUsing.class::isInstance).isPresent();
                checkState(!isJoinUsing || SystemSessionProperties.isLegacyJoinUsingEnabled(session));
                if (!isJoinUsing && (dependencies.stream().allMatch(left::canResolve) || dependencies.stream().allMatch(right::canResolve))) {
                    // If the conjunct can be evaluated entirely with the inputs on either side of the join, add
                    // it to the list complex expressions and let the optimizers figure out how to push it down later.
                    // Due to legacy reasons, the expression for "join using" looks like "x = x", which (incorrectly)
                    // appears to fit the condition we're after. So we skip them.
                    complexJoinExpressions.add(conjunct);
                }
                else if (conjunct instanceof ComparisonExpression) {
                    Expression firstExpression = ((ComparisonExpression) conjunct).getLeft();
                    Expression secondExpression = ((ComparisonExpression) conjunct).getRight();
                    ComparisonExpressionType comparisonType = ((ComparisonExpression) conjunct).getType();
                    Set<QualifiedName> firstDependencies = SymbolsExtractor.extractNames(firstExpression, analysis.getColumnReferences());
                    Set<QualifiedName> secondDependencies = SymbolsExtractor.extractNames(secondExpression, analysis.getColumnReferences());
                    if (firstDependencies.stream().allMatch(left::canResolve) && secondDependencies.stream().allMatch(right::canResolve)) {
                        leftComparisonExpressions.add(firstExpression);
                        rightComparisonExpressions.add(secondExpression);
                        joinConditionComparisonTypes.add(comparisonType);
                    }
                    else if (firstDependencies.stream().allMatch(right::canResolve) && secondDependencies.stream().allMatch(left::canResolve)) {
                        // Sides are swapped relative to the join order; flip the comparison to match
                        leftComparisonExpressions.add(secondExpression);
                        rightComparisonExpressions.add(firstExpression);
                        joinConditionComparisonTypes.add(comparisonType.flip());
                    }
                    else {
                        // the case when we mix symbols from both left and right join side on either side of condition.
                        complexJoinExpressions.add(conjunct);
                    }
                }
                else {
                    complexJoinExpressions.add(conjunct);
                }
            }
            leftPlanBuilder = subqueryPlanner.handleSubqueries(leftPlanBuilder, leftComparisonExpressions, node);
            rightPlanBuilder = subqueryPlanner.handleSubqueries(rightPlanBuilder, rightComparisonExpressions, node);
            // Add projections for join criteria
            leftPlanBuilder = leftPlanBuilder.appendProjections(leftComparisonExpressions, symbolAllocator, idAllocator);
            rightPlanBuilder = rightPlanBuilder.appendProjections(rightComparisonExpressions, symbolAllocator, idAllocator);
            for (int i = 0; i < leftComparisonExpressions.size(); i++) {
                if (joinConditionComparisonTypes.get(i) == ComparisonExpressionType.EQUAL) {
                    Symbol leftSymbol = leftPlanBuilder.translate(leftComparisonExpressions.get(i));
                    Symbol rightSymbol = rightPlanBuilder.translate(rightComparisonExpressions.get(i));
                    equiClauses.add(new JoinNode.EquiJoinClause(leftSymbol, rightSymbol));
                }
                else {
                    // Non-equality comparisons on an INNER join are evaluated as a filter after the join
                    Expression leftExpression = leftPlanBuilder.rewrite(leftComparisonExpressions.get(i));
                    Expression rightExpression = rightPlanBuilder.rewrite(rightComparisonExpressions.get(i));
                    postInnerJoinConditions.add(new ComparisonExpression(joinConditionComparisonTypes.get(i), leftExpression, rightExpression));
                }
            }
        }
        PlanNode root = new JoinNode(idAllocator.getNextId(),
                JoinNode.Type.typeConvert(node.getType()),
                leftPlanBuilder.getRoot(),
                rightPlanBuilder.getRoot(),
                equiClauses.build(),
                ImmutableList.<Symbol>builder()
                        .addAll(leftPlanBuilder.getRoot().getOutputSymbols())
                        .addAll(rightPlanBuilder.getRoot().getOutputSymbols())
                        .build(),
                Optional.empty(),
                Optional.empty(),
                Optional.empty(),
                Optional.empty());
        if (node.getType() != INNER) {
            // Outer joins cannot evaluate IN-subquery predicates in the join condition
            for (Expression complexExpression : complexJoinExpressions) {
                Set<InPredicate> inPredicates = subqueryPlanner.collectInPredicateSubqueries(complexExpression, node);
                if (!inPredicates.isEmpty()) {
                    InPredicate inPredicate = Iterables.getLast(inPredicates);
                    throw notSupportedException(inPredicate, "IN with subquery predicate in join condition");
                }
            }
            // subqueries can be applied only to one side of join - left side is selected in arbitrary way
            leftPlanBuilder = subqueryPlanner.handleUncorrelatedSubqueries(leftPlanBuilder, complexJoinExpressions, node);
        }
        RelationPlan intermediateRootRelationPlan = new RelationPlan(root, analysis.getScope(node), outputSymbols);
        TranslationMap translationMap = new TranslationMap(intermediateRootRelationPlan, analysis, lambdaDeclarationToSymbolMap);
        translationMap.setFieldMappings(outputSymbols);
        translationMap.putExpressionMappingsFrom(leftPlanBuilder.getTranslations());
        translationMap.putExpressionMappingsFrom(rightPlanBuilder.getTranslations());
        if (node.getType() != INNER && !complexJoinExpressions.isEmpty()) {
            // For outer joins the remaining complex conditions become the join filter,
            // so the JoinNode is rebuilt with the rewritten filter attached
            Expression joinedFilterCondition = ExpressionUtils.and(complexJoinExpressions);
            joinedFilterCondition = ExpressionTreeRewriter.rewriteWith(new ParameterRewriter(analysis.getParameters(), analysis), joinedFilterCondition);
            Expression rewrittenFilterCondition = translationMap.rewrite(joinedFilterCondition);
            root = new JoinNode(idAllocator.getNextId(),
                    JoinNode.Type.typeConvert(node.getType()),
                    leftPlanBuilder.getRoot(),
                    rightPlanBuilder.getRoot(),
                    equiClauses.build(),
                    ImmutableList.<Symbol>builder()
                            .addAll(leftPlanBuilder.getRoot().getOutputSymbols())
                            .addAll(rightPlanBuilder.getRoot().getOutputSymbols())
                            .build(),
                    Optional.of(rewrittenFilterCondition),
                    Optional.empty(),
                    Optional.empty(),
                    Optional.empty());
        }
        if (node.getType() == INNER) {
            // rewrite all the other conditions using output symbols from left + right plan node.
            PlanBuilder rootPlanBuilder = new PlanBuilder(translationMap, root, analysis.getParameters());
            rootPlanBuilder = subqueryPlanner.handleSubqueries(rootPlanBuilder, complexJoinExpressions, node);
            for (Expression expression : complexJoinExpressions) {
                expression = ExpressionTreeRewriter.rewriteWith(new ParameterRewriter(analysis.getParameters(), analysis), expression);
                postInnerJoinConditions.add(rootPlanBuilder.rewrite(expression));
            }
            root = rootPlanBuilder.getRoot();
            Expression postInnerJoinCriteria;
            if (!postInnerJoinConditions.isEmpty()) {
                postInnerJoinCriteria = ExpressionUtils.and(postInnerJoinConditions);
                root = new FilterNode(idAllocator.getNextId(), root, postInnerJoinCriteria);
            }
        }
        return new RelationPlan(root, analysis.getScope(node), outputSymbols);
    }
    /**
     * Plans {@code l JOIN r USING (k1, ..., kn)} under non-legacy semantics: each side
     * is first coerced to the common supertype of every join key, an equi-join is
     * performed on the coerced keys, and each USING column is output exactly once as
     * {@code coalesce(l.k, r.k)} followed by the remaining (non-key) columns of both sides.
     */
    private RelationPlan planJoinUsing(Join node, RelationPlan left, RelationPlan right)
    {
        /* Given: l JOIN r USING (k1, ..., kn)
           produces:
            - project
                    coalesce(l.k1, r.k1)
                    ...,
                    coalesce(l.kn, r.kn)
                    l.v1,
                    ...,
                    l.vn,
                    r.v1,
                    ...,
                    r.vn
              - join (l.k1 = r.k1 and ... l.kn = r.kn)
                    - project
                        cast(l.k1 as commonType(l.k1, r.k1))
                        ...
                    - project
                        cast(rl.k1 as commonType(l.k1, r.k1))
            If casts are redundant (due to column type and common type being equal),
            they will be removed by optimization passes.
        */
        List<Identifier> joinColumns = ((JoinUsing) node.getCriteria().get()).getColumns();
        Analysis.JoinUsingAnalysis joinAnalysis = analysis.getJoinUsing(node);
        ImmutableList.Builder<JoinNode.EquiJoinClause> clauses = ImmutableList.builder();
        Map<Identifier, Symbol> leftJoinColumns = new HashMap<>();
        Map<Identifier, Symbol> rightJoinColumns = new HashMap<>();
        Assignments.Builder leftCoercions = Assignments.builder();
        Assignments.Builder rightCoercions = Assignments.builder();
        // Pass every existing output through unchanged; key coercions are added on top
        leftCoercions.putIdentities(left.getRoot().getOutputSymbols());
        rightCoercions.putIdentities(right.getRoot().getOutputSymbols());
        for (int i = 0; i < joinColumns.size(); i++) {
            Identifier identifier = joinColumns.get(i);
            Type type = analysis.getType(identifier);
            // compute the coercion for the field on the left to the common supertype of left & right
            Symbol leftOutput = symbolAllocator.newSymbol(identifier, type);
            int leftField = joinAnalysis.getLeftJoinFields().get(i);
            leftCoercions.put(leftOutput, new Cast(
                    left.getSymbol(leftField).toSymbolReference(),
                    type.getTypeSignature().toString(),
                    false,
                    metadata.getTypeManager().isTypeOnlyCoercion(left.getDescriptor().getFieldByIndex(leftField).getType(), type)));
            leftJoinColumns.put(identifier, leftOutput);
            // compute the coercion for the field on the right to the common supertype of left & right
            Symbol rightOutput = symbolAllocator.newSymbol(identifier, type);
            int rightField = joinAnalysis.getRightJoinFields().get(i);
            rightCoercions.put(rightOutput, new Cast(
                    right.getSymbol(rightField).toSymbolReference(),
                    type.getTypeSignature().toString(),
                    false,
                    metadata.getTypeManager().isTypeOnlyCoercion(right.getDescriptor().getFieldByIndex(rightField).getType(), type)));
            rightJoinColumns.put(identifier, rightOutput);
            clauses.add(new JoinNode.EquiJoinClause(leftOutput, rightOutput));
        }
        ProjectNode leftCoercion = new ProjectNode(idAllocator.getNextId(), left.getRoot(), leftCoercions.build());
        ProjectNode rightCoercion = new ProjectNode(idAllocator.getNextId(), right.getRoot(), rightCoercions.build());
        JoinNode join = new JoinNode(
                idAllocator.getNextId(),
                JoinNode.Type.typeConvert(node.getType()),
                leftCoercion,
                rightCoercion,
                clauses.build(),
                ImmutableList.<Symbol>builder()
                        .addAll(leftCoercion.getOutputSymbols())
                        .addAll(rightCoercion.getOutputSymbols())
                        .build(),
                Optional.empty(),
                Optional.empty(),
                Optional.empty(),
                Optional.empty());
        // Add a projection to produce the outputs of the columns in the USING clause,
        // which are defined as coalesce(l.k, r.k)
        Assignments.Builder assignments = Assignments.builder();
        ImmutableList.Builder<Symbol> outputs = ImmutableList.builder();
        for (Identifier column : joinColumns) {
            Symbol output = symbolAllocator.newSymbol(column, analysis.getType(column));
            outputs.add(output);
            assignments.put(output, new CoalesceExpression(
                    leftJoinColumns.get(column).toSymbolReference(),
                    rightJoinColumns.get(column).toSymbolReference()));
        }
        // Non-key columns pass through unchanged, left side first
        for (int field : joinAnalysis.getOtherLeftFields()) {
            Symbol symbol = left.getFieldMappings().get(field);
            outputs.add(symbol);
            assignments.put(symbol, symbol.toSymbolReference());
        }
        for (int field : joinAnalysis.getOtherRightFields()) {
            Symbol symbol = right.getFieldMappings().get(field);
            outputs.add(symbol);
            assignments.put(symbol, symbol.toSymbolReference());
        }
        return new RelationPlan(
                new ProjectNode(idAllocator.getNextId(), join, assignments.build()),
                analysis.getScope(node),
                outputs.build());
    }
private Optional<Unnest> getUnnest(Relation relation)
{
if (relation instanceof AliasedRelation) {
return getUnnest(((AliasedRelation) relation).getRelation());
}
if (relation instanceof Unnest) {
return Optional.of((Unnest) relation);
}
return Optional.empty();
}
private Optional<Lateral> getLateral(Relation relation)
{
if (relation instanceof AliasedRelation) {
return getLateral(((AliasedRelation) relation).getRelation());
}
if (relation instanceof Lateral) {
return Optional.of((Lateral) relation);
}
return Optional.empty();
}
private RelationPlan planLateralJoin(Join join, RelationPlan leftPlan, Lateral lateral)
{
RelationPlan rightPlan = process(lateral.getQuery(), null);
PlanBuilder leftPlanBuilder = initializePlanBuilder(leftPlan);
PlanBuilder rightPlanBuilder = initializePlanBuilder(rightPlan);
PlanBuilder planBuilder = subqueryPlanner.appendLateralJoin(leftPlanBuilder, rightPlanBuilder, lateral.getQuery(), true);
List<Symbol> outputSymbols = ImmutableList.<Symbol>builder()
.addAll(leftPlan.getRoot().getOutputSymbols())
.addAll(rightPlan.getRoot().getOutputSymbols())
.build();
return new RelationPlan(planBuilder.getRoot(), analysis.getScope(join), outputSymbols);
}
private static boolean isEqualComparisonExpression(Expression conjunct)
{
return conjunct instanceof ComparisonExpression && ((ComparisonExpression) conjunct).getType() == ComparisonExpressionType.EQUAL;
}
    /**
     * Plans {@code left CROSS JOIN UNNEST(...)}: projects the unnest arguments onto
     * the left plan, then wraps it in an {@link UnnestNode} that expands each ARRAY
     * argument into one output column and each MAP argument into key/value columns,
     * plus an optional ordinality column at the end.
     */
    private RelationPlan planCrossJoinUnnest(RelationPlan leftPlan, Join joinNode, Unnest node)
    {
        RelationType unnestOutputDescriptor = analysis.getOutputDescriptor(node);
        // Create symbols for the result of unnesting
        ImmutableList.Builder<Symbol> unnestedSymbolsBuilder = ImmutableList.builder();
        for (Field field : unnestOutputDescriptor.getVisibleFields()) {
            Symbol symbol = symbolAllocator.newSymbol(field);
            unnestedSymbolsBuilder.add(symbol);
        }
        ImmutableList<Symbol> unnestedSymbols = unnestedSymbolsBuilder.build();
        // Add a projection for all the unnest arguments
        PlanBuilder planBuilder = initializePlanBuilder(leftPlan);
        planBuilder = planBuilder.appendProjections(node.getExpressions(), symbolAllocator, idAllocator);
        TranslationMap translations = planBuilder.getTranslations();
        ProjectNode projectNode = (ProjectNode) planBuilder.getRoot();
        // Consume the output symbols in order, matching them to arguments by type:
        // one symbol per ARRAY argument, two (key, value) per MAP argument
        ImmutableMap.Builder<Symbol, List<Symbol>> unnestSymbols = ImmutableMap.builder();
        UnmodifiableIterator<Symbol> unnestedSymbolsIterator = unnestedSymbols.iterator();
        for (Expression expression : node.getExpressions()) {
            Type type = analysis.getType(expression);
            Symbol inputSymbol = translations.get(expression);
            if (type instanceof ArrayType) {
                unnestSymbols.put(inputSymbol, ImmutableList.of(unnestedSymbolsIterator.next()));
            }
            else if (type instanceof MapType) {
                unnestSymbols.put(inputSymbol, ImmutableList.of(unnestedSymbolsIterator.next(), unnestedSymbolsIterator.next()));
            }
            else {
                throw new IllegalArgumentException("Unsupported type for UNNEST: " + type);
            }
        }
        // The ordinality column, when present, is the last allocated symbol
        Optional<Symbol> ordinalitySymbol = node.isWithOrdinality() ? Optional.of(unnestedSymbolsIterator.next()) : Optional.empty();
        checkState(!unnestedSymbolsIterator.hasNext(), "Not all output symbols were matched with input symbols");
        UnnestNode unnestNode = new UnnestNode(idAllocator.getNextId(), projectNode, leftPlan.getFieldMappings(), unnestSymbols.build(), ordinalitySymbol);
        return new RelationPlan(unnestNode, analysis.getScope(joinNode), unnestNode.getOutputSymbols());
    }
    /**
     * Plans a parenthesized subquery in the FROM clause by planning its inner query directly.
     */
    @Override
    protected RelationPlan visitTableSubquery(TableSubquery node, Void context)
    {
        return process(node.getQuery(), context);
    }
@Override
protected RelationPlan visitQuery(Query node, Void context)
{
return new QueryPlanner(analysis, symbolAllocator, idAllocator, lambdaDeclarationToSymbolMap, metadata, session)
.plan(node);
}
@Override
protected RelationPlan visitQuerySpecification(QuerySpecification node, Void context)
{
return new QueryPlanner(analysis, symbolAllocator, idAllocator, lambdaDeclarationToSymbolMap, metadata, session)
.plan(node);
}
    /**
     * Plans a VALUES relation. Each row is constant-folded at planning time and
     * coerced to the declared field types; a row may be an explicit {@code ROW(...)}
     * constructor (multi-column) or a bare expression (single column).
     */
    @Override
    protected RelationPlan visitValues(Values node, Void context)
    {
        Scope scope = analysis.getScope(node);
        ImmutableList.Builder<Symbol> outputSymbolsBuilder = ImmutableList.builder();
        for (Field field : scope.getRelationType().getVisibleFields()) {
            Symbol symbol = symbolAllocator.newSymbol(field);
            outputSymbolsBuilder.add(symbol);
        }
        ImmutableList.Builder<List<Expression>> rows = ImmutableList.builder();
        for (Expression row : node.getRows()) {
            ImmutableList.Builder<Expression> values = ImmutableList.builder();
            if (row instanceof Row) {
                // Multi-column row: fold each item against its corresponding field type
                List<Expression> items = ((Row) row).getItems();
                for (int i = 0; i < items.size(); i++) {
                    Expression expression = items.get(i);
                    expression = ExpressionTreeRewriter.rewriteWith(new ParameterRewriter(analysis.getParameters(), analysis), expression);
                    // TODO: RelationPlanner should not invoke evaluateConstantExpression, which in turn invokes ExpressionInterpreter.
                    // This should happen in an optimizer.
                    Object constantValue = evaluateConstantExpression(expression, analysis.getCoercions(), metadata, session, analysis.getColumnReferences(), analysis.getParameters());
                    values.add(LiteralInterpreter.toExpression(constantValue, scope.getRelationType().getFieldByIndex(i).getType()));
                }
            }
            else {
                // Single-column row: the whole expression is the value of field 0
                row = ExpressionTreeRewriter.rewriteWith(new ParameterRewriter(analysis.getParameters(), analysis), row);
                // TODO: RelationPlanner should not invoke evaluateConstantExpression, which in turn invokes ExpressionInterpreter.
                // This should happen in an optimizer.
                Object constantValue = evaluateConstantExpression(row, analysis.getCoercions(), metadata, session, analysis.getColumnReferences(), analysis.getParameters());
                values.add(LiteralInterpreter.toExpression(constantValue, scope.getRelationType().getFieldByIndex(0).getType()));
            }
            rows.add(values.build());
        }
        ValuesNode valuesNode = new ValuesNode(idAllocator.getNextId(), outputSymbolsBuilder.build(), rows.build());
        return new RelationPlan(valuesNode, scope, outputSymbolsBuilder.build());
    }
    /**
     * Plans an UNNEST used as a stand-alone relation (not the right side of a join), so every
     * argument expression must be a constant.  One output symbol is allocated per visible
     * field of the UNNEST's scope; each ARRAY argument consumes one of those symbols, each
     * MAP argument consumes two, and WITH ORDINALITY consumes one trailing symbol.
     */
    @Override
    protected RelationPlan visitUnnest(Unnest node, Void context)
    {
        Scope scope = analysis.getScope(node);
        ImmutableList.Builder<Symbol> outputSymbolsBuilder = ImmutableList.builder();
        for (Field field : scope.getRelationType().getVisibleFields()) {
            Symbol symbol = symbolAllocator.newSymbol(field);
            outputSymbolsBuilder.add(symbol);
        }
        List<Symbol> unnestedSymbols = outputSymbolsBuilder.build();
        // If we got here, then we must be unnesting a constant, and not be in a join (where there could be column references)
        ImmutableList.Builder<Symbol> argumentSymbols = ImmutableList.builder();
        ImmutableList.Builder<Expression> values = ImmutableList.builder();
        ImmutableMap.Builder<Symbol, List<Symbol>> unnestSymbols = ImmutableMap.builder();
        // Output symbols are consumed positionally as the argument expressions are processed.
        Iterator<Symbol> unnestedSymbolsIterator = unnestedSymbols.iterator();
        for (Expression expression : node.getExpressions()) {
            expression = ExpressionTreeRewriter.rewriteWith(new ParameterRewriter(analysis.getParameters(), analysis), expression);
            // TODO: RelationPlanner should not invoke evaluateConstantExpression, which in turn invokes ExpressionInterpreter.
            // This should happen in an optimizer.
            Object constantValue = evaluateConstantExpression(expression, analysis.getCoercions(), metadata, session, analysis.getColumnReferences(), analysis.getParameters());
            Type type = analysis.getType(expression);
            values.add(LiteralInterpreter.toExpression(constantValue, type));
            Symbol inputSymbol = symbolAllocator.newSymbol(expression, type);
            argumentSymbols.add(inputSymbol);
            if (type instanceof ArrayType) {
                // An array expands to a single unnested column.
                unnestSymbols.put(inputSymbol, ImmutableList.of(unnestedSymbolsIterator.next()));
            }
            else if (type instanceof MapType) {
                // A map expands to two unnested columns.
                unnestSymbols.put(inputSymbol, ImmutableList.of(unnestedSymbolsIterator.next(), unnestedSymbolsIterator.next()));
            }
            else {
                throw new IllegalArgumentException("Unsupported type for UNNEST: " + type);
            }
        }
        Optional<Symbol> ordinalitySymbol = node.isWithOrdinality() ? Optional.of(unnestedSymbolsIterator.next()) : Optional.empty();
        // Every allocated output symbol must have been claimed by an argument (or ordinality).
        checkState(!unnestedSymbolsIterator.hasNext(), "Not all output symbols were matched with input symbols");
        // The single-row ValuesNode supplies the constant arguments that the UnnestNode expands.
        ValuesNode valuesNode = new ValuesNode(idAllocator.getNextId(), argumentSymbols.build(), ImmutableList.of(values.build()));
        UnnestNode unnestNode = new UnnestNode(idAllocator.getNextId(), valuesNode, ImmutableList.of(), unnestSymbols.build(), ordinalitySymbol);
        return new RelationPlan(unnestNode, scope, unnestedSymbols);
    }
private RelationPlan processAndCoerceIfNecessary(Relation node, Void context)
{
Type[] coerceToTypes = analysis.getRelationCoercion(node);
RelationPlan plan = this.process(node, context);
if (coerceToTypes == null) {
return plan;
}
return addCoercions(plan, coerceToTypes);
}
private RelationPlan addCoercions(RelationPlan plan, Type[] targetColumnTypes)
{
List<Symbol> oldSymbols = plan.getFieldMappings();
RelationType oldDescriptor = plan.getDescriptor().withOnlyVisibleFields();
verify(targetColumnTypes.length == oldSymbols.size());
ImmutableList.Builder<Symbol> newSymbols = new ImmutableList.Builder<>();
Field[] newFields = new Field[targetColumnTypes.length];
Assignments.Builder assignments = Assignments.builder();
for (int i = 0; i < targetColumnTypes.length; i++) {
Symbol inputSymbol = oldSymbols.get(i);
Type inputType = symbolAllocator.getTypes().get(inputSymbol);
Type outputType = targetColumnTypes[i];
if (!outputType.equals(inputType)) {
Expression cast = new Cast(inputSymbol.toSymbolReference(), outputType.getTypeSignature().toString());
Symbol outputSymbol = symbolAllocator.newSymbol(cast, outputType);
assignments.put(outputSymbol, cast);
newSymbols.add(outputSymbol);
}
else {
SymbolReference symbolReference = inputSymbol.toSymbolReference();
Symbol outputSymbol = symbolAllocator.newSymbol(symbolReference, outputType);
assignments.put(outputSymbol, symbolReference);
newSymbols.add(outputSymbol);
}
Field oldField = oldDescriptor.getFieldByIndex(i);
newFields[i] = new Field(
oldField.getRelationAlias(),
oldField.getName(),
targetColumnTypes[i],
oldField.isHidden(),
oldField.getOriginTable(),
oldField.isAliased());
}
ProjectNode projectNode = new ProjectNode(idAllocator.getNextId(), plan.getRoot(), assignments.build());
return new RelationPlan(projectNode, Scope.builder().withRelationType(RelationId.anonymous(), new RelationType(newFields)).build(), newSymbols.build());
}
@Override
protected RelationPlan visitUnion(Union node, Void context)
{
checkArgument(!node.getRelations().isEmpty(), "No relations specified for UNION");
SetOperationPlan setOperationPlan = process(node);
PlanNode planNode = new UnionNode(idAllocator.getNextId(), setOperationPlan.getSources(), setOperationPlan.getSymbolMapping(), ImmutableList.copyOf(setOperationPlan.getSymbolMapping().keySet()));
if (node.isDistinct()) {
planNode = distinct(planNode);
}
return new RelationPlan(planNode, analysis.getScope(node), planNode.getOutputSymbols());
}
@Override
protected RelationPlan visitIntersect(Intersect node, Void context)
{
checkArgument(!node.getRelations().isEmpty(), "No relations specified for INTERSECT");
SetOperationPlan setOperationPlan = process(node);
PlanNode planNode = new IntersectNode(idAllocator.getNextId(), setOperationPlan.getSources(), setOperationPlan.getSymbolMapping(), ImmutableList.copyOf(setOperationPlan.getSymbolMapping().keySet()));
return new RelationPlan(planNode, analysis.getScope(node), planNode.getOutputSymbols());
}
@Override
protected RelationPlan visitExcept(Except node, Void context)
{
checkArgument(!node.getRelations().isEmpty(), "No relations specified for EXCEPT");
SetOperationPlan setOperationPlan = process(node);
PlanNode planNode = new ExceptNode(idAllocator.getNextId(), setOperationPlan.getSources(), setOperationPlan.getSymbolMapping(), ImmutableList.copyOf(setOperationPlan.getSymbolMapping().keySet()));
return new RelationPlan(planNode, analysis.getScope(node), planNode.getOutputSymbols());
}
    /**
     * Plans the children of a set operation (UNION/INTERSECT/EXCEPT) and derives the
     * operation's output symbols plus the positional mapping from each output symbol to the
     * corresponding symbol of every child plan (one mapping entry per child, in order).
     */
    private SetOperationPlan process(SetOperation node)
    {
        // Lazily initialized from the first child; null until then.
        List<Symbol> outputs = null;
        ImmutableList.Builder<PlanNode> sources = ImmutableList.builder();
        ImmutableListMultimap.Builder<Symbol, Symbol> symbolMapping = ImmutableListMultimap.builder();
        List<RelationPlan> subPlans = node.getRelations().stream()
                .map(relation -> processAndCoerceIfNecessary(relation, null))
                .collect(toImmutableList());
        for (RelationPlan relationPlan : subPlans) {
            List<Symbol> childOutputSymbols = relationPlan.getFieldMappings();
            if (outputs == null) {
                // Use the first Relation to derive output symbol names
                RelationType descriptor = relationPlan.getDescriptor();
                ImmutableList.Builder<Symbol> outputSymbolBuilder = ImmutableList.builder();
                for (Field field : descriptor.getVisibleFields()) {
                    int fieldIndex = descriptor.indexOf(field);
                    Symbol symbol = childOutputSymbols.get(fieldIndex);
                    outputSymbolBuilder.add(symbolAllocator.newSymbol(symbol.getName(), symbolAllocator.getTypes().get(symbol)));
                }
                outputs = outputSymbolBuilder.build();
            }
            // Every child must expose the same number of visible fields as the first one.
            RelationType descriptor = relationPlan.getDescriptor();
            checkArgument(descriptor.getVisibleFieldCount() == outputs.size(),
                    "Expected relation to have %s symbols but has %s symbols",
                    descriptor.getVisibleFieldCount(),
                    outputs.size());
            // Map each output symbol to this child's symbol for the same field position.
            int fieldId = 0;
            for (Field field : descriptor.getVisibleFields()) {
                int fieldIndex = descriptor.indexOf(field);
                symbolMapping.put(outputs.get(fieldId), childOutputSymbols.get(fieldIndex));
                fieldId++;
            }
            sources.add(relationPlan.getRoot());
        }
        return new SetOperationPlan(sources.build(), symbolMapping.build());
    }
private PlanBuilder initializePlanBuilder(RelationPlan relationPlan)
{
TranslationMap translations = new TranslationMap(relationPlan, analysis, lambdaDeclarationToSymbolMap);
// Make field->symbol mapping from underlying relation plan available for translations
// This makes it possible to rewrite FieldOrExpressions that reference fields from the underlying tuple directly
translations.setFieldMappings(relationPlan.getFieldMappings());
return new PlanBuilder(translations, relationPlan.getRoot(), analysis.getParameters());
}
private PlanNode distinct(PlanNode node)
{
return new AggregationNode(idAllocator.getNextId(),
node,
ImmutableMap.of(),
ImmutableList.of(node.getOutputSymbols()),
AggregationNode.Step.SINGLE,
Optional.empty(),
Optional.empty());
}
    /**
     * Value holder produced by {@code process(SetOperation)}: the planned child sources of a
     * set operation, together with the multimap from each output symbol to the corresponding
     * symbol of every child plan.
     */
    private static class SetOperationPlan
    {
        private final List<PlanNode> sources;
        private final ListMultimap<Symbol, Symbol> symbolMapping;

        private SetOperationPlan(List<PlanNode> sources, ListMultimap<Symbol, Symbol> symbolMapping)
        {
            this.sources = sources;
            this.symbolMapping = symbolMapping;
        }

        public List<PlanNode> getSources()
        {
            return sources;
        }

        public ListMultimap<Symbol, Symbol> getSymbolMapping()
        {
            return symbolMapping;
        }
    }
}
| apache-2.0 |
ibatis-dao/metadict | src/das/dao/sort/ISortOrder.java | 1212 | /*
* Copyright 2015 serg.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package das.dao.sort;
/**
 * Sort-order list used by data access objects: an ordered collection of
 * (column name, direction) entries.
 * @author serg
 */
public interface ISortOrder {
    /** Sort direction of a single column; NONE means no sorting for that column. */
    public enum Direction {
        ASC, DESC, NONE
    }
    /* Renders the field name(s) together with the sort direction. */
    String build();
    /** Number of entries in the list. */
    int size();
    /** Whether the entry at {@code index} can be sorted on. */
    boolean isSortable(int index);
    /** Column name of the entry at {@code index}. */
    String getName(int index);
    /** Sort direction of the entry at {@code index}. */
    ISortOrder.Direction getDirection(int index);
    /** Appends a (column name, direction) entry to the list. */
    void add(String columnName, ISortOrder.Direction direction);
    /** Toggles the direction of the entry at {@code index} (cycle order is implementation-defined). */
    void toggle(int index);
    /** Removes the entry at {@code index}; returns whether an entry was removed. */
    boolean del(int index);
    /** Removes all entries. */
    void clear();
}
| apache-2.0 |
vam-google/google-cloud-java | google-cloud-clients/google-cloud-compute/src/main/java/com/google/cloud/compute/v1/VmEndpointNatMappingsList.java | 10483 | /*
* Copyright 2019 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.compute.v1;
import com.google.api.core.BetaApi;
import com.google.api.gax.httpjson.ApiMessage;
import java.util.LinkedList;
import java.util.List;
import java.util.Objects;
import javax.annotation.Generated;
import javax.annotation.Nullable;
/** Contains a list of VmEndpointNatMappings. */
@Generated("by GAPIC")
@BetaApi
public final class VmEndpointNatMappingsList implements ApiMessage {
  private final String id;
  private final String kind;
  private final String nextPageToken;
  private final List<VmEndpointNatMappings> result;
  private final String selfLink;
  private final Warning warning;

  // Default instance: all fields unset.
  private VmEndpointNatMappingsList() {
    this.id = null;
    this.kind = null;
    this.nextPageToken = null;
    this.result = null;
    this.selfLink = null;
    this.warning = null;
  }

  private VmEndpointNatMappingsList(
      String id,
      String kind,
      String nextPageToken,
      List<VmEndpointNatMappings> result,
      String selfLink,
      Warning warning) {
    this.id = id;
    this.kind = kind;
    this.nextPageToken = nextPageToken;
    this.result = result;
    this.selfLink = selfLink;
    this.warning = warning;
  }

  @Override
  public Object getFieldValue(String fieldName) {
    if ("id".equals(fieldName)) {
      return id;
    }
    if ("kind".equals(fieldName)) {
      return kind;
    }
    if ("nextPageToken".equals(fieldName)) {
      return nextPageToken;
    }
    if ("result".equals(fieldName)) {
      return result;
    }
    if ("selfLink".equals(fieldName)) {
      return selfLink;
    }
    if ("warning".equals(fieldName)) {
      return warning;
    }
    return null;
  }

  @Nullable
  @Override
  public ApiMessage getApiMessageRequestBody() {
    return null;
  }

  @Nullable
  @Override
  /**
   * The fields that should be serialized (even if they have empty values). If the containing
   * message object has a non-null fieldmask, then all the fields in the field mask (and only those
   * fields in the field mask) will be serialized. If the containing object does not have a
   * fieldmask, then only non-empty fields will be serialized.
   */
  public List<String> getFieldMask() {
    return null;
  }

  /**
   * [Output Only] The unique identifier for the resource. This identifier is defined by the server.
   */
  public String getId() {
    return id;
  }

  /**
   * [Output Only] Type of resource. Always compute#vmEndpointNatMappingsList for lists of Nat
   * mappings of VM endpoints.
   */
  public String getKind() {
    return kind;
  }

  /**
   * [Output Only] This token allows you to get the next page of results for list requests. If the
   * number of results is larger than maxResults, use the nextPageToken as a value for the query
   * parameter pageToken in the next list request. Subsequent list requests will have their own
   * nextPageToken to continue paging through the results.
   */
  public String getNextPageToken() {
    return nextPageToken;
  }

  /** [Output Only] A list of Nat mapping information of VM endpoints. */
  public List<VmEndpointNatMappings> getResultList() {
    return result;
  }

  /** [Output Only] Server-defined URL for this resource. */
  public String getSelfLink() {
    return selfLink;
  }

  /** [Output Only] Informational warning message. */
  public Warning getWarning() {
    return warning;
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(VmEndpointNatMappingsList prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  public static VmEndpointNatMappingsList getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final VmEndpointNatMappingsList DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new VmEndpointNatMappingsList();
  }

  public static class Builder {
    private String id;
    private String kind;
    private String nextPageToken;
    private List<VmEndpointNatMappings> result;
    private String selfLink;
    private Warning warning;

    Builder() {}

    public Builder mergeFrom(VmEndpointNatMappingsList other) {
      if (other == VmEndpointNatMappingsList.getDefaultInstance()) return this;
      if (other.getId() != null) {
        this.id = other.id;
      }
      if (other.getKind() != null) {
        this.kind = other.kind;
      }
      if (other.getNextPageToken() != null) {
        this.nextPageToken = other.nextPageToken;
      }
      if (other.getResultList() != null) {
        this.result = other.result;
      }
      if (other.getSelfLink() != null) {
        this.selfLink = other.selfLink;
      }
      if (other.getWarning() != null) {
        this.warning = other.warning;
      }
      return this;
    }

    Builder(VmEndpointNatMappingsList source) {
      this.id = source.id;
      this.kind = source.kind;
      this.nextPageToken = source.nextPageToken;
      this.result = source.result;
      this.selfLink = source.selfLink;
      this.warning = source.warning;
    }

    /**
     * [Output Only] The unique identifier for the resource. This identifier is defined by the
     * server.
     */
    public String getId() {
      return id;
    }

    /**
     * [Output Only] The unique identifier for the resource. This identifier is defined by the
     * server.
     */
    public Builder setId(String id) {
      this.id = id;
      return this;
    }

    /**
     * [Output Only] Type of resource. Always compute#vmEndpointNatMappingsList for lists of Nat
     * mappings of VM endpoints.
     */
    public String getKind() {
      return kind;
    }

    /**
     * [Output Only] Type of resource. Always compute#vmEndpointNatMappingsList for lists of Nat
     * mappings of VM endpoints.
     */
    public Builder setKind(String kind) {
      this.kind = kind;
      return this;
    }

    /**
     * [Output Only] This token allows you to get the next page of results for list requests. If the
     * number of results is larger than maxResults, use the nextPageToken as a value for the query
     * parameter pageToken in the next list request. Subsequent list requests will have their own
     * nextPageToken to continue paging through the results.
     */
    public String getNextPageToken() {
      return nextPageToken;
    }

    /**
     * [Output Only] This token allows you to get the next page of results for list requests. If the
     * number of results is larger than maxResults, use the nextPageToken as a value for the query
     * parameter pageToken in the next list request. Subsequent list requests will have their own
     * nextPageToken to continue paging through the results.
     */
    public Builder setNextPageToken(String nextPageToken) {
      this.nextPageToken = nextPageToken;
      return this;
    }

    /** [Output Only] A list of Nat mapping information of VM endpoints. */
    public List<VmEndpointNatMappings> getResultList() {
      return result;
    }

    /** [Output Only] A list of Nat mapping information of VM endpoints. */
    public Builder addAllResult(List<VmEndpointNatMappings> result) {
      if (result == null) {
        // Nothing to add. This guard also keeps clone() from throwing a
        // NullPointerException when the result list was never set.
        return this;
      }
      if (this.result == null) {
        this.result = new LinkedList<>();
      }
      this.result.addAll(result);
      return this;
    }

    /** [Output Only] A list of Nat mapping information of VM endpoints. */
    public Builder addResult(VmEndpointNatMappings result) {
      if (this.result == null) {
        this.result = new LinkedList<>();
      }
      this.result.add(result);
      return this;
    }

    /** [Output Only] Server-defined URL for this resource. */
    public String getSelfLink() {
      return selfLink;
    }

    /** [Output Only] Server-defined URL for this resource. */
    public Builder setSelfLink(String selfLink) {
      this.selfLink = selfLink;
      return this;
    }

    /** [Output Only] Informational warning message. */
    public Warning getWarning() {
      return warning;
    }

    /** [Output Only] Informational warning message. */
    public Builder setWarning(Warning warning) {
      this.warning = warning;
      return this;
    }

    public VmEndpointNatMappingsList build() {
      return new VmEndpointNatMappingsList(id, kind, nextPageToken, result, selfLink, warning);
    }

    public Builder clone() {
      Builder newBuilder = new Builder();
      newBuilder.setId(this.id);
      newBuilder.setKind(this.kind);
      newBuilder.setNextPageToken(this.nextPageToken);
      newBuilder.addAllResult(this.result);
      newBuilder.setSelfLink(this.selfLink);
      newBuilder.setWarning(this.warning);
      return newBuilder;
    }
  }

  @Override
  public String toString() {
    return "VmEndpointNatMappingsList{"
        + "id="
        + id
        + ", "
        + "kind="
        + kind
        + ", "
        + "nextPageToken="
        + nextPageToken
        + ", "
        + "result="
        + result
        + ", "
        + "selfLink="
        + selfLink
        + ", "
        + "warning="
        + warning
        + "}";
  }

  @Override
  public boolean equals(Object o) {
    if (o == this) {
      return true;
    }
    if (o instanceof VmEndpointNatMappingsList) {
      VmEndpointNatMappingsList that = (VmEndpointNatMappingsList) o;
      return Objects.equals(this.id, that.getId())
          && Objects.equals(this.kind, that.getKind())
          && Objects.equals(this.nextPageToken, that.getNextPageToken())
          && Objects.equals(this.result, that.getResultList())
          && Objects.equals(this.selfLink, that.getSelfLink())
          && Objects.equals(this.warning, that.getWarning());
    }
    return false;
  }

  @Override
  public int hashCode() {
    return Objects.hash(id, kind, nextPageToken, result, selfLink, warning);
  }
}
| apache-2.0 |
christophd/citrus | validation/citrus-validation-xml/src/main/java/com/consol/citrus/config/xml/XsdSchemaParser.java | 982 | package com.consol.citrus.config.xml;
import com.consol.citrus.config.util.BeanDefinitionParserUtils;
import org.springframework.beans.factory.support.AbstractBeanDefinition;
import org.springframework.beans.factory.support.BeanDefinitionBuilder;
import org.springframework.beans.factory.xml.AbstractBeanDefinitionParser;
import org.springframework.beans.factory.xml.ParserContext;
import org.springframework.xml.xsd.SimpleXsdSchema;
import org.w3c.dom.Element;
/**
* @author Christoph Deppisch
*/
public class XsdSchemaParser extends AbstractBeanDefinitionParser {

    /**
     * Builds a {@link SimpleXsdSchema} bean definition from the parsed element,
     * wiring its "xsd" property to the element's "location" attribute.
     */
    @Override
    protected AbstractBeanDefinition parseInternal(Element element, ParserContext parserContext) {
        BeanDefinitionBuilder schemaBuilder = BeanDefinitionBuilder.genericBeanDefinition(SimpleXsdSchema.class);
        BeanDefinitionParserUtils.setPropertyValue(schemaBuilder, element.getAttribute("location"), "xsd");
        return schemaBuilder.getBeanDefinition();
    }
}
| apache-2.0 |
heckjiang/HSSM | youxuele-web/src/main/java/com/youxuele/dto/BaseResult.java | 1524 | package com.youxuele.dto;
import com.fasterxml.jackson.annotation.JsonInclude;
import java.io.Serializable;
/**
*
* @author jiangjun
*
* ajax 请求的返回类型封装JSON结果
*/
@JsonInclude(JsonInclude.Include.NON_NULL)
public class BaseResult<T> implements Serializable {

    private static final long serialVersionUID = -4185151304730685014L;

    // Whether the request was handled successfully.
    private boolean success;
    // Payload returned on success (omitted from JSON when null).
    private T data;
    // Error code; -1 means "no error code set".
    private int errorCode = -1;
    // Human-readable error description.
    private String errorMsg;

    /** Full failure constructor. */
    public BaseResult(boolean success, int errorCode, String errorMsg) {
        this.success = success;
        this.errorCode = errorCode;
        this.errorMsg = errorMsg;
    }

    /** Failure result with the given code and message. */
    public BaseResult(int errorCode, String errorMsg) {
        this(false, errorCode, errorMsg);
    }

    /** Result with explicit success flag and payload. */
    public BaseResult(boolean success, T data) {
        this.success = success;
        this.data = data;
    }

    /** Successful result carrying the given payload. */
    public BaseResult(T data) {
        this(true, data);
    }

    public boolean isSuccess() {
        return success;
    }

    public void setSuccess(boolean success) {
        this.success = success;
    }

    public T getData() {
        return data;
    }

    public void setData(T data) {
        this.data = data;
    }

    public String getErrorMsg() {
        return errorMsg;
    }

    public void setErrorMsg(String errorMsg) {
        this.errorMsg = errorMsg;
    }

    public int getErrorCode() {
        return errorCode;
    }

    public void setErrorCode(int errorCode) {
        this.errorCode = errorCode;
    }

    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("BaseResult [success=");
        sb.append(success)
                .append(", data=").append(data)
                .append(", errorCode=").append(errorCode)
                .append(", errorMsg=").append(errorMsg)
                .append("]");
        return sb.toString();
    }
}
| apache-2.0 |
redlink-gmbh/smarti | core/src/main/java/io/redlink/smarti/intend/PerimeterTemplateDefinition.java | 2622 | /*
* Copyright 2017 Redlink GmbH
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package io.redlink.smarti.intend;
import io.redlink.smarti.model.MessageTopic;
import io.redlink.smarti.model.Slot;
import io.redlink.smarti.model.TemplateDefinition;
import io.redlink.smarti.model.Token;
import io.redlink.smarti.services.SpeakService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import java.util.Collection;
import java.util.List;
import java.util.Set;
@Component
public class PerimeterTemplateDefinition extends TemplateDefinition {

    public static final String LOCATION = "location";
    public static final String START = "start";
    public static final String END = "end";
    public static final String WHAT = "what";

    @Autowired
    private SpeakService speakService;

    public PerimeterTemplateDefinition() {
        super(MessageTopic.Umkreissuche.name());
    }

    /**
     * Creates the slot for the given name, or {@code null} (after logging a warning)
     * when the name is not one of this template's slots.
     */
    @Override
    protected Slot createSlotForName(String name) {
        switch (name) {
        case LOCATION:
            return new Slot(LOCATION, Token.Type.Place, prompt(LOCATION, "Und wo genau?"), true);
        case START:
            return new Slot(START, Token.Type.Date, prompt(START, "Ab wann?"), false);
        case END:
            return new Slot(END, Token.Type.Date, prompt(END, "Bis wann?"), false);
        case WHAT:
            return new Slot(WHAT, null, prompt(WHAT, "Was genau suchst Du?"), true);
        default:
            log.warn("Unknown QuerySlot '{}' requested for {}", name, getClass().getSimpleName());
            return null;
        }
    }

    /** Resolves the localized prompt for a perimeter slot, falling back to the given default. */
    private String prompt(String slotName, String fallback) {
        return speakService.getMessage("slot.perimeter." + slotName, fallback);
    }

    /**
     * The template is valid once both the location slot and the "what" slot are
     * present and valid.
     */
    @Override
    protected boolean validate(Collection<Slot> slots, List<Token> tokens) {
        Set<String> present = getPresentAndValidSlots(slots, tokens);
        return present.contains(LOCATION) && present.contains(WHAT);
    }
}
| apache-2.0 |
joinAero/DroidTurbo | libopencv/src/main/java/cc/eevee/turbo/libopencv/tutorial2/Tutorial2Activity.java | 6028 | package cc.eevee.turbo.libopencv.tutorial2;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.WindowManager;
import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.CameraBridgeViewBase;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewListener2;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.imgproc.Imgproc;
import cc.eevee.turbo.libopencv.R;
/**
 * OpenCV "mixed processing" tutorial activity: shows the camera preview and, depending on
 * the selected menu item, renders it as RGBA, grayscale, Canny edges, or with features
 * found by native code (libocv_mixed_sample).
 */
public class Tutorial2Activity extends AppCompatActivity implements CvCameraViewListener2 {
    private static final String TAG = "OCVSample::Activity";

    private static final int VIEW_MODE_RGBA = 0;
    private static final int VIEW_MODE_GRAY = 1;
    private static final int VIEW_MODE_CANNY = 2;
    private static final int VIEW_MODE_FEATURES = 5;

    // Written from the UI thread (onOptionsItemSelected) and read from onCameraFrame,
    // which CameraBridgeViewBase invokes off the UI thread; volatile ensures the mode
    // change becomes visible to the frame callback without further synchronization.
    private volatile int mViewMode;
    private Mat mRgba;
    private Mat mIntermediateMat;
    private Mat mGray;

    private MenuItem mItemPreviewRGBA;
    private MenuItem mItemPreviewGray;
    private MenuItem mItemPreviewCanny;
    private MenuItem mItemPreviewFeatures;

    private CameraBridgeViewBase mOpenCvCameraView;

    private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
        @Override
        public void onManagerConnected(int status) {
            switch (status) {
                case LoaderCallbackInterface.SUCCESS: {
                    Log.i(TAG, "OpenCV loaded successfully");
                    // Load native library after(!) OpenCV initialization
                    System.loadLibrary("ocv_mixed_sample");
                    mOpenCvCameraView.enableView();
                } break;
                default: {
                    super.onManagerConnected(status);
                } break;
            }
        }
    };

    public Tutorial2Activity() {
        Log.i(TAG, "Instantiated new " + this.getClass());
    }

    /** Called when the activity is first created. */
    @Override
    public void onCreate(Bundle savedInstanceState) {
        Log.i(TAG, "called onCreate");
        super.onCreate(savedInstanceState);
        getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
        setContentView(R.layout.tutorial2_surface_view);
        mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.tutorial2_activity_surface_view);
        mOpenCvCameraView.setVisibility(CameraBridgeViewBase.VISIBLE);
        mOpenCvCameraView.setCvCameraViewListener(this);
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        Log.i(TAG, "called onCreateOptionsMenu");
        mItemPreviewRGBA = menu.add("Preview RGBA");
        mItemPreviewGray = menu.add("Preview GRAY");
        mItemPreviewCanny = menu.add("Canny");
        mItemPreviewFeatures = menu.add("Find features");
        return true;
    }

    @Override
    public void onPause() {
        super.onPause();
        // Stop camera delivery while the activity is not in the foreground.
        if (mOpenCvCameraView != null)
            mOpenCvCameraView.disableView();
    }

    @Override
    public void onResume() {
        super.onResume();
        if (!OpenCVLoader.initDebug()) {
            Log.d(TAG, "Internal OpenCV library not found. Using OpenCV Manager for initialization");
            OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_3_0_0, this, mLoaderCallback);
        } else {
            Log.d(TAG, "OpenCV library found inside package. Using it!");
            mLoaderCallback.onManagerConnected(LoaderCallbackInterface.SUCCESS);
        }
    }

    @Override
    public void onDestroy() {
        super.onDestroy();
        if (mOpenCvCameraView != null)
            mOpenCvCameraView.disableView();
    }

    public void onCameraViewStarted(int width, int height) {
        // Allocate the working matrices once per preview session.
        mRgba = new Mat(height, width, CvType.CV_8UC4);
        mIntermediateMat = new Mat(height, width, CvType.CV_8UC4);
        mGray = new Mat(height, width, CvType.CV_8UC1);
    }

    public void onCameraViewStopped() {
        // Explicitly release native memory held by the matrices.
        mRgba.release();
        mGray.release();
        mIntermediateMat.release();
    }

    public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
        // Snapshot the mode so the whole frame is processed with one consistent setting.
        final int viewMode = mViewMode;
        switch (viewMode) {
            case VIEW_MODE_GRAY:
                // input frame has gray scale format
                Imgproc.cvtColor(inputFrame.gray(), mRgba, Imgproc.COLOR_GRAY2RGBA, 4);
                break;
            case VIEW_MODE_RGBA:
                // input frame has RBGA format
                mRgba = inputFrame.rgba();
                break;
            case VIEW_MODE_CANNY:
                // input frame has gray scale format
                mRgba = inputFrame.rgba();
                Imgproc.Canny(inputFrame.gray(), mIntermediateMat, 80, 100);
                Imgproc.cvtColor(mIntermediateMat, mRgba, Imgproc.COLOR_GRAY2RGBA, 4);
                break;
            case VIEW_MODE_FEATURES:
                // input frame has RGBA format
                mRgba = inputFrame.rgba();
                mGray = inputFrame.gray();
                FindFeatures(mGray.getNativeObjAddr(), mRgba.getNativeObjAddr());
                break;
        }
        return mRgba;
    }

    public boolean onOptionsItemSelected(MenuItem item) {
        Log.i(TAG, "called onOptionsItemSelected; selected item: " + item);

        if (item == mItemPreviewRGBA) {
            mViewMode = VIEW_MODE_RGBA;
        } else if (item == mItemPreviewGray) {
            mViewMode = VIEW_MODE_GRAY;
        } else if (item == mItemPreviewCanny) {
            mViewMode = VIEW_MODE_CANNY;
        } else if (item == mItemPreviewFeatures) {
            mViewMode = VIEW_MODE_FEATURES;
        }
        return true;
    }

    /**
     * Draws detected features onto the RGBA frame; implemented in libocv_mixed_sample,
     * which is loaded after OpenCV initialization succeeds.
     */
    public native void FindFeatures(long matAddrGr, long matAddrRgba);
}
| apache-2.0 |
dagnir/aws-sdk-java | aws-java-sdk-iam/src/main/java/com/amazonaws/services/identitymanagement/model/transform/UpdateUserRequestMarshaller.java | 2250 | /*
* Copyright 2012-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.identitymanagement.model.transform;
import javax.annotation.Generated;
import com.amazonaws.SdkClientException;
import com.amazonaws.Request;
import com.amazonaws.DefaultRequest;
import com.amazonaws.http.HttpMethodName;
import com.amazonaws.services.identitymanagement.model.*;
import com.amazonaws.transform.Marshaller;
import com.amazonaws.util.StringUtils;
/**
 * Marshals an {@link UpdateUserRequest} into a request for the IAM UpdateUser action:
 * the fixed Action/Version parameters plus whichever of the optional user-name and
 * path parameters were supplied.
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class UpdateUserRequestMarshaller implements Marshaller<Request<UpdateUserRequest>, UpdateUserRequest> {

    public Request<UpdateUserRequest> marshall(UpdateUserRequest updateUserRequest) {

        if (updateUserRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }

        Request<UpdateUserRequest> request = new DefaultRequest<UpdateUserRequest>(updateUserRequest, "AmazonIdentityManagement");
        request.addParameter("Action", "UpdateUser");
        request.addParameter("Version", "2010-05-08");
        request.setHttpMethod(HttpMethodName.POST);

        addIfPresent(request, "UserName", updateUserRequest.getUserName());
        addIfPresent(request, "NewPath", updateUserRequest.getNewPath());
        addIfPresent(request, "NewUserName", updateUserRequest.getNewUserName());

        return request;
    }

    // Adds the parameter only when a value was supplied on the request object.
    private static void addIfPresent(Request<UpdateUserRequest> request, String name, String value) {
        if (value != null) {
            request.addParameter(name, StringUtils.fromString(value));
        }
    }
}
| apache-2.0 |
huntj88/utexas-utilities | src/com/nasageek/utexasutilities/adapters/CourseMapAdapter.java | 2150 |
package com.nasageek.utexasutilities.adapters;
import android.content.Context;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ArrayAdapter;
import android.widget.ImageView;
import android.widget.TextView;
import com.nasageek.utexasutilities.MyPair;
import com.nasageek.utexasutilities.R;
import com.nasageek.utexasutilities.model.CourseMapItem;
import java.util.ArrayList;
/**
 * List adapter that renders course-map entries; entries whose child list is
 * non-empty are shown with a folder icon.
 */
public class CourseMapAdapter extends ArrayAdapter<MyPair<CourseMapItem, ArrayList>> {

    private final Context con;
    private final ArrayList<MyPair<CourseMapItem, ArrayList>> items;
    private final LayoutInflater li;

    public CourseMapAdapter(Context c, ArrayList<MyPair<CourseMapItem, ArrayList>> items) {
        super(c, 0, items);
        con = c;
        this.items = items;
        li = (LayoutInflater) con.getSystemService(Context.LAYOUT_INFLATER_SERVICE);
    }

    @Override
    public int getCount() {
        return items.size();
    }

    @Override
    public MyPair<CourseMapItem, ArrayList> getItem(int position) {
        return items.get(position);
    }

    @Override
    public long getItemId(int position) {
        // No stable ids available; keep the original contract of returning 0.
        return 0;
    }

    @Override
    public boolean areAllItemsEnabled() {
        return true;
    }

    @Override
    public boolean isEnabled(int i) {
        return true;
    }

    /**
     * Recycles {@code convertView} when possible and binds the entry's name;
     * the folder icon is visible only for entries that have children.
     */
    @Override
    public View getView(int position, View convertView, ViewGroup parent) {
        MyPair<CourseMapItem, ArrayList> item = items.get(position);
        String title = item.first.getName();
        // Use a primitive boolean (the original used a boxed Boolean) and
        // isEmpty() instead of a manual size() == 0 check.
        boolean isFolder = !item.second.isEmpty();

        ViewGroup lin = (ViewGroup) convertView;
        if (lin == null) {
            lin = (ViewGroup) li.inflate(R.layout.coursemap_item_view, parent, false);
        }

        TextView itemName = (TextView) lin.findViewById(R.id.coursemap_item_name);
        ImageView folder = (ImageView) lin.findViewById(R.id.coursemap_folder);
        folder.setVisibility(isFolder ? View.VISIBLE : View.INVISIBLE);
        itemName.setText(title);
        return lin;
    }
}
| apache-2.0 |
doctorlard/metadata-extractor | src/java/nz/govt/natlib/meta/HarvestStatus.java | 1210 | /*
* Copyright 2006 The National Library of New Zealand
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package nz.govt.natlib.meta;
/**
 * Describes the status of a Harvest. Instances are a fixed set of
 * typesafe-enum constants; the class cannot be instantiated externally.
 *
 * @author unascribed
 * @version 1.0
 */
public class HarvestStatus {

    /**
     * The Harvest failed.
     */
    public static final HarvestStatus ERROR = new HarvestStatus("Error");

    /**
     * The Harvest was successful.
     */
    public static final HarvestStatus OK = new HarvestStatus("OK");

    /**
     * The harvest has not occured or was undetermined.
     */
    public static final HarvestStatus BLANK = new HarvestStatus("BLANK");

    // Display name of this status; previously write-only (stored but never
    // readable) and non-final. Now final with an accessor and toString().
    private final String name;

    private HarvestStatus(String name) {
        this.name = name;
    }

    /**
     * Returns the display name of this status (e.g. "Error", "OK", "BLANK").
     */
    public String getName() {
        return name;
    }

    @Override
    public String toString() {
        return name;
    }
}
jonvestal/open-kilda | src-java/northbound-service/northbound/src/main/java/org/openkilda/northbound/config/MessageProducerConfig.java | 3799 | /* Copyright 2017 Telstra Open Source
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.openkilda.northbound.config;
import org.openkilda.messaging.Message;
import org.openkilda.northbound.messaging.MessageProducer;
import org.openkilda.northbound.messaging.kafka.KafkaMessageProducer;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.StringSerializer;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.PropertySource;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.core.ProducerFactory;
import org.springframework.kafka.support.serializer.JsonSerializer;
import java.util.HashMap;
import java.util.Map;
/**
 * Kafka message producer configuration.
 */
@Configuration
@PropertySource("classpath:northbound.properties")
public class MessageProducerConfig {

    /** Kafka bootstrap servers, resolved from the {@code kafka.hosts} property. */
    @Value("${kafka.hosts}")
    private String kafkaHosts;

    /**
     * Builds the producer settings map consumed by {@link #producerFactory}.
     *
     * @return kafka producer properties
     */
    private Map<String, Object> producerConfigs() {
        final Map<String, Object> config = new HashMap<>();
        config.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaHosts);
        config.put(ProducerConfig.RETRIES_CONFIG, 0);
        config.put(ProducerConfig.BATCH_SIZE_CONFIG, 16384);
        config.put(ProducerConfig.BUFFER_MEMORY_CONFIG, 33554432);
        config.put(ProducerConfig.LINGER_MS_CONFIG, 10);
        config.put(ProducerConfig.ACKS_CONFIG, "all");
        return config;
    }

    /**
     * Producer factory bean. Creates Kafka producers configured via
     * {@link #producerConfigs()}, with String keys and JSON-serialized
     * {@link Message} values.
     *
     * @return kafka producer factory
     */
    @Bean
    public ProducerFactory<String, Message> producerFactory(ObjectMapper objectMapper) {
        final StringSerializer keySerializer = new StringSerializer();
        final JsonSerializer<Message> valueSerializer = new JsonSerializer<>(objectMapper);
        return new DefaultKafkaProducerFactory<>(producerConfigs(), keySerializer, valueSerializer);
    }

    /**
     * Kafka template bean wrapping {@link org.apache.kafka.clients.producer.KafkaProducer}.
     *
     * @return kafka template
     */
    @Bean
    public KafkaTemplate<String, Message> kafkaTemplate(ProducerFactory<String, Message> producerFactory) {
        return new KafkaTemplate<>(producerFactory);
    }

    /**
     * Kafka message producer bean; the returned
     * {@link org.openkilda.northbound.messaging.kafka.KafkaMessageProducer}
     * uses the {@link KafkaTemplate} above to send messages.
     *
     * @return kafka message producer
     */
    @Bean
    public MessageProducer messageProducer() {
        return new KafkaMessageProducer();
    }
}
| apache-2.0 |
bocekm/SkyControl | SkyControl/Mavlink/src/com/MAVLink/Messages/enums/MAV_AUTOPILOT.java | 2016 | /** Micro air vehicle / autopilot classes. This identifies the individual model.
*/
package com.MAVLink.Messages.enums;
/**
 * Micro air vehicle / autopilot classes. This identifies the individual model.
 * Pure constant holder mirroring the MAVLink MAV_AUTOPILOT enum; not
 * instantiable.
 */
public class MAV_AUTOPILOT {
    public static final int MAV_AUTOPILOT_GENERIC = 0; /* Generic autopilot, full support for everything | */
    public static final int MAV_AUTOPILOT_PIXHAWK = 1; /* PIXHAWK autopilot, http://pixhawk.ethz.ch | */
    public static final int MAV_AUTOPILOT_SLUGS = 2; /* SLUGS autopilot, http://slugsuav.soe.ucsc.edu | */
    public static final int MAV_AUTOPILOT_ARDUPILOTMEGA = 3; /* ArduPilotMega / ArduCopter, http://diydrones.com | */
    public static final int MAV_AUTOPILOT_OPENPILOT = 4; /* OpenPilot, http://openpilot.org | */
    public static final int MAV_AUTOPILOT_GENERIC_WAYPOINTS_ONLY = 5; /* Generic autopilot only supporting simple waypoints | */
    public static final int MAV_AUTOPILOT_GENERIC_WAYPOINTS_AND_SIMPLE_NAVIGATION_ONLY = 6; /* Generic autopilot supporting waypoints and other simple navigation commands | */
    public static final int MAV_AUTOPILOT_GENERIC_MISSION_FULL = 7; /* Generic autopilot supporting the full mission command set | */
    public static final int MAV_AUTOPILOT_INVALID = 8; /* No valid autopilot, e.g. a GCS or other MAVLink component | */
    public static final int MAV_AUTOPILOT_PPZ = 9; /* PPZ UAV - http://nongnu.org/paparazzi | */
    public static final int MAV_AUTOPILOT_UDB = 10; /* UAV Dev Board | */
    public static final int MAV_AUTOPILOT_FP = 11; /* FlexiPilot | */
    public static final int MAV_AUTOPILOT_PX4 = 12; /* PX4 Autopilot - http://pixhawk.ethz.ch/px4/ | */
    public static final int MAV_AUTOPILOT_SMACCMPILOT = 13; /* SMACCMPilot - http://smaccmpilot.org | */
    public static final int MAV_AUTOPILOT_AUTOQUAD = 14; /* AutoQuad -- http://autoquad.org | */
    public static final int MAV_AUTOPILOT_ARMAZILA = 15; /* Armazila -- http://armazila.com | */
    public static final int MAV_AUTOPILOT_AEROB = 16; /* Aerob -- http://aerob.ru | */
    public static final int MAV_AUTOPILOT_ENUM_END = 17; /* | */

    /** Constant holder; enforce noninstantiability. */
    private MAV_AUTOPILOT() {
    }
}
| apache-2.0 |
bobo159357456/bobo | android_project/ExtendsDemo/app/src/main/java/com/jikexueyuan/extendsdemo/ExtendsDemo03.java | 434 | package com.jikexueyuan.extendsdemo;
/**
* Created by zmzp on 14-12-4.
*/
// Base class for the inheritance demo. Constructing a subclass implicitly
// invokes this no-arg constructor first (compiler-inserted super() call).
class Father{
    // Unused in this demo; private members are not visible to the subclass.
    private int age;
    private String name;
    public Father(){
        System.out.println("父类的构造方法");
    }
}
// Subclass: its constructor body runs after Father's constructor completes.
class Son extends Father{
    public Son(){
        System.out.println("子类的构造方法");
    }
}
// Demo entry point: creating a Son prints the parent's constructor message
// first, then the child's, illustrating constructor chaining order.
public class ExtendsDemo03 {
    public static void main(String[] args) {
        Son s = new Son();
    }
}
| apache-2.0 |
punkhorn/camel-upstream | components/camel-kafka/src/main/java/org/apache/camel/component/kafka/KafkaConsumer.java | 24307 | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.kafka;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Properties;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.ExecutorService;
import java.util.regex.Pattern;
import java.util.stream.StreamSupport;
import org.apache.camel.Exchange;
import org.apache.camel.Processor;
import org.apache.camel.component.kafka.serde.KafkaHeaderDeserializer;
import org.apache.camel.spi.HeaderFilterStrategy;
import org.apache.camel.spi.StateRepository;
import org.apache.camel.support.DefaultConsumer;
import org.apache.camel.support.service.ServiceHelper;
import org.apache.camel.support.service.ServiceSupport;
import org.apache.camel.util.IOHelper;
import org.apache.camel.util.ObjectHelper;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRebalanceListener;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.OffsetAndMetadata;
import org.apache.kafka.common.KafkaException;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.errors.InterruptException;
import org.apache.kafka.common.header.Header;
/**
 * Camel consumer that polls records from Kafka topics. One
 * {@link KafkaFetchRecords} task is submitted per configured consumer count;
 * each task owns its own {@code org.apache.kafka.clients.consumer.KafkaConsumer}
 * instance and runs a poll/process/commit loop on an executor thread.
 */
public class KafkaConsumer extends DefaultConsumer {

    protected ExecutorService executor;
    private final KafkaEndpoint endpoint;
    private final Processor processor;
    private final Long pollTimeoutMs;
    // This list helps working around the infinite loop of KAFKA-1894
    private final List<KafkaFetchRecords> tasks = new ArrayList<>();
    // True when this consumer started the offset repository itself and is
    // therefore responsible for stopping it in doStop().
    private volatile boolean stopOffsetRepo;

    /**
     * Creates the consumer and validates early that brokers are configured
     * (on either the endpoint or the component) so misconfiguration fails fast.
     */
    public KafkaConsumer(KafkaEndpoint endpoint, Processor processor) {
        super(endpoint, processor);
        this.endpoint = endpoint;
        this.processor = processor;
        this.pollTimeoutMs = endpoint.getConfiguration().getPollTimeoutMs();
        // brokers can be configured on endpoint or component level
        String brokers = endpoint.getConfiguration().getBrokers();
        if (brokers == null) {
            brokers = endpoint.getComponent().getBrokers();
        }
        if (ObjectHelper.isEmpty(brokers)) {
            throw new IllegalArgumentException("Brokers must be configured");
        }
    }

    /**
     * Builds the properties handed to each Kafka consumer instance:
     * endpoint-configured consumer properties plus bootstrap servers and a
     * group id (a random UUID is generated when none is configured).
     */
    Properties getProps() {
        Properties props = endpoint.getConfiguration().createConsumerProperties();
        endpoint.updateClassProperties(props);
        // brokers can be configured on endpoint or component level
        String brokers = endpoint.getConfiguration().getBrokers();
        if (brokers == null) {
            brokers = endpoint.getComponent().getBrokers();
        }
        if (brokers == null) {
            throw new IllegalArgumentException("URL to the Kafka brokers must be configured with the brokers option on either the component or endpoint.");
        }
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, brokers);
        if (endpoint.getConfiguration().getGroupId() != null) {
            String groupId = endpoint.getConfiguration().getGroupId();
            props.put(ConsumerConfig.GROUP_ID_CONFIG, groupId);
            log.debug("Kafka consumer groupId is {}", groupId);
        } else {
            String randomGroupId = UUID.randomUUID().toString();
            props.put(ConsumerConfig.GROUP_ID_CONFIG, randomGroupId);
            log.debug("Kafka consumer groupId is {} (generated)", randomGroupId);
        }
        return props;
    }

    /**
     * Starts the offset repository if needed, then creates and submits one
     * fetch task per configured consumer. Each task is pre-initialized so
     * consumer-creation errors surface during startup rather than later on
     * the executor thread.
     */
    @Override
    protected void doStart() throws Exception {
        log.info("Starting Kafka consumer on topic: {} with breakOnFirstError: {}",
            endpoint.getConfiguration().getTopic(), endpoint.getConfiguration().isBreakOnFirstError());
        super.doStart();

        // is the offset repository already started?
        StateRepository repo = endpoint.getConfiguration().getOffsetRepository();
        if (repo instanceof ServiceSupport) {
            boolean started = ((ServiceSupport) repo).isStarted();
            // if not already started then we would do that and also stop it
            if (!started) {
                stopOffsetRepo = true;
                log.debug("Starting OffsetRepository: {}", repo);
                ServiceHelper.startService(endpoint.getConfiguration().getOffsetRepository());
            }
        }

        executor = endpoint.createExecutor();

        String topic = endpoint.getConfiguration().getTopic();
        Pattern pattern = null;
        if (endpoint.getConfiguration().isTopicIsPattern()) {
            pattern = Pattern.compile(topic);
        }

        for (int i = 0; i < endpoint.getConfiguration().getConsumersCount(); i++) {
            KafkaFetchRecords task = new KafkaFetchRecords(topic, pattern, i + "", getProps());
            // pre-initialize task during startup so if there is any error we have it thrown asap
            task.preInit();
            executor.submit(task);
            tasks.add(task);
        }
    }

    /**
     * Shuts down the executor (gracefully when the CamelContext is available),
     * waking up any tasks stuck in poll (KAFKA-1894 workaround), and stops the
     * offset repository when this consumer started it.
     */
    @Override
    protected void doStop() throws Exception {
        log.info("Stopping Kafka consumer on topic: {}", endpoint.getConfiguration().getTopic());

        if (executor != null) {
            if (getEndpoint() != null && getEndpoint().getCamelContext() != null) {
                getEndpoint().getCamelContext().getExecutorServiceManager().shutdownGraceful(executor);
            } else {
                executor.shutdownNow();
            }
            if (!executor.isTerminated()) {
                tasks.forEach(KafkaFetchRecords::shutdown);
                executor.shutdownNow();
            }
        }
        tasks.clear();
        executor = null;

        if (stopOffsetRepo) {
            StateRepository repo = endpoint.getConfiguration().getOffsetRepository();
            log.debug("Stopping OffsetRepository: {}", repo);
            ServiceHelper.stopAndShutdownService(repo);
        }

        super.doStop();
    }

    /**
     * Runnable owning a single Kafka consumer. Subscribes, polls, dispatches
     * records to the Camel processor and commits offsets. Also acts as the
     * rebalance listener so offsets can be saved/restored through the
     * configured offset repository.
     */
    class KafkaFetchRecords implements Runnable, ConsumerRebalanceListener {

        private org.apache.kafka.clients.consumer.KafkaConsumer consumer;
        private final String topicName;
        private final Pattern topicPattern;
        private final String threadId;
        private final Properties kafkaProps;

        KafkaFetchRecords(String topicName, Pattern topicPattern, String id, Properties kafkaProps) {
            this.topicName = topicName;
            this.topicPattern = topicPattern;
            this.threadId = topicName + "-" + "Thread " + id;
            this.kafkaProps = kafkaProps;
        }

        /**
         * Outer reconnect loop: keeps invoking {@link #doRun()} until it
         * reports no reconnection is needed (shutdown). On reconnect a fresh
         * consumer is created and one poll-timeout delay is applied between
         * attempts.
         */
        @Override
        public void run() {
            boolean first = true;
            boolean reConnect = true;

            while (reConnect) {
                try {
                    if (!first) {
                        // re-initialize on re-connect so we have a fresh consumer
                        doInit();
                    }
                } catch (Throwable e) {
                    // ensure this is logged so users can see the problem
                    log.warn("Error creating org.apache.kafka.clients.consumer.KafkaConsumer due {}", e.getMessage(), e);
                }

                if (!first) {
                    // skip one poll timeout before trying again
                    long delay = endpoint.getConfiguration().getPollTimeoutMs();
                    log.info("Reconnecting {} to topic {} after {} ms", threadId, topicName, delay);
                    try {
                        Thread.sleep(delay);
                    } catch (InterruptedException e) {
                        Thread.currentThread().interrupt();
                    }
                }

                first = false;

                // doRun keeps running until we either shutdown or is told to re-connect
                reConnect = doRun();
            }
        }

        /** Eagerly creates the consumer so startup errors are thrown asap. */
        void preInit() {
            doInit();
        }

        /**
         * Creates the underlying Kafka consumer. The thread context
         * classloader is temporarily swapped because Kafka loads
         * authentication settings reflectively via its own classloader.
         */
        protected void doInit() {
            // create consumer
            ClassLoader threadClassLoader = Thread.currentThread().getContextClassLoader();
            try {
                // Kafka uses reflection for loading authentication settings, use its classloader
                Thread.currentThread().setContextClassLoader(org.apache.kafka.clients.consumer.KafkaConsumer.class.getClassLoader());
                // this may throw an exception if something is wrong with kafka consumer
                this.consumer = new org.apache.kafka.clients.consumer.KafkaConsumer(kafkaProps);
            } finally {
                Thread.currentThread().setContextClassLoader(threadClassLoader);
            }
        }

        /**
         * Subscribe/seek/poll/process/commit loop.
         *
         * @return true when the caller should re-connect (create a fresh
         *         consumer and run again), false when shutting down cleanly
         */
        @SuppressWarnings("unchecked")
        protected boolean doRun() {
            // allow to re-connect thread in case we use that to retry failed messages
            boolean reConnect = false;
            boolean unsubscribing = false;

            try {
                if (topicPattern != null) {
                    log.info("Subscribing {} to topic pattern {}", threadId, topicName);
                    consumer.subscribe(topicPattern, this);
                } else {
                    log.info("Subscribing {} to topic {}", threadId, topicName);
                    consumer.subscribe(Arrays.asList(topicName.split(",")));
                }

                StateRepository<String, String> offsetRepository = endpoint.getConfiguration().getOffsetRepository();
                if (offsetRepository != null) {
                    // This poll to ensures we have an assigned partition otherwise seek won't work
                    ConsumerRecords poll = consumer.poll(100);
                    for (TopicPartition topicPartition : (Set<TopicPartition>) consumer.assignment()) {
                        String offsetState = offsetRepository.getState(serializeOffsetKey(topicPartition));
                        if (offsetState != null && !offsetState.isEmpty()) {
                            // The state contains the last read offset so you need to seek from the next one
                            long offset = deserializeOffsetValue(offsetState) + 1;
                            log.debug("Resuming partition {} from offset {} from state", topicPartition.partition(), offset);
                            consumer.seek(topicPartition, offset);
                        } else {
                            // If the init poll has returned some data of a currently unknown topic/partition in the state
                            // then resume from their offset in order to avoid losing data
                            List<ConsumerRecord<Object, Object>> partitionRecords = poll.records(topicPartition);
                            if (!partitionRecords.isEmpty()) {
                                long offset = partitionRecords.get(0).offset();
                                log.debug("Resuming partition {} from offset {}", topicPartition.partition(), offset);
                                consumer.seek(topicPartition, offset);
                            }
                        }
                    }
                } else if (endpoint.getConfiguration().getSeekTo() != null) {
                    if (endpoint.getConfiguration().getSeekTo().equals("beginning")) {
                        log.debug("{} is seeking to the beginning on topic {}", threadId, topicName);
                        // This poll to ensures we have an assigned partition otherwise seek won't work
                        consumer.poll(100);
                        consumer.seekToBeginning(consumer.assignment());
                    } else if (endpoint.getConfiguration().getSeekTo().equals("end")) {
                        log.debug("{} is seeking to the end on topic {}", threadId, topicName);
                        // This poll to ensures we have an assigned partition otherwise seek won't work
                        consumer.poll(100);
                        consumer.seekToEnd(consumer.assignment());
                    }
                }

                while (isRunAllowed() && !reConnect && !isStoppingOrStopped() && !isSuspendingOrSuspended()) {
                    // flag to break out processing on the first exception
                    boolean breakOnErrorHit = false;
                    log.trace("Polling {} from topic: {} with timeout: {}", threadId, topicName, pollTimeoutMs);
                    ConsumerRecords<Object, Object> allRecords = consumer.poll(pollTimeoutMs);

                    for (TopicPartition partition : allRecords.partitions()) {
                        // Tracks the last successfully processed offset of this
                        // partition; -1 means nothing processed yet.
                        long partitionLastOffset = -1;

                        Iterator<ConsumerRecord<Object, Object>> recordIterator = allRecords.records(partition).iterator();
                        if (!breakOnErrorHit && recordIterator.hasNext()) {
                            ConsumerRecord<Object, Object> record;

                            while (!breakOnErrorHit && recordIterator.hasNext()) {
                                record = recordIterator.next();
                                if (log.isTraceEnabled()) {
                                    log.trace("Partition = {}, offset = {}, key = {}, value = {}", record.partition(), record.offset(), record.key(),
                                              record.value());
                                }
                                Exchange exchange = endpoint.createKafkaExchange(record);

                                propagateHeaders(record, exchange, endpoint.getConfiguration());

                                // if not auto commit then we have additional information on the exchange
                                if (!isAutoCommitEnabled()) {
                                    exchange.getIn().setHeader(KafkaConstants.LAST_RECORD_BEFORE_COMMIT, !recordIterator.hasNext());
                                }
                                if (endpoint.getConfiguration().isAllowManualCommit()) {
                                    // allow Camel users to access the Kafka consumer API to be able to do for example manual commits
                                    KafkaManualCommit manual = endpoint.getComponent().getKafkaManualCommitFactory().newInstance(exchange, consumer, topicName, threadId,
                                                                                                                                offsetRepository, partition, record.offset());
                                    exchange.getIn().setHeader(KafkaConstants.MANUAL_COMMIT, manual);
                                }

                                try {
                                    processor.process(exchange);
                                } catch (Exception e) {
                                    exchange.setException(e);
                                }

                                if (exchange.getException() != null) {
                                    // processing failed due to an unhandled exception, what should we do
                                    if (endpoint.getConfiguration().isBreakOnFirstError()) {
                                        // we are failing and we should break out
                                        log.warn("Error during processing {} from topic: {}. Will seek consumer to offset: {} and re-connect and start polling again.",
                                                 exchange, topicName, partitionLastOffset);
                                        // force commit so we resume on next poll where we failed
                                        commitOffset(offsetRepository, partition, partitionLastOffset, true);
                                        // continue to next partition
                                        breakOnErrorHit = true;
                                    } else {
                                        // will handle/log the exception and then continue to next
                                        getExceptionHandler().handleException("Error during processing", exchange, exchange.getException());
                                    }
                                } else {
                                    // record was success so remember its offset
                                    partitionLastOffset = record.offset();
                                }
                            }

                            if (!breakOnErrorHit) {
                                // all records processed from partition so commit them
                                commitOffset(offsetRepository, partition, partitionLastOffset, false);
                            }
                        }
                    }

                    if (breakOnErrorHit) {
                        // force re-connect
                        reConnect = true;
                    }
                }

                if (!reConnect) {
                    if (isAutoCommitEnabled()) {
                        if ("async".equals(endpoint.getConfiguration().getAutoCommitOnStop())) {
                            log.info("Auto commitAsync on stop {} from topic {}", threadId, topicName);
                            consumer.commitAsync();
                        } else if ("sync".equals(endpoint.getConfiguration().getAutoCommitOnStop())) {
                            log.info("Auto commitSync on stop {} from topic {}", threadId, topicName);
                            consumer.commitSync();
                        }
                    }
                }

                log.info("Unsubscribing {} from topic {}", threadId, topicName);
                // we are unsubscribing so do not re connect
                unsubscribing = true;
                consumer.unsubscribe();
            } catch (InterruptException e) {
                getExceptionHandler().handleException("Interrupted while consuming " + threadId + " from kafka topic", e);
                log.info("Unsubscribing {} from topic {}", threadId, topicName);
                consumer.unsubscribe();
                Thread.currentThread().interrupt();
            } catch (KafkaException e) {
                // some kind of error in kafka, it may happen during unsubscribing or during normal processing
                if (unsubscribing) {
                    getExceptionHandler().handleException("Error unsubscribing " + threadId + " from kafka topic " + topicName, e);
                } else {
                    log.warn("KafkaException consuming {} from topic {}. Will attempt to re-connect on next run", threadId, topicName);
                    reConnect = true;
                }
            } catch (Exception e) {
                getExceptionHandler().handleException("Error consuming " + threadId + " from kafka topic", e);
            } finally {
                log.debug("Closing {}", threadId);
                IOHelper.close(consumer);
            }

            return reConnect;
        }

        /**
         * Persists or commits the given partition offset. When an offset
         * repository is configured (and manual commit is not allowed) the
         * offset is saved there; otherwise it is committed synchronously to
         * Kafka when forced or when auto-commit is enabled. A value of -1
         * means nothing was processed and is a no-op.
         */
        private void commitOffset(StateRepository<String, String> offsetRepository, TopicPartition partition, long partitionLastOffset, boolean forceCommit) {
            if (partitionLastOffset != -1) {
                if (!endpoint.getConfiguration().isAllowManualCommit() && offsetRepository != null) {
                    log.debug("Saving offset repository state {} from topic {} with offset: {}", threadId, topicName, partitionLastOffset);
                    offsetRepository.setState(serializeOffsetKey(partition), serializeOffsetValue(partitionLastOffset));
                } else if (forceCommit) {
                    log.debug("Forcing commitSync {} from topic {} with offset: {}", threadId, topicName, partitionLastOffset);
                    consumer.commitSync(Collections.singletonMap(partition, new OffsetAndMetadata(partitionLastOffset + 1)));
                } else if (endpoint.getConfiguration().isAutoCommitEnable()) {
                    log.debug("Auto commitSync {} from topic {} with offset: {}", threadId, topicName, partitionLastOffset);
                    consumer.commitSync(Collections.singletonMap(partition, new OffsetAndMetadata(partitionLastOffset + 1)));
                }
            }
        }

        private void shutdown() {
            // As advised in the KAFKA-1894 ticket, calling this wakeup method breaks the infinite loop
            consumer.wakeup();
        }

        /**
         * On rebalance revocation, saves the current position of each revoked
         * partition into the offset repository (when configured) so another
         * consumer can resume from it.
         */
        @Override
        public void onPartitionsRevoked(Collection<TopicPartition> partitions) {
            log.debug("onPartitionsRevoked: {} from topic {}", threadId, topicName);

            StateRepository<String, String> offsetRepository = endpoint.getConfiguration().getOffsetRepository();
            if (offsetRepository != null) {
                for (TopicPartition partition : partitions) {
                    long offset = consumer.position(partition);
                    log.debug("Saving offset repository state {} from topic {} with offset: {}", threadId, topicName, offset);
                    offsetRepository.setState(serializeOffsetKey(partition), serializeOffsetValue(offset));
                }
            }
        }

        /**
         * On rebalance assignment, seeks each assigned partition to the
         * offset recorded in the offset repository (when present), resuming
         * one past the last read offset.
         */
        @Override
        public void onPartitionsAssigned(Collection<TopicPartition> partitions) {
            log.debug("onPartitionsAssigned: {} from topic {}", threadId, topicName);

            StateRepository<String, String> offsetRepository = endpoint.getConfiguration().getOffsetRepository();
            if (offsetRepository != null) {
                for (TopicPartition partition : partitions) {
                    String offsetState = offsetRepository.getState(serializeOffsetKey(partition));
                    if (offsetState != null && !offsetState.isEmpty()) {
                        // The state contains the last read offset so you need to seek from the next one
                        long offset = deserializeOffsetValue(offsetState) + 1;
                        log.debug("Resuming partition {} from offset {} from state", partition.partition(), offset);
                        consumer.seek(partition, offset);
                    }
                }
            }
        }
    }

    /**
     * Copies Kafka record headers onto the Camel exchange, honoring the
     * configured header filter strategy and header deserializer.
     */
    private void propagateHeaders(ConsumerRecord<Object, Object> record, Exchange exchange, KafkaConfiguration kafkaConfiguration) {
        HeaderFilterStrategy headerFilterStrategy = kafkaConfiguration.getHeaderFilterStrategy();
        KafkaHeaderDeserializer headerDeserializer = kafkaConfiguration.getKafkaHeaderDeserializer();
        StreamSupport.stream(record.headers().spliterator(), false)
            .filter(header -> shouldBeFiltered(header, exchange, headerFilterStrategy))
            .forEach(header -> exchange.getIn().setHeader(header.key(), headerDeserializer.deserialize(header.key(), header.value())));
    }

    // Despite the name, returns true when the header should be KEPT
    // (i.e. the filter strategy did not filter it out).
    private boolean shouldBeFiltered(Header header, Exchange exchange, HeaderFilterStrategy headerFilterStrategy) {
        return !headerFilterStrategy.applyFilterToCamelHeaders(header.key(), header.value(), exchange);
    }

    // Null-safe check of the Boolean auto-commit flag.
    private boolean isAutoCommitEnabled() {
        return endpoint.getConfiguration().isAutoCommitEnable() != null && endpoint.getConfiguration().isAutoCommitEnable();
    }

    /** Key under which a partition's offset is stored: "topic/partition". */
    protected String serializeOffsetKey(TopicPartition topicPartition) {
        return topicPartition.topic() + '/' + topicPartition.partition();
    }

    /** Stores the offset as its decimal string representation. */
    protected String serializeOffsetValue(long offset) {
        return String.valueOf(offset);
    }

    /** Parses an offset previously written by {@link #serializeOffsetValue}. */
    protected long deserializeOffsetValue(String offset) {
        return Long.parseLong(offset);
    }
}
| apache-2.0 |
tonilopezmr/CategoriPlus | test/pruebas/modelo/CategoriaModelo.java | 3030 | package pruebas.modelo;
/*
* Copyright 2014 alumno.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import categoriplus.controlador.Controlador;
import categoriplus.dataccesobject.gestor.DBConfig;
import categoriplus.modelo.Categoria;
import java.util.List;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.openide.util.Exceptions;
/**
 * Integration tests for {@code Categoria} CRUD operations exposed by
 * {@link Controlador}. Requires a reachable OpenERP server.
 *
 * @author alumno
 */
public class CategoriaModelo {

    // Controller under test; connects to a live OpenERP instance.
    Controlador controller;

    public CategoriaModelo() throws Exception {
        // NOTE(review): host and credentials are hard-coded; these tests can
        // only run against this specific OpenERP instance.
        controller = new Controlador(null, new DBConfig("admin", "admin", "192.168.2.134", "openerp"));
    }

    @BeforeClass
    public static void setUpClass() {
    }

    @AfterClass
    public static void tearDownClass() {
    }

    @Before
    public void setUp() {
    }

    @After
    public void tearDown() {
    }

    /** Lists all categories and prints them; fails the test on any error. */
    @Test
    public void getAllCategorias() {
        try {
            List<Categoria> cats = controller.listarCategorias();
            for (Categoria categoria : cats) {
                System.out.println(categoria);
            }
        } catch (Exception ex) {
            // Previously the exception was only printed, so a broken backend
            // still produced a "passing" test. Fail loudly instead.
            throw new AssertionError("listarCategorias() failed", ex);
        }
    }

    /** Fetches a single category by id. */
    @Test
    public void getCategoria() {
        try {
            Categoria cat = controller.getCategoria(1);
            System.out.println(cat.getId()+" - "+cat.getName()+" - "+cat.getParent());
        } catch (Exception ex) {
            throw new AssertionError("getCategoria(1) failed", ex);
        }
    }

    /** Creates a new category whose parent is the category with id 1. */
    @Test
    public void insertarCategoria(){
        try {
            Categoria categori = new Categoria();
            categori.setName("Nombre Categoria");
            categori.setParent(controller.getCategoria(1));
            controller.crearCategoria(categori);
        } catch (Exception ex) {
            throw new AssertionError("crearCategoria() failed", ex);
        }
    }

    /** Renames the category with id 3 by appending a suffix. */
    @Test
    public void updateCategoria(){
        try {
            Categoria cat = controller.getCategoria(3);
            cat.setName(cat.getName()+" Modificado");
            controller.modificarCategoria(cat);
        } catch (Exception ex) {
            throw new AssertionError("modificarCategoria() failed", ex);
        }
    }
}
| apache-2.0 |
mdanielwork/intellij-community | java/java-impl/src/com/intellij/codeInspection/OptionalIsPresentInspection.java | 22596 | // Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.codeInspection;
import com.intellij.codeInsight.Nullability;
import com.intellij.codeInspection.dataFlow.NullabilityUtil;
import com.intellij.codeInspection.util.LambdaGenerationUtil;
import com.intellij.codeInspection.util.OptionalUtil;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Ref;
import com.intellij.psi.*;
import com.intellij.psi.codeStyle.CodeStyleManager;
import com.intellij.psi.codeStyle.JavaCodeStyleManager;
import com.intellij.psi.codeStyle.SuggestedNameInfo;
import com.intellij.psi.codeStyle.VariableKind;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.util.PsiTypesUtil;
import com.intellij.psi.util.PsiUtil;
import com.intellij.util.ArrayUtil;
import com.intellij.util.ObjectUtils;
import com.siyeh.ig.psiutils.*;
import org.jetbrains.annotations.Contract;
import org.jetbrains.annotations.Nls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.Objects;
import static com.intellij.codeInsight.PsiEquivalenceUtil.areElementsEquivalent;
/**
 * Inspection that reports {@code Optional.isPresent()} checks (as an if-statement or a
 * ternary condition, possibly negated) which can be collapsed into a single functional-style
 * expression such as {@code opt.map(...).orElse(...)} or {@code opt.ifPresent(...)}.
 * The supported rewrite scenarios are enumerated in {@link #CASES}.
 */
public class OptionalIsPresentInspection extends AbstractBaseJavaLocalInspectionTool {
  private static final Logger LOG = Logger.getInstance(OptionalIsPresentInspection.class);
  // Every scenario is probed independently for each isPresent() check found (see check() below).
  private static final OptionalIsPresentCase[] CASES = {
    new ReturnCase(),
    new AssignmentCase(),
    new ConsumerCase(),
    new TernaryCase()
  };
  /**
   * Severity of a detected match: WARNING = safe replacement, INFO = possible but may
   * subtly change semantics (offered only on-the-fly), NONE = no replacement.
   */
  private enum ProblemType {
    WARNING, INFO, NONE;
    // Registers this problem on the condition unless the type is NONE.
    void registerProblem(@NotNull ProblemsHolder holder, @NotNull PsiExpression condition, OptionalIsPresentCase scenario) {
      if(this != NONE) {
        if (this == INFO && !holder.isOnTheFly()) {
          return; //don't register fixes in batch mode
        }
        holder.registerProblem(condition, "Can be replaced with single expression in functional style",
                               this == INFO ? ProblemHighlightType.INFORMATION : ProblemHighlightType.GENERIC_ERROR_OR_WARNING,
                               new OptionalIsPresentFix(scenario));
      }
    }
  }
  /**
   * Builds a visitor for if-statements and conditional expressions whose condition is an
   * {@code Optional.isPresent()} call. Replacements require lambdas, so nothing is reported
   * below Java language level 8.
   */
  @NotNull
  @Override
  public PsiElementVisitor buildVisitor(@NotNull ProblemsHolder holder, boolean isOnTheFly) {
    if (!PsiUtil.isLanguageLevel8OrHigher(holder.getFile())) {
      return PsiElementVisitor.EMPTY_VISITOR;
    }
    return new JavaElementVisitor() {
      @Override
      public void visitConditionalExpression(@NotNull PsiConditionalExpression expression) {
        super.visitConditionalExpression(expression);
        PsiExpression condition = PsiUtil.skipParenthesizedExprDown(expression.getCondition());
        if (condition == null) return;
        boolean invert = false;
        PsiExpression strippedCondition = condition;
        // A negated check (!opt.isPresent()) swaps the roles of the two branches.
        if (BoolUtils.isNegation(condition)) {
          strippedCondition = BoolUtils.getNegated(condition);
          invert = true;
        }
        PsiReferenceExpression optionalRef = extractOptionalFromIsPresentCheck(strippedCondition);
        if (optionalRef == null) return;
        PsiExpression thenExpression = invert ? expression.getElseExpression() : expression.getThenExpression();
        PsiExpression elseExpression = invert ? expression.getThenExpression() : expression.getElseExpression();
        check(condition, optionalRef, thenExpression, elseExpression);
      }
      @Override
      public void visitIfStatement(@NotNull PsiIfStatement statement) {
        super.visitIfStatement(statement);
        PsiExpression condition = PsiUtil.skipParenthesizedExprDown(statement.getCondition());
        if (condition == null) return;
        boolean invert = false;
        PsiExpression strippedCondition = condition;
        // Same branch-swapping as for conditional expressions above.
        if (BoolUtils.isNegation(condition)) {
          strippedCondition = BoolUtils.getNegated(condition);
          invert = true;
        }
        PsiReferenceExpression optionalRef = extractOptionalFromIsPresentCheck(strippedCondition);
        if (optionalRef == null) return;
        PsiStatement thenStatement = extractThenStatement(statement, invert);
        PsiStatement elseStatement = extractElseStatement(statement, invert);
        check(condition, optionalRef, thenStatement, elseStatement);
      }
      // Tries every known scenario against the extracted branches; a scenario that
      // matches registers its own problem/fix.
      void check(@NotNull PsiExpression condition, PsiReferenceExpression optionalRef, PsiElement thenElement, PsiElement elseElement) {
        for (OptionalIsPresentCase scenario : CASES) {
          scenario.getProblemType(optionalRef, thenElement, elseElement).registerProblem(holder, condition, scenario);
        }
      }
    };
  }
  /** Returns true if the variable's declared type is a raw class type (raw Optionals are skipped). */
  private static boolean isRaw(@NotNull PsiVariable variable) {
    PsiType type = variable.getType();
    return type instanceof PsiClassType && ((PsiClassType)type).isRaw();
  }
  /** Then-branch with braces stripped; with {@code invert} the roles of the branches flip. */
  @Nullable
  private static PsiStatement extractThenStatement(@NotNull PsiIfStatement ifStatement, boolean invert) {
    if (invert) return extractElseStatement(ifStatement, false);
    return ControlFlowUtils.stripBraces(ifStatement.getThenBranch());
  }
  /**
   * Else-branch with braces stripped. When there is no explicit else but the then-branch
   * is a return, the statement immediately following the if acts as the implicit else.
   */
  @Nullable
  private static PsiStatement extractElseStatement(@NotNull PsiIfStatement ifStatement, boolean invert) {
    if (invert) return extractThenStatement(ifStatement, false);
    PsiStatement statement = ControlFlowUtils.stripBraces(ifStatement.getElseBranch());
    if (statement == null) {
      PsiStatement thenStatement = extractThenStatement(ifStatement, false);
      if (thenStatement instanceof PsiReturnStatement) {
        PsiElement nextElement = PsiTreeUtil.skipWhitespacesAndCommentsForward(ifStatement);
        if (nextElement instanceof PsiStatement) {
          statement = ControlFlowUtils.stripBraces((PsiStatement)nextElement);
        }
      }
    }
    return statement;
  }
  /**
   * If {@code expression} is a no-arg {@code isPresent()} call resolved to
   * {@code java.util.Optional} whose qualifier is a reference to a non-raw variable,
   * returns that qualifier; otherwise null.
   */
  @Nullable
  @Contract("null -> null")
  static PsiReferenceExpression extractOptionalFromIsPresentCheck(PsiExpression expression) {
    if (!(expression instanceof PsiMethodCallExpression)) return null;
    PsiMethodCallExpression call = (PsiMethodCallExpression)expression;
    if (!call.getArgumentList().isEmpty()) return null;
    if (!"isPresent".equals(call.getMethodExpression().getReferenceName())) return null;
    PsiMethod method = call.resolveMethod();
    if (method == null) return null;
    PsiClass containingClass = method.getContainingClass();
    if (containingClass == null || !CommonClassNames.JAVA_UTIL_OPTIONAL.equals(containingClass.getQualifiedName())) return null;
    PsiReferenceExpression qualifier =
      ObjectUtils.tryCast(call.getMethodExpression().getQualifierExpression(), PsiReferenceExpression.class);
    if (qualifier == null) return null;
    PsiElement element = qualifier.resolve();
    if (!(element instanceof PsiVariable) || isRaw((PsiVariable)element)) return null;
    return qualifier;
  }
  /** True if {@code element} is a no-arg {@code get()} call qualified by {@code optionalRef} (or an equivalent reference). */
  @Contract("null, _ -> false")
  static boolean isOptionalGetCall(PsiElement element, @NotNull PsiReferenceExpression optionalRef) {
    if (!(element instanceof PsiMethodCallExpression)) return false;
    PsiMethodCallExpression call = (PsiMethodCallExpression)element;
    if (!call.getArgumentList().isEmpty()) return false;
    PsiReferenceExpression methodExpression = call.getMethodExpression();
    return "get".equals(methodExpression.getReferenceName()) &&
           areElementsEquivalent(ExpressionUtils.getQualifierOrThis(methodExpression), optionalRef);
  }
  /**
   * Classifies how safely {@code lambdaCandidate} (the code run when the Optional is present)
   * can be converted to a lambda over the unwrapped value.
   *
   * @param optionalRef     reference to the checked Optional variable
   * @param lambdaCandidate the "present" branch element; null yields NONE
   * @param falseExpression the "absent" branch value; null in the consumer (ifPresent) case
   */
  @NotNull
  static ProblemType getTypeByLambdaCandidate(@NotNull PsiReferenceExpression optionalRef,
                                              @Nullable PsiElement lambdaCandidate,
                                              @Nullable PsiExpression falseExpression) {
    if (lambdaCandidate == null) return ProblemType.NONE;
    // "opt.isPresent() ? opt : Optional.empty()" is always safe to collapse.
    if (lambdaCandidate instanceof PsiReferenceExpression &&
        areElementsEquivalent(lambdaCandidate, optionalRef) && OptionalUtil.isOptionalEmptyCall(falseExpression)) {
      return ProblemType.WARNING;
    }
    if (!LambdaGenerationUtil.canBeUncheckedLambda(lambdaCandidate, optionalRef::isReferenceTo)) return ProblemType.NONE;
    Ref<Boolean> hasOptionalReference = new Ref<>(Boolean.FALSE);
    boolean hasNoBadRefs = PsiTreeUtil.processElements(lambdaCandidate, e -> {
      if (!(e instanceof PsiReferenceExpression)) return true;
      if (!areElementsEquivalent(e, optionalRef)) return true;
      // Check that Optional variable is referenced only in context of get() call
      hasOptionalReference.set(Boolean.TRUE);
      return isOptionalGetCall(e.getParent().getParent(), optionalRef);
    });
    if (!hasNoBadRefs) return ProblemType.NONE;
    // If the candidate never touches the Optional (or is not an expression) the rewrite is
    // only offered as an INFO-level suggestion.
    if (!hasOptionalReference.get() || !(lambdaCandidate instanceof PsiExpression)) return ProblemType.INFO;
    PsiExpression expression = (PsiExpression)lambdaCandidate;
    if (falseExpression != null) {
      // falseExpression == null is "consumer" case (to be replaced with ifPresent())
      if (!ExpressionUtils.isNullLiteral(falseExpression) &&
          NullabilityUtil.getExpressionNullability(expression, true) != Nullability.NOT_NULL) {
        // if falseExpression is null literal, then semantics is preserved
        return ProblemType.INFO;
      }
      PsiType falseType = falseExpression.getType();
      PsiType trueType = expression.getType();
      // like x ? double_expression : integer_expression; support only if integer_expression is simple literal,
      // so could be converted explicitly to double
      if (falseType instanceof PsiPrimitiveType && trueType instanceof PsiPrimitiveType &&
          !falseType.equals(trueType) && JavaPsiMathUtil.getNumberFromLiteral(falseExpression) == null) {
        return ProblemType.NONE;
      }
    }
    return ProblemType.WARNING;
  }
  /**
   * Builds the lambda text {@code param -> body} from {@code trueValue}, replacing every
   * {@code optional.get()} inside it with a freshly suggested parameter name.
   * Statement bodies (other than blocks) are wrapped in braces.
   */
  @NotNull
  static String generateOptionalLambda(@NotNull PsiElementFactory factory,
                                       @NotNull CommentTracker ct,
                                       PsiReferenceExpression optionalRef,
                                       PsiElement trueValue) {
    PsiType type = optionalRef.getType();
    JavaCodeStyleManager javaCodeStyleManager = JavaCodeStyleManager.getInstance(trueValue.getProject());
    SuggestedNameInfo info = javaCodeStyleManager.suggestVariableName(VariableKind.PARAMETER, null, null, type);
    String baseName = ObjectUtils.coalesce(ArrayUtil.getFirstElement(info.names), "value");
    String paramName = javaCodeStyleManager.suggestUniqueVariableName(baseName, trueValue, true);
    if(trueValue instanceof PsiExpressionStatement) {
      trueValue = ((PsiExpressionStatement)trueValue).getExpression();
    }
    ct.markUnchanged(trueValue);
    PsiElement copy = trueValue.copy();
    for (PsiElement getCall : PsiTreeUtil.collectElements(copy, e -> isOptionalGetCall(e, optionalRef))) {
      PsiElement result = getCall.replace(factory.createIdentifier(paramName));
      if (copy == getCall) copy = result;
    }
    if(copy instanceof PsiStatement && !(copy instanceof PsiBlockStatement)) {
      return paramName + "->{" + copy.getText()+"}";
    }
    return paramName + "->" + copy.getText();
  }
  /**
   * Produces the functional replacement text (e.g. {@code opt.map(v -> ...).orElse(...)}),
   * delegating the final form to {@link OptionalUtil#generateOptionalUnwrap}.
   * A true branch equal to the Optional itself becomes {@code Optional.of(opt.get())};
   * a false branch equal to the Optional itself becomes {@code Optional.empty()}.
   */
  static String generateOptionalUnwrap(@NotNull PsiElementFactory factory,
                                       @NotNull CommentTracker ct,
                                       @NotNull PsiReferenceExpression optionalRef,
                                       @NotNull PsiExpression trueValue,
                                       @NotNull PsiExpression falseValue,
                                       PsiType targetType) {
    if (areElementsEquivalent(trueValue, optionalRef) && OptionalUtil.isOptionalEmptyCall(falseValue)) {
      trueValue =
        factory.createExpressionFromText(CommonClassNames.JAVA_UTIL_OPTIONAL + ".of(" + optionalRef.getText() + ".get())", trueValue);
    }
    if (areElementsEquivalent(falseValue, optionalRef)) {
      falseValue = factory.createExpressionFromText(CommonClassNames.JAVA_UTIL_OPTIONAL + ".empty()", falseValue);
    }
    String lambdaText = generateOptionalLambda(factory, ct, optionalRef, trueValue);
    PsiLambdaExpression lambda = (PsiLambdaExpression)factory.createExpressionFromText(lambdaText, trueValue);
    PsiExpression body = Objects.requireNonNull((PsiExpression)lambda.getBody());
    return OptionalUtil.generateOptionalUnwrap(optionalRef.getText(), lambda.getParameterList().getParameters()[0],
                                               body, ct.markUnchanged(falseValue), targetType, true);
  }
  /** True when the expression is cheap/side-effect-free to recompute or may become an unchecked lambda body. */
  static boolean isSimpleOrUnchecked(PsiExpression expression) {
    return ExpressionUtils.isSafelyRecomputableExpression(expression) || LambdaGenerationUtil.canBeUncheckedLambda(expression);
  }
  /** Quick fix that rewrites the matched if-statement or ternary into the scenario's functional replacement. */
  static class OptionalIsPresentFix implements LocalQuickFix {
    private final OptionalIsPresentCase myScenario;
    OptionalIsPresentFix(OptionalIsPresentCase scenario) {
      myScenario = scenario;
    }
    @Nls
    @NotNull
    @Override
    public String getFamilyName() {
      return "Replace Optional.isPresent() condition with functional style expression";
    }
    @Override
    public void applyFix(@NotNull Project project, @NotNull ProblemDescriptor descriptor) {
      PsiElement element = descriptor.getStartElement();
      if (!(element instanceof PsiExpression)) return;
      PsiExpression condition = (PsiExpression)element;
      boolean invert = false;
      if (BoolUtils.isNegation(condition)) {
        condition = BoolUtils.getNegated(condition);
        invert = true;
      }
      PsiReferenceExpression optionalRef = extractOptionalFromIsPresentCheck(condition);
      if (optionalRef == null) return;
      // Re-extract the branches exactly as the visitor did before applying the rewrite.
      PsiElement cond = PsiTreeUtil.getParentOfType(element, PsiIfStatement.class, PsiConditionalExpression.class);
      PsiElement thenElement;
      PsiElement elseElement;
      if(cond instanceof PsiIfStatement) {
        thenElement = extractThenStatement((PsiIfStatement)cond, invert);
        elseElement = extractElseStatement((PsiIfStatement)cond, invert);
      } else if(cond instanceof PsiConditionalExpression) {
        thenElement = invert ? ((PsiConditionalExpression)cond).getElseExpression() : ((PsiConditionalExpression)cond).getThenExpression();
        elseElement = invert ? ((PsiConditionalExpression)cond).getThenExpression() : ((PsiConditionalExpression)cond).getElseExpression();
      } else return;
      // The PSI may have changed since the problem was reported; re-validate the match.
      if (myScenario.getProblemType(optionalRef, thenElement, elseElement) == ProblemType.NONE) return;
      PsiElementFactory factory = JavaPsiFacade.getElementFactory(project);
      CommentTracker ct = new CommentTracker();
      String replacementText = myScenario.generateReplacement(factory, ct, optionalRef, thenElement, elseElement);
      // Branches hoisted outside the replaced element (e.g. the implicit-else return) must be removed explicitly.
      if (thenElement != null && !PsiTreeUtil.isAncestor(cond, thenElement, true)) ct.delete(thenElement);
      if (elseElement != null && !PsiTreeUtil.isAncestor(cond, elseElement, true)) ct.delete(elseElement);
      PsiElement result = ct.replaceAndRestoreComments(cond, replacementText);
      LambdaCanBeMethodReferenceInspection.replaceAllLambdasWithMethodReferences(result);
      RemoveRedundantTypeArgumentsUtil.removeRedundantTypeArguments(result);
      CodeStyleManager.getInstance(project).reformat(result);
    }
  }
  /**
   * One rewrite scenario: decides whether an isPresent() check with the supplied branches
   * matches, and if so generates the replacement text.
   */
  interface OptionalIsPresentCase {
    /** Classifies the match quality for the given branches (NONE when the scenario does not apply). */
    @NotNull
    ProblemType getProblemType(@NotNull PsiReferenceExpression optionalVariable,
                               @Nullable PsiElement trueElement,
                               @Nullable PsiElement falseElement);
    /** Generates the replacement text; only called after a non-NONE {@link #getProblemType} result. */
    @NotNull
    String generateReplacement(@NotNull PsiElementFactory factory,
                               @NotNull CommentTracker ct,
                               @NotNull PsiReferenceExpression optionalVariable,
                               PsiElement trueElement,
                               PsiElement falseElement);
  }
  /**
   * Handles {@code if (opt.isPresent()) return a; else return b;}
   * (the else branch may also be the return statement following the if).
   */
  static class ReturnCase implements OptionalIsPresentCase {
    @NotNull
    @Override
    public ProblemType getProblemType(@NotNull PsiReferenceExpression optionalRef,
                                      @Nullable PsiElement trueElement,
                                      @Nullable PsiElement falseElement) {
      if (!(trueElement instanceof PsiReturnStatement) || !(falseElement instanceof PsiReturnStatement)) return ProblemType.NONE;
      PsiExpression falseValue = ((PsiReturnStatement)falseElement).getReturnValue();
      PsiExpression trueValue = ((PsiReturnStatement)trueElement).getReturnValue();
      if (!isSimpleOrUnchecked(falseValue)) return ProblemType.NONE;
      return getTypeByLambdaCandidate(optionalRef, trueValue, falseValue);
    }
    @NotNull
    @Override
    public String generateReplacement(@NotNull PsiElementFactory factory,
                                      @NotNull CommentTracker ct, @NotNull PsiReferenceExpression optionalVariable,
                                      PsiElement trueElement,
                                      PsiElement falseElement) {
      PsiExpression trueValue = ((PsiReturnStatement)trueElement).getReturnValue();
      PsiExpression falseValue = ((PsiReturnStatement)falseElement).getReturnValue();
      LOG.assertTrue(trueValue != null);
      LOG.assertTrue(falseValue != null);
      return "return " +
             generateOptionalUnwrap(factory, ct, optionalVariable, trueValue, falseValue, PsiTypesUtil.getMethodReturnType(trueElement)) +
             ";";
    }
  }
  /** Handles both branches assigning to the same l-value: {@code if (opt.isPresent()) x = a; else x = b;}. */
  static class AssignmentCase implements OptionalIsPresentCase {
    @NotNull
    @Override
    public ProblemType getProblemType(@NotNull PsiReferenceExpression optionalVariable,
                                      @Nullable PsiElement trueElement,
                                      @Nullable PsiElement falseElement) {
      PsiAssignmentExpression trueAssignment = ExpressionUtils.getAssignment(trueElement);
      PsiAssignmentExpression falseAssignment = ExpressionUtils.getAssignment(falseElement);
      if (trueAssignment == null || falseAssignment == null) return ProblemType.NONE;
      PsiExpression falseVal = falseAssignment.getRExpression();
      PsiExpression trueVal = trueAssignment.getRExpression();
      if (areElementsEquivalent(trueAssignment.getLExpression(), falseAssignment.getLExpression()) &&
          isSimpleOrUnchecked(falseVal)) {
        return getTypeByLambdaCandidate(optionalVariable, trueVal, falseVal);
      }
      return ProblemType.NONE;
    }
    @NotNull
    @Override
    public String generateReplacement(@NotNull PsiElementFactory factory,
                                      @NotNull CommentTracker ct,
                                      @NotNull PsiReferenceExpression optionalRef,
                                      PsiElement trueElement,
                                      PsiElement falseElement) {
      PsiAssignmentExpression trueAssignment = ExpressionUtils.getAssignment(trueElement);
      PsiAssignmentExpression falseAssignment = ExpressionUtils.getAssignment(falseElement);
      LOG.assertTrue(trueAssignment != null);
      LOG.assertTrue(falseAssignment != null);
      PsiExpression lValue = trueAssignment.getLExpression();
      PsiExpression trueValue = trueAssignment.getRExpression();
      PsiExpression falseValue = falseAssignment.getRExpression();
      LOG.assertTrue(trueValue != null);
      LOG.assertTrue(falseValue != null);
      return lValue.getText() + " = " + generateOptionalUnwrap(factory, ct, optionalRef, trueValue, falseValue, lValue.getType()) + ";";
    }
  }
  /** Handles {@code opt.isPresent() ? a : b} conditional expressions. */
  static class TernaryCase implements OptionalIsPresentCase {
    @NotNull
    @Override
    public ProblemType getProblemType(@NotNull PsiReferenceExpression optionalVariable,
                                      @Nullable PsiElement trueElement,
                                      @Nullable PsiElement falseElement) {
      if(!(trueElement instanceof PsiExpression) || !(falseElement instanceof PsiExpression)) return ProblemType.NONE;
      PsiExpression trueExpression = (PsiExpression)trueElement;
      PsiExpression falseExpression = (PsiExpression)falseElement;
      PsiType trueType = trueExpression.getType();
      PsiType falseType = falseExpression.getType();
      if (trueType == null || falseType == null || !trueType.isAssignableFrom(falseType) || !isSimpleOrUnchecked(falseExpression)) {
        return ProblemType.NONE;
      }
      return getTypeByLambdaCandidate(optionalVariable, trueExpression, falseExpression);
    }
    @NotNull
    @Override
    public String generateReplacement(@NotNull PsiElementFactory factory,
                                      @NotNull CommentTracker ct,
                                      @NotNull PsiReferenceExpression optionalVariable,
                                      PsiElement trueElement,
                                      PsiElement falseElement) {
      PsiExpression ternary = PsiTreeUtil.getParentOfType(trueElement, PsiConditionalExpression.class);
      LOG.assertTrue(ternary != null);
      PsiExpression trueExpression = (PsiExpression)trueElement;
      PsiExpression falseExpression = (PsiExpression)falseElement;
      return generateOptionalUnwrap(factory, ct, optionalVariable, trueExpression, falseExpression, ternary.getType());
    }
  }
  /**
   * Handles an if without else (or with an empty else) whose then-branch only consumes the
   * value; rewritten to {@code opt.ifPresent(...)}. A branch that is nothing but a bare
   * {@code opt.get()} call is not reported.
   */
  static class ConsumerCase implements OptionalIsPresentCase {
    @NotNull
    @Override
    public ProblemType getProblemType(@NotNull PsiReferenceExpression optionalRef,
                                      @Nullable PsiElement trueElement,
                                      @Nullable PsiElement falseElement) {
      if (falseElement != null && !(falseElement instanceof PsiEmptyStatement)) return ProblemType.NONE;
      if (!(trueElement instanceof PsiStatement)) return ProblemType.NONE;
      if (trueElement instanceof PsiExpressionStatement) {
        PsiExpression expression = ((PsiExpressionStatement)trueElement).getExpression();
        if (isOptionalGetCall(expression, optionalRef)) return ProblemType.NONE;
        trueElement = expression;
      }
      return getTypeByLambdaCandidate(optionalRef, trueElement, null);
    }
    @NotNull
    @Override
    public String generateReplacement(@NotNull PsiElementFactory factory,
                                      @NotNull CommentTracker ct,
                                      @NotNull PsiReferenceExpression optionalRef,
                                      PsiElement trueElement,
                                      PsiElement falseElement) {
      return optionalRef.getText() + ".ifPresent(" + generateOptionalLambda(factory, ct, optionalRef, trueElement) + ");";
    }
  }
}
| apache-2.0 |
yukuai518/gobblin | gobblin-data-management/src/main/java/gobblin/data/management/conversion/hive/converter/HiveAvroToFlattenedOrcConverter.java | 1990 | /*
* Copyright (C) 2014-2016 LinkedIn Corp. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use
* this file except in compliance with the License. You may obtain a copy of the
* License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed
* under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied.
*/
package gobblin.data.management.conversion.hive.converter;
import org.apache.avro.Schema;
import gobblin.configuration.WorkUnitState;
import gobblin.data.management.conversion.hive.dataset.ConvertibleHiveDataset.ConversionConfig;
import gobblin.util.AvroFlattener;
/**
 * An Avro to ORC converter that flattens the Avro schema before conversion,
 * i.e. the {@link OrcFormats#FLATTENED_ORC} output format.
 */
public class HiveAvroToFlattenedOrcConverter extends AbstractAvroToOrcConverter {

  // Shared flattener instance. Declared final so the reference is immutable and
  // safely published across threads. NOTE(review): AvroFlattener is only ever
  // invoked via flatten() here -- presumably stateless; confirm before reusing
  // this instance from concurrent converters.
  private static final AvroFlattener AVRO_FLATTENER = new AvroFlattener();

  /**
   * Flatten the <code>inputSchema</code>.
   *
   * @param inputSchema the (possibly nested) Avro schema to flatten
   * @param workUnit    current work unit state (unused by the flattening itself)
   * @return the flattened Avro schema
   * {@inheritDoc}
   * @see gobblin.data.management.conversion.hive.converter.AbstractAvroToOrcConverter#convertSchema(org.apache.avro.Schema, gobblin.configuration.WorkUnitState)
   */
  @Override
  public Schema convertSchema(Schema inputSchema, WorkUnitState workUnit) {
    // Second argument 'false' is the flatten flag defined by AvroFlattener.flatten;
    // TODO(review): confirm its exact meaning against AvroFlattener's documentation.
    return AVRO_FLATTENER.flatten(inputSchema, false);
  }

  /**
   * Return true if flattened orc configurations are available. False otherwise
   * {@inheritDoc}
   * @see gobblin.data.management.conversion.hive.converter.AbstractAvroToOrcConverter#hasConversionConfig()
   */
  @Override
  protected boolean hasConversionConfig() {
    return super.hiveDataset.getConversionConfigForFormat(OrcFormats.FLATTENED_ORC.getConfigPrefix()).isPresent();
  }

  /**
   * Returns the flattened-ORC conversion config; only valid to call when
   * {@link #hasConversionConfig()} returned true (Optional.get() otherwise throws).
   */
  @Override
  protected ConversionConfig getConversionConfig() {
    return super.hiveDataset.getConversionConfigForFormat(OrcFormats.FLATTENED_ORC.getConfigPrefix()).get();
  }
}
| apache-2.0 |
spring-projects/spring-data-examples | jpa/deferred/src/main/java/example/service/Customer1533Service.java | 225 | package example.service;
import example.repo.Customer1533Repository;
import org.springframework.stereotype.Service;
@Service
public class Customer1533Service {
	// Constructor injection of the repository; the reference is deliberately discarded.
	// NOTE(review): presumably this generated example only exercises that the
	// service -> repository bean graph can be wired -- confirm against the example's docs.
	public Customer1533Service(Customer1533Repository repo) {}
}
| apache-2.0 |
Logicoin/logicoinj-alice | core/src/main/java/com/google/logicoin/discovery/DnsDiscovery.java | 4966 | /**
* Copyright 2011 John Sample
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.logicoin.discovery;
import com.google.logicoin.core.NetworkParameters;
import com.google.common.collect.Lists;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.util.*;
import java.util.concurrent.*;
/**
* <p>Supports peer discovery through DNS.</p>
*
* <p>This class does not support the testnet as currently there are no DNS servers providing testnet hosts.
* If this class is being used for testnet you must specify the hostnames to use.</p>
*
* <p>Failure to resolve individual host names will not cause an Exception to be thrown.
* However, if all hosts passed fail to resolve a PeerDiscoveryException will be thrown during getPeers().
* </p>
*
* <p>DNS seeds do not attempt to enumerate every peer on the network. {@link DnsDiscovery#getPeers(long, java.util.concurrent.TimeUnit)}
* will return up to 30 random peers from the set of those returned within the timeout period. If you want more peers
* to connect to, you need to discover them via other means (like addr broadcasts).</p>
*/
public class DnsDiscovery implements PeerDiscovery {
    private static final Logger log = LoggerFactory.getLogger(DnsDiscovery.class);

    // Both fields are write-once, so they are final: instances are immutable and
    // safely publishable. hostNames may legitimately be null (a network without DNS
    // seeds); getPeers() reports that case as a PeerDiscoveryException.
    private final String[] hostNames;
    private final NetworkParameters netParams;

    /**
     * Supports finding peers through DNS A records. Community run DNS entry points will be used.
     *
     * @param netParams Network parameters to be used for port information.
     */
    public DnsDiscovery(NetworkParameters netParams) {
        this(netParams.getDnsSeeds(), netParams);
    }

    /**
     * Supports finding peers through DNS A records.
     *
     * @param hostNames Host names to be examined for seed addresses.
     * @param netParams Network parameters to be used for port information.
     */
    public DnsDiscovery(String[] hostNames, NetworkParameters netParams) {
        this.hostNames = hostNames;
        this.netParams = netParams;
    }

    /**
     * Resolves all configured seed host names in parallel, waiting at most the given timeout,
     * and returns the shuffled set of resolved socket addresses (using the network's port).
     * Individual lookup failures are logged and skipped; only a completely empty result
     * raises a {@link PeerDiscoveryException}.
     */
    public InetSocketAddress[] getPeers(long timeoutValue, TimeUnit timeoutUnit) throws PeerDiscoveryException {
        if (hostNames == null)
            throw new PeerDiscoveryException("Unable to find any peers via DNS");
        // Java doesn't have an async DNS API so we have to do all lookups in a thread pool, as sometimes seeds go
        // hard down and it takes ages to give up and move on.
        ExecutorService threadPool = Executors.newFixedThreadPool(hostNames.length);
        try {
            List<Callable<InetAddress[]>> tasks = Lists.newArrayList();
            for (final String seed : hostNames)
                tasks.add(new Callable<InetAddress[]>() {
                    public InetAddress[] call() throws Exception {
                        return InetAddress.getAllByName(seed);
                    }
                });
            // invokeAll() blocks up to the timeout and cancels any lookups still running when it expires.
            final List<Future<InetAddress[]>> futures = threadPool.invokeAll(tasks, timeoutValue, timeoutUnit);
            List<InetSocketAddress> addrs = Lists.newArrayList();
            for (int i = 0; i < futures.size(); i++) {
                Future<InetAddress[]> future = futures.get(i);
                if (future.isCancelled()) {
                    log.warn("{} timed out", hostNames[i]);
                    continue; // Timed out.
                }
                final InetAddress[] inetAddresses;
                try {
                    inetAddresses = future.get();
                } catch (ExecutionException e) {
                    // A single dead seed is not fatal; keep collecting from the others.
                    log.error("Failed to look up DNS seeds from {}: {}", hostNames[i], e.getMessage());
                    continue;
                }
                for (InetAddress addr : inetAddresses) {
                    addrs.add(new InetSocketAddress(addr, netParams.getPort()));
                }
            }
            if (addrs.isEmpty())
                throw new PeerDiscoveryException("Unable to find any peers via DNS");
            // Shuffle so callers don't all hammer the same subset of seed peers.
            Collections.shuffle(addrs);
            return addrs.toArray(new InetSocketAddress[addrs.size()]);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt(); // restore the caller's interrupt status before wrapping
            throw new PeerDiscoveryException(e);
        } finally {
            // Single shutdown point; replaces the previous redundant shutdownNow()-then-shutdown()
            // pair. After invokeAll() returns, every task is already completed or cancelled.
            threadPool.shutdown();
        }
    }

    /** We don't have a way to abort a DNS lookup, so this does nothing */
    public void shutdown() {
    }
}
| apache-2.0 |
inbloom/secure-data-service | tools/data-tools/src/org/slc/sli/test/edfi/entities/ParentIdentityType.java | 7483 | //
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, vJAXB 2.1.10 in JDK 6
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2012.12.06 at 10:00:50 AM EST
//
package org.slc.sli.test.edfi.entities;
import java.util.ArrayList;
import java.util.List;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlType;
/**
* Encapsulates the possible attributes that can be used to lookup the identity of parents and guardians. The various fields should be populated to uniquely identify a parent.
*
* <p>Java class for ParentIdentityType complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="ParentIdentityType">
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <element name="ParentUniqueStateId" type="{http://ed-fi.org/0100}UniqueStateIdentifier" minOccurs="0"/>
* <element name="Name" type="{http://ed-fi.org/0100}Name" minOccurs="0"/>
* <element name="OtherName" type="{http://ed-fi.org/0100}OtherName" maxOccurs="unbounded" minOccurs="0"/>
* <element name="Sex" type="{http://ed-fi.org/0100}SexType" minOccurs="0"/>
* <element name="Address" type="{http://ed-fi.org/0100}Address" maxOccurs="unbounded" minOccurs="0"/>
* <element name="Telephone" type="{http://ed-fi.org/0100}Telephone" maxOccurs="unbounded" minOccurs="0"/>
* <element name="ElectronicMail" type="{http://ed-fi.org/0100}ElectronicMail" maxOccurs="unbounded" minOccurs="0"/>
* </sequence>
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "ParentIdentityType", propOrder = {
    "parentUniqueStateId",
    "name",
    "otherName",
    "sex",
    "address",
    "telephone",
    "electronicMail"
})
// NOTE(review): this class is JAXB-generated (see the file header); manual edits
// will be overwritten if the bindings are regenerated from the schema.
public class ParentIdentityType {
    @XmlElement(name = "ParentUniqueStateId")
    protected String parentUniqueStateId;
    @XmlElement(name = "Name")
    protected Name name;
    @XmlElement(name = "OtherName")
    protected List<OtherName> otherName;
    @XmlElement(name = "Sex")
    protected SexType sex;
    @XmlElement(name = "Address")
    protected List<Address> address;
    @XmlElement(name = "Telephone")
    protected List<Telephone> telephone;
    @XmlElement(name = "ElectronicMail")
    protected List<ElectronicMail> electronicMail;
    /**
     * Gets the value of the parentUniqueStateId property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getParentUniqueStateId() {
        return parentUniqueStateId;
    }
    /**
     * Sets the value of the parentUniqueStateId property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setParentUniqueStateId(String value) {
        this.parentUniqueStateId = value;
    }
    /**
     * Gets the value of the name property.
     *
     * @return
     *     possible object is
     *     {@link Name }
     *
     */
    public Name getName() {
        return name;
    }
    /**
     * Sets the value of the name property.
     *
     * @param value
     *     allowed object is
     *     {@link Name }
     *
     */
    public void setName(Name value) {
        this.name = value;
    }
    /**
     * Gets the value of the otherName property.
     *
     * <p>
     * This accessor method returns a reference to the live list,
     * not a snapshot. Therefore any modification you make to the
     * returned list will be present inside the JAXB object.
     * This is why there is not a <CODE>set</CODE> method for the otherName property.
     *
     * <p>
     * For example, to add a new item, do as follows:
     * <pre>
     *    getOtherName().add(newItem);
     * </pre>
     *
     *
     * <p>
     * Objects of the following type(s) are allowed in the list
     * {@link OtherName }
     *
     *
     */
    public List<OtherName> getOtherName() {
        if (otherName == null) {
            // Standard JAXB lazy initialization. The unsynchronized check-then-act
            // means instances must not be shared across threads during population.
            otherName = new ArrayList<OtherName>();
        }
        return this.otherName;
    }
    /**
     * Gets the value of the sex property.
     *
     * @return
     *     possible object is
     *     {@link SexType }
     *
     */
    public SexType getSex() {
        return sex;
    }
    /**
     * Sets the value of the sex property.
     *
     * @param value
     *     allowed object is
     *     {@link SexType }
     *
     */
    public void setSex(SexType value) {
        this.sex = value;
    }
    /**
     * Gets the value of the address property.
     *
     * <p>
     * This accessor method returns a reference to the live list,
     * not a snapshot. Therefore any modification you make to the
     * returned list will be present inside the JAXB object.
     * This is why there is not a <CODE>set</CODE> method for the address property.
     *
     * <p>
     * For example, to add a new item, do as follows:
     * <pre>
     *    getAddress().add(newItem);
     * </pre>
     *
     *
     * <p>
     * Objects of the following type(s) are allowed in the list
     * {@link Address }
     *
     *
     */
    public List<Address> getAddress() {
        if (address == null) {
            // lazy init (see getOtherName note)
            address = new ArrayList<Address>();
        }
        return this.address;
    }
    /**
     * Gets the value of the telephone property.
     *
     * <p>
     * This accessor method returns a reference to the live list,
     * not a snapshot. Therefore any modification you make to the
     * returned list will be present inside the JAXB object.
     * This is why there is not a <CODE>set</CODE> method for the telephone property.
     *
     * <p>
     * For example, to add a new item, do as follows:
     * <pre>
     *    getTelephone().add(newItem);
     * </pre>
     *
     *
     * <p>
     * Objects of the following type(s) are allowed in the list
     * {@link Telephone }
     *
     *
     */
    public List<Telephone> getTelephone() {
        if (telephone == null) {
            // lazy init (see getOtherName note)
            telephone = new ArrayList<Telephone>();
        }
        return this.telephone;
    }
    /**
     * Gets the value of the electronicMail property.
     *
     * <p>
     * This accessor method returns a reference to the live list,
     * not a snapshot. Therefore any modification you make to the
     * returned list will be present inside the JAXB object.
     * This is why there is not a <CODE>set</CODE> method for the electronicMail property.
     *
     * <p>
     * For example, to add a new item, do as follows:
     * <pre>
     *    getElectronicMail().add(newItem);
     * </pre>
     *
     *
     * <p>
     * Objects of the following type(s) are allowed in the list
     * {@link ElectronicMail }
     *
     *
     */
    public List<ElectronicMail> getElectronicMail() {
        if (electronicMail == null) {
            // lazy init (see getOtherName note)
            electronicMail = new ArrayList<ElectronicMail>();
        }
        return this.electronicMail;
    }
}
| apache-2.0 |
zyj1609wz/RecyclerView | app6/src/main/java/com/sunkai/stickheaderrecyleview/StickHeaderView.java | 351 | package com.sunkai.stickheaderrecyleview;
import android.content.Context;
import android.util.AttributeSet;
import android.widget.RelativeLayout;
/**
 * View used as the sticky header container in the sticky-header RecyclerView sample.
 * (Originally created by sunkai on 2016/12/2.)
 *
 * <p>NOTE(review): this class currently adds no behavior over {@link RelativeLayout};
 * presumably it serves as a marker/root for the sticky-header layout -- confirm.</p>
 */
public class StickHeaderView extends RelativeLayout {

    /** Programmatic creation; previously only the XML-inflation constructor existed. */
    public StickHeaderView(Context context) {
        super(context);
    }

    /** Invoked when the view is inflated from a layout XML file. */
    public StickHeaderView(Context context, AttributeSet attrs) {
        super(context, attrs);
    }

    /** Full form with a default style attribute, mirroring {@link RelativeLayout}. */
    public StickHeaderView(Context context, AttributeSet attrs, int defStyleAttr) {
        super(context, attrs, defStyleAttr);
    }
}
| apache-2.0 |
harfalm/Sakai-10.1 | common/impl/src/java/org/sakaiproject/component/common/edu/person/OrganizationalPersonImpl.java | 5332 | /**********************************************************************************
* $URL: https://source.sakaiproject.org/svn/common/tags/sakai-10.1/impl/src/java/org/sakaiproject/component/common/edu/person/OrganizationalPersonImpl.java $
* $Id: OrganizationalPersonImpl.java 105077 2012-02-24 22:54:29Z ottenhoff@longsight.com $
***********************************************************************************
*
* Copyright (c) 2003, 2004, 2005, 2006 The Sakai Foundation.
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ECL-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
**********************************************************************************/
package org.sakaiproject.component.common.edu.person;
import org.sakaiproject.api.common.edu.person.OrganizationalPerson;
import org.sakaiproject.api.common.edu.person.Person;
/**
 * Plain JavaBean implementation of {@link OrganizationalPerson}. Extends
 * {@link PersonImpl} with the address/title/organisational attributes of the
 * organizationalPerson schema. All properties are simple read/write String
 * fields with no validation or defensive copying.
 *
 * @author <a href="mailto:lance@indiana.edu">Lance Speelmon </a>
 */
public class OrganizationalPersonImpl extends PersonImpl implements Person, OrganizationalPerson
{
    // organizationalPerson attributes, grouped together for readability.
    protected String facsimileTelephoneNumber;
    protected String localityName;
    protected String stateOrProvinceName;
    protected String postalCode;
    protected String postOfficeBox;
    protected String streetAddress;
    protected String physicalDeliveryOfficeName;
    protected String postalAddress;
    protected String title;
    protected String organizationalUnit;

    /** @return the fax number, or null if never set. */
    public String getFacsimileTelephoneNumber()
    {
        return facsimileTelephoneNumber;
    }

    /** @param facsimileTelephoneNumber the fax number to set. */
    public void setFacsimileTelephoneNumber(String facsimileTelephoneNumber)
    {
        this.facsimileTelephoneNumber = facsimileTelephoneNumber;
    }

    /** @return the locality (city/town) name, or null if never set. */
    public String getLocalityName()
    {
        return localityName;
    }

    /** @param localityName the locality (city/town) name to set. */
    public void setLocalityName(String localityName)
    {
        this.localityName = localityName;
    }

    /** @return the state or province name, or null if never set. */
    public String getStateOrProvinceName()
    {
        return stateOrProvinceName;
    }

    /** @param stateOrProvinceName the state or province name to set. */
    public void setStateOrProvinceName(String stateOrProvinceName)
    {
        this.stateOrProvinceName = stateOrProvinceName;
    }

    /** @return the postal (zip) code, or null if never set. */
    public String getPostalCode()
    {
        return postalCode;
    }

    /** @param postalCode the postal (zip) code to set. */
    public void setPostalCode(String postalCode)
    {
        this.postalCode = postalCode;
    }

    /** @return the post office box, or null if never set. */
    public String getPostOfficeBox()
    {
        return postOfficeBox;
    }

    /** @param postOfficeBox the post office box to set. */
    public void setPostOfficeBox(String postOfficeBox)
    {
        this.postOfficeBox = postOfficeBox;
    }

    /** @return the street address, or null if never set. */
    public String getStreetAddress()
    {
        return streetAddress;
    }

    /** @param streetAddress the street address to set. */
    public void setStreetAddress(String streetAddress)
    {
        this.streetAddress = streetAddress;
    }

    /** @return the physical delivery office name, or null if never set. */
    public String getPhysicalDeliveryOfficeName()
    {
        return physicalDeliveryOfficeName;
    }

    /** @param physicalDeliveryOfficeName the physical delivery office name to set. */
    public void setPhysicalDeliveryOfficeName(String physicalDeliveryOfficeName)
    {
        this.physicalDeliveryOfficeName = physicalDeliveryOfficeName;
    }

    /** @return the full postal address, or null if never set. */
    public String getPostalAddress()
    {
        return postalAddress;
    }

    /** @param postalAddress the full postal address to set. */
    public void setPostalAddress(String postalAddress)
    {
        this.postalAddress = postalAddress;
    }

    /** @return the person's title, or null if never set. */
    public String getTitle()
    {
        return title;
    }

    /** @param title the person's title to set. */
    public void setTitle(String title)
    {
        this.title = title;
    }

    /** @return the organizational unit, or null if never set. */
    public String getOrganizationalUnit()
    {
        return organizationalUnit;
    }

    /** @param organizationalUnit the organizational unit to set. */
    public void setOrganizationalUnit(String organizationalUnit)
    {
        this.organizationalUnit = organizationalUnit;
    }
}
| apache-2.0 |
Ariah-Group/Finance | af_webapp/src/main/java/org/kuali/kfs/module/ar/document/validation/impl/CustomerInvoiceDetailSubFundGroupReceivableValidation.java | 3396 | /*
* Copyright 2008 The Kuali Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ecl2.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kuali.kfs.module.ar.document.validation.impl;
import org.apache.commons.lang.StringUtils;
import org.kuali.kfs.coa.businessobject.Account;
import org.kuali.kfs.module.ar.ArConstants;
import org.kuali.kfs.module.ar.ArKeyConstants;
import org.kuali.kfs.module.ar.businessobject.CustomerInvoiceDetail;
import org.kuali.kfs.module.ar.document.CustomerInvoiceDocument;
import org.kuali.kfs.sys.KFSConstants;
import org.kuali.kfs.sys.KFSPropertyConstants;
import org.kuali.kfs.sys.document.validation.GenericValidation;
import org.kuali.kfs.sys.document.validation.event.AttributedDocumentEvent;
import org.kuali.rice.coreservice.framework.parameter.ParameterService;
import org.kuali.rice.krad.util.GlobalVariables;
import org.kuali.rice.krad.util.ObjectUtils;
/**
 * Validates that the sub fund group on the invoice detail's account maps to a
 * valid accounts-receivable object code (looked up from the
 * GLPE_RECEIVABLE_OFFSET_OBJECT_CODE_BY_SUB_FUND sub-parameter). On success
 * the resolved object code is written back onto the invoice detail.
 */
public class CustomerInvoiceDetailSubFundGroupReceivableValidation extends GenericValidation {

    protected CustomerInvoiceDetail customerInvoiceDetail;
    protected ParameterService parameterService;

    /**
     * Runs the validation, reporting failures into the global message map.
     *
     * @param event the document event being validated (unused directly)
     * @return true if the account has no sub fund group, or the sub fund group
     *         resolves to an existing receivable object code; false otherwise
     */
    @Override
    public boolean validate(AttributedDocumentEvent event) {
        customerInvoiceDetail.refreshReferenceObject(KFSPropertyConstants.ACCOUNT);
        Account account = customerInvoiceDetail.getAccount();

        String subFundGroupCode = account.getSubFundGroupCode();
        if (StringUtils.isEmpty(subFundGroupCode)) {
            // No sub fund group on the account: nothing to validate against.
            return true;
        }

        // Resolve the receivable object code configured for this sub fund group.
        String receivableObjectCode = parameterService.getSubParameterValueAsString(
                CustomerInvoiceDocument.class, ArConstants.GLPE_RECEIVABLE_OFFSET_OBJECT_CODE_BY_SUB_FUND, subFundGroupCode);
        if (StringUtils.isEmpty(receivableObjectCode)) {
            GlobalVariables.getMessageMap().putError(
                    KFSConstants.SUB_FUND_GROUP_CODE_PROPERTY_NAME,
                    ArKeyConstants.ERROR_CUSTOMER_INVOICE_DOCUMENT_INVALID_SUBFUND_WITH_NO_AR_OBJ_CD,
                    subFundGroupCode, account.getAccountNumber());
            return false;
        }

        // Store the resolved code and verify it refers to a real object.
        customerInvoiceDetail.setAccountsReceivableObjectCode(receivableObjectCode);
        customerInvoiceDetail.refreshReferenceObject("accountsReceivableObject");
        if (ObjectUtils.isNull(customerInvoiceDetail.getAccountsReceivableObject())) {
            GlobalVariables.getMessageMap().putError(
                    KFSConstants.SUB_FUND_GROUP_CODE_PROPERTY_NAME,
                    ArKeyConstants.ERROR_CUSTOMER_INVOICE_DOCUMENT_INVALID_SUBFUND_AR_OBJ_CD_IN_PARM,
                    receivableObjectCode, subFundGroupCode, account.getAccountNumber());
            return false;
        }

        return true;
    }

    public void setCustomerInvoiceDetail(CustomerInvoiceDetail customerInvoiceDetail) {
        this.customerInvoiceDetail = customerInvoiceDetail;
    }

    public void setParameterService(ParameterService parameterService) {
        this.parameterService = parameterService;
    }
}
| apache-2.0 |
metatron-app/metatron-discovery | discovery-server/src/main/java/app/metatron/discovery/domain/mdm/MetadataRepositoryExtends.java | 1688 | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package app.metatron.discovery.domain.mdm;
import org.joda.time.DateTime;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import java.util.List;
/**
 * Custom search and lookup operations for {@link Metadata} entities that go
 * beyond Spring Data's derived queries (implemented in a companion *Impl
 * class — NOTE(review): confirm the implementing class in this package).
 */
public interface MetadataRepositoryExtends {

  /**
   * Pages metadata filtered by source types, catalog, tag, a name fragment,
   * and an optional date range.
   *
   * @param sourceType   source types to include; presumably null/empty means no filter — verify in impl
   * @param catalogId    catalog to restrict results to (nullable)
   * @param tag          tag to restrict results to (nullable)
   * @param nameContains fragment the metadata name must contain
   * @param searchDateBy name of the date field the from/to range applies to
   * @param from         range start (nullable)
   * @param to           range end (nullable)
   * @param pageable     paging and sorting information
   * @return one page of matching metadata
   */
  Page<Metadata> searchMetadatas(List<Metadata.SourceType> sourceType, String catalogId, String tag, String nameContains,
                                 String searchDateBy, DateTime from, DateTime to, Pageable pageable);

  /**
   * Full search variant adding a free-text keyword, description fragment, and
   * a creator-user filter on top of the filters above.
   */
  Page<Metadata> searchMetadatas(String keyword, List<Metadata.SourceType> sourceType, String catalogId, String tag,
                                 String nameContains, String descContains, List<String> userIds,
                                 String searchDateBy, DateTime from, DateTime to, Pageable pageable);

  /** Finds metadata originating from the given source, schema and tables. */
  List<Metadata> findBySource(String sourceId, String schema, List<String> table);

  /** Finds metadata originating from any of the given sources. */
  List<Metadata> findBySource(List<String> sourceIds);

  /** Finds metadata by exact name (may match more than one entry). */
  List<Metadata> findByName(String name);

  /** Finds metadata by id; returns a list rather than a single entity. */
  List<Metadata> findById(String id);

  /** Counts metadata grouped by source type. */
  List<MetadataStatsDto> countBySourceType();

  /** Lists distinct metadata creators whose name contains the fragment. */
  List<DataCreatorDTO> findDistinctCreatorByName(String nameContains);
}
| apache-2.0 |
palantir/http-remoting | extras/refresh-utils/src/test/java/com/palantir/conjure/java/ext/refresh/RefreshableProxyInvocationHandlerTest.java | 4576 | /*
* (c) Copyright 2017 Palantir Technologies Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.palantir.conjure.java.ext.refresh;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
import static org.assertj.core.api.Assertions.failBecauseExceptionWasNotThrown;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import com.google.common.reflect.Reflection;
import java.util.function.Function;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.MockitoAnnotations;
/**
 * Unit tests for {@code RefreshableProxyInvocationHandler}: construction
 * preconditions, delegate switching when the refreshable changes, and
 * unwrapping of reflective invocation exceptions.
 */
public final class RefreshableProxyInvocationHandlerTest {

    /** Minimal single-method interface for the proxy under test. */
    interface Callable {
        void call();
    }

    // Maps the refreshable's current value to the delegate the proxy should use.
    @Mock
    private Function<Object, Callable> supplier;

    @Mock
    private Callable delegate1;

    @Mock
    private Callable delegate2;

    @Before
    public void before() {
        MockitoAnnotations.initMocks(this);
    }

    @Test
    public void testCannotConstructProxyWhenInitialRefreshableIsEmpty() throws Exception {
        // An empty refreshable provides no initial delegate, so construction
        // must fail fast instead of deferring the failure to the first call.
        Refreshable<Object> refreshable = Refreshable.empty();
        try {
            RefreshableProxyInvocationHandler.create(refreshable, supplier);
            failBecauseExceptionWasNotThrown(IllegalArgumentException.class);
        } catch (IllegalArgumentException e) {
            assertThat(e)
                    .hasMessage("Cannot construct RefreshableProxyInvocationHandler with empty initial refreshable");
        }
    }

    @Test
    public void testUsesInitialDelegateAndUpdatesDelegatesWhenRefreshableChanges() throws Exception {
        Object object1 = new Object();
        Object object2 = new Object();
        // Setup initial proxy to call out to delegate1, this fetches the initial delegate from the supplier.
        when(supplier.apply(object1)).thenReturn(delegate1);
        Refreshable<Object> refreshable = Refreshable.of(object1);
        RefreshableProxyInvocationHandler<Object, Callable> handler =
                RefreshableProxyInvocationHandler.create(refreshable, supplier);
        verify(supplier).apply(object1);
        @SuppressWarnings("ProxyNonConstantType")
        Callable proxy = Reflection.newProxy(Callable.class, handler);
        // First call: check that delegate 1 received call and that supplier is not invoked.
        proxy.call();
        verify(delegate1).call();
        Mockito.verifyNoMoreInteractions(delegate1, delegate2, supplier);
        // Second call: still using delegate1, not invoking the supplier.
        proxy.call();
        verify(delegate1, times(2)).call();
        Mockito.verifyNoMoreInteractions(delegate1, delegate2, supplier);
        // Third call: refresh the object and make the supplier return a new delegate2.
        refreshable.set(object2);
        when(supplier.apply(object2)).thenReturn(delegate2);
        proxy.call();
        verify(delegate2).call();
        verify(supplier).apply(object2);
        Mockito.verifyNoMoreInteractions(delegate1, delegate2, supplier);
        // Fourth call: still using delegate2, not invoking the supplier.
        proxy.call();
        verify(delegate2, times(2)).call();
        Mockito.verifyNoMoreInteractions(delegate1, delegate2, supplier);
    }

    @Test
    public void testUnwrapsInvocationTargetExceptions() {
        // Dynamic proxies invoke via reflection; the handler must rethrow the
        // delegate's original exception rather than an InvocationTargetException.
        Callable throwingCallable = () -> {
            throw new IllegalStateException("Whoops");
        };
        Refreshable<Callable> refreshable = Refreshable.of(throwingCallable);
        RefreshableProxyInvocationHandler<Callable, Callable> handler =
                RefreshableProxyInvocationHandler.create(refreshable, _tec -> throwingCallable);
        @SuppressWarnings("ProxyNonConstantType")
        Callable proxy = Reflection.newProxy(Callable.class, handler);
        assertThatThrownBy(proxy::call)
                .isInstanceOf(IllegalStateException.class)
                .hasMessage("Whoops");
    }
}
| apache-2.0 |
ronakkhunt/typeahead | src/main/java/com/typeahead/constants/FileExtension.java | 368 | package com.typeahead.constants;
/**
 * File-name extensions for the various index artifacts persisted on disk.
 * Each constant carries its dot-prefixed suffix, exposed via
 * {@link #getExtension()}.
 */
public enum FileExtension {

    DATA_MAP(".dm"),
    FIELD_FST_MAP(".ffm"),
    MAPPING(".mapping"),
    METADATA(".metadata"),
    DATA_MAP_DOCUMENT(".dmd"),
    DELETE_INDEX(".del");

    /** Dot-prefixed file-name suffix associated with this constant. */
    private final String extension;

    FileExtension(String extension) {
        this.extension = extension;
    }

    /**
     * @return the dot-prefixed extension, e.g. {@code ".dm"}
     */
    public String getExtension() {
        return extension;
    }
}
| apache-2.0 |
nicolaferraro/camel | core/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/HdfsEndpointBuilderFactory.java | 85813 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.builder.endpoint.dsl;
import java.util.Map;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import javax.annotation.Generated;
import org.apache.camel.ExchangePattern;
import org.apache.camel.LoggingLevel;
import org.apache.camel.builder.EndpointConsumerBuilder;
import org.apache.camel.builder.EndpointProducerBuilder;
import org.apache.camel.builder.endpoint.AbstractEndpointBuilder;
import org.apache.camel.spi.ExceptionHandler;
import org.apache.camel.spi.PollingConsumerPollStrategy;
/**
* Read and write from/to an HDFS filesystem using Hadoop 2.x.
*
* Generated by camel build tools - do NOT edit this file!
*/
@Generated("org.apache.camel.maven.packaging.EndpointDslMojo")
public interface HdfsEndpointBuilderFactory {
/**
* Builder for endpoint consumers for the HDFS component.
*/
public interface HdfsEndpointConsumerBuilder
extends
EndpointConsumerBuilder {
default AdvancedHdfsEndpointConsumerBuilder advanced() {
return (AdvancedHdfsEndpointConsumerBuilder) this;
}
/**
* Whether to connect to the HDFS file system on starting the
* producer/consumer. If false then the connection is created on-demand.
* Notice that HDFS may take up till 15 minutes to establish a
* connection, as it has hardcoded 45 x 20 sec redelivery. By setting
* this option to false allows your application to startup, and not
* block for up till 15 minutes.
*
* The option is a: <code>boolean</code> type.
*
* Default: true
* Group: common
*/
default HdfsEndpointConsumerBuilder connectOnStartup(
boolean connectOnStartup) {
doSetProperty("connectOnStartup", connectOnStartup);
return this;
}
/**
* Whether to connect to the HDFS file system on starting the
* producer/consumer. If false then the connection is created on-demand.
* Notice that HDFS may take up till 15 minutes to establish a
* connection, as it has hardcoded 45 x 20 sec redelivery. By setting
* this option to false allows your application to startup, and not
* block for up till 15 minutes.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: true
* Group: common
*/
default HdfsEndpointConsumerBuilder connectOnStartup(
String connectOnStartup) {
doSetProperty("connectOnStartup", connectOnStartup);
return this;
}
/**
* Set to LOCAL to not use HDFS but local java.io.File instead.
*
* The option is a:
* <code>org.apache.camel.component.hdfs.HdfsFileSystemType</code> type.
*
* Default: HDFS
* Group: common
*/
default HdfsEndpointConsumerBuilder fileSystemType(
HdfsFileSystemType fileSystemType) {
doSetProperty("fileSystemType", fileSystemType);
return this;
}
/**
* Set to LOCAL to not use HDFS but local java.io.File instead.
*
* The option will be converted to a
* <code>org.apache.camel.component.hdfs.HdfsFileSystemType</code> type.
*
* Default: HDFS
* Group: common
*/
default HdfsEndpointConsumerBuilder fileSystemType(String fileSystemType) {
doSetProperty("fileSystemType", fileSystemType);
return this;
}
/**
* The file type to use. For more details see Hadoop HDFS documentation
* about the various files types.
*
* The option is a:
* <code>org.apache.camel.component.hdfs.HdfsFileType</code> type.
*
* Default: NORMAL_FILE
* Group: common
*/
default HdfsEndpointConsumerBuilder fileType(HdfsFileType fileType) {
doSetProperty("fileType", fileType);
return this;
}
/**
* The file type to use. For more details see Hadoop HDFS documentation
* about the various files types.
*
* The option will be converted to a
* <code>org.apache.camel.component.hdfs.HdfsFileType</code> type.
*
* Default: NORMAL_FILE
* Group: common
*/
default HdfsEndpointConsumerBuilder fileType(String fileType) {
doSetProperty("fileType", fileType);
return this;
}
/**
* The type for the key in case of sequence or map files.
*
* The option is a:
* <code>org.apache.camel.component.hdfs.WritableType</code> type.
*
* Default: NULL
* Group: common
*/
default HdfsEndpointConsumerBuilder keyType(WritableType keyType) {
doSetProperty("keyType", keyType);
return this;
}
/**
* The type for the key in case of sequence or map files.
*
* The option will be converted to a
* <code>org.apache.camel.component.hdfs.WritableType</code> type.
*
* Default: NULL
* Group: common
*/
default HdfsEndpointConsumerBuilder keyType(String keyType) {
doSetProperty("keyType", keyType);
return this;
}
/**
* A comma separated list of named nodes (e.g.
* srv11.example.com:8020,srv12.example.com:8020).
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*/
default HdfsEndpointConsumerBuilder namedNodes(String namedNodes) {
doSetProperty("namedNodes", namedNodes);
return this;
}
/**
* The file owner must match this owner for the consumer to pickup the
* file. Otherwise the file is skipped.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*/
default HdfsEndpointConsumerBuilder owner(String owner) {
doSetProperty("owner", owner);
return this;
}
/**
* The type for the key in case of sequence or map files.
*
* The option is a:
* <code>org.apache.camel.component.hdfs.WritableType</code> type.
*
* Default: BYTES
* Group: common
*/
default HdfsEndpointConsumerBuilder valueType(WritableType valueType) {
doSetProperty("valueType", valueType);
return this;
}
/**
* The type for the key in case of sequence or map files.
*
* The option will be converted to a
* <code>org.apache.camel.component.hdfs.WritableType</code> type.
*
* Default: BYTES
* Group: common
*/
default HdfsEndpointConsumerBuilder valueType(String valueType) {
doSetProperty("valueType", valueType);
return this;
}
/**
* Allows for bridging the consumer to the Camel routing Error Handler,
* which mean any exceptions occurred while the consumer is trying to
* pickup incoming messages, or the likes, will now be processed as a
* message and handled by the routing Error Handler. By default the
* consumer will use the org.apache.camel.spi.ExceptionHandler to deal
* with exceptions, that will be logged at WARN or ERROR level and
* ignored.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: consumer
*/
default HdfsEndpointConsumerBuilder bridgeErrorHandler(
boolean bridgeErrorHandler) {
doSetProperty("bridgeErrorHandler", bridgeErrorHandler);
return this;
}
/**
* Allows for bridging the consumer to the Camel routing Error Handler,
* which mean any exceptions occurred while the consumer is trying to
* pickup incoming messages, or the likes, will now be processed as a
* message and handled by the routing Error Handler. By default the
* consumer will use the org.apache.camel.spi.ExceptionHandler to deal
* with exceptions, that will be logged at WARN or ERROR level and
* ignored.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: consumer
*/
default HdfsEndpointConsumerBuilder bridgeErrorHandler(
String bridgeErrorHandler) {
doSetProperty("bridgeErrorHandler", bridgeErrorHandler);
return this;
}
/**
* The pattern used for scanning the directory.
*
* The option is a: <code>java.lang.String</code> type.
*
* Default: *
* Group: consumer
*/
default HdfsEndpointConsumerBuilder pattern(String pattern) {
doSetProperty("pattern", pattern);
return this;
}
/**
* If the polling consumer did not poll any files, you can enable this
* option to send an empty message (no body) instead.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: consumer
*/
default HdfsEndpointConsumerBuilder sendEmptyMessageWhenIdle(
boolean sendEmptyMessageWhenIdle) {
doSetProperty("sendEmptyMessageWhenIdle", sendEmptyMessageWhenIdle);
return this;
}
/**
* If the polling consumer did not poll any files, you can enable this
* option to send an empty message (no body) instead.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: consumer
*/
default HdfsEndpointConsumerBuilder sendEmptyMessageWhenIdle(
String sendEmptyMessageWhenIdle) {
doSetProperty("sendEmptyMessageWhenIdle", sendEmptyMessageWhenIdle);
return this;
}
/**
* Sets the download method to use when not using a local working
* directory. If set to true, the remote files are streamed to the route
* as they are read. When set to false, the remote files are loaded into
* memory before being sent into the route.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: consumer
*/
default HdfsEndpointConsumerBuilder streamDownload(
boolean streamDownload) {
doSetProperty("streamDownload", streamDownload);
return this;
}
/**
* Sets the download method to use when not using a local working
* directory. If set to true, the remote files are streamed to the route
* as they are read. When set to false, the remote files are loaded into
* memory before being sent into the route.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: consumer
*/
default HdfsEndpointConsumerBuilder streamDownload(String streamDownload) {
doSetProperty("streamDownload", streamDownload);
return this;
}
/**
* To define a maximum messages to gather per poll. By default a limit
* of 100 is set. Can be used to set a limit of e.g. 1000 to avoid when
* starting up the server that there are thousands of files. Values can
* only be greater than 0. Notice: If this option is in use then the
* limit will be applied on the valid files. For example if you have
* 100000 files and use maxMessagesPerPoll=500, then only the first 500
* files will be picked up.
*
* The option is a: <code>int</code> type.
*
* Default: 100
* Group: filter
*/
default HdfsEndpointConsumerBuilder maxMessagesPerPoll(
int maxMessagesPerPoll) {
doSetProperty("maxMessagesPerPoll", maxMessagesPerPoll);
return this;
}
/**
* To define a maximum messages to gather per poll. By default a limit
* of 100 is set. Can be used to set a limit of e.g. 1000 to avoid when
* starting up the server that there are thousands of files. Values can
* only be greater than 0. Notice: If this option is in use then the
* limit will be applied on the valid files. For example if you have
* 100000 files and use maxMessagesPerPoll=500, then only the first 500
* files will be picked up.
*
* The option will be converted to a <code>int</code> type.
*
* Default: 100
* Group: filter
*/
default HdfsEndpointConsumerBuilder maxMessagesPerPoll(
String maxMessagesPerPoll) {
doSetProperty("maxMessagesPerPoll", maxMessagesPerPoll);
return this;
}
/**
* The number of subsequent error polls (failed due some error) that
* should happen before the backoffMultipler should kick-in.
*
* The option is a: <code>int</code> type.
*
* Group: scheduler
*/
default HdfsEndpointConsumerBuilder backoffErrorThreshold(
int backoffErrorThreshold) {
doSetProperty("backoffErrorThreshold", backoffErrorThreshold);
return this;
}
/**
* The number of subsequent error polls (failed due some error) that
* should happen before the backoffMultipler should kick-in.
*
* The option will be converted to a <code>int</code> type.
*
* Group: scheduler
*/
default HdfsEndpointConsumerBuilder backoffErrorThreshold(
String backoffErrorThreshold) {
doSetProperty("backoffErrorThreshold", backoffErrorThreshold);
return this;
}
/**
* The number of subsequent idle polls that should happen before the
* backoffMultipler should kick-in.
*
* The option is a: <code>int</code> type.
*
* Group: scheduler
*/
default HdfsEndpointConsumerBuilder backoffIdleThreshold(
int backoffIdleThreshold) {
doSetProperty("backoffIdleThreshold", backoffIdleThreshold);
return this;
}
/**
* The number of subsequent idle polls that should happen before the
* backoffMultipler should kick-in.
*
* The option will be converted to a <code>int</code> type.
*
* Group: scheduler
*/
default HdfsEndpointConsumerBuilder backoffIdleThreshold(
String backoffIdleThreshold) {
doSetProperty("backoffIdleThreshold", backoffIdleThreshold);
return this;
}
/**
* To let the scheduled polling consumer backoff if there has been a
* number of subsequent idles/errors in a row. The multiplier is then
* the number of polls that will be skipped before the next actual
* attempt is happening again. When this option is in use then
* backoffIdleThreshold and/or backoffErrorThreshold must also be
* configured.
*
* The option is a: <code>int</code> type.
*
* Group: scheduler
*/
default HdfsEndpointConsumerBuilder backoffMultiplier(
int backoffMultiplier) {
doSetProperty("backoffMultiplier", backoffMultiplier);
return this;
}
/**
* To let the scheduled polling consumer backoff if there has been a
* number of subsequent idles/errors in a row. The multiplier is then
* the number of polls that will be skipped before the next actual
* attempt is happening again. When this option is in use then
* backoffIdleThreshold and/or backoffErrorThreshold must also be
* configured.
*
* The option will be converted to a <code>int</code> type.
*
* Group: scheduler
*/
default HdfsEndpointConsumerBuilder backoffMultiplier(
String backoffMultiplier) {
doSetProperty("backoffMultiplier", backoffMultiplier);
return this;
}
/**
* Milliseconds before the next poll.
*
* The option is a: <code>long</code> type.
*
* Default: 500
* Group: scheduler
*/
default HdfsEndpointConsumerBuilder delay(long delay) {
doSetProperty("delay", delay);
return this;
}
/**
* Milliseconds before the next poll.
*
* The option will be converted to a <code>long</code> type.
*
* Default: 500
* Group: scheduler
*/
default HdfsEndpointConsumerBuilder delay(String delay) {
doSetProperty("delay", delay);
return this;
}
/**
* If greedy is enabled, then the ScheduledPollConsumer will run
* immediately again, if the previous run polled 1 or more messages.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: scheduler
*/
default HdfsEndpointConsumerBuilder greedy(boolean greedy) {
doSetProperty("greedy", greedy);
return this;
}
/**
* If greedy is enabled, then the ScheduledPollConsumer will run
* immediately again, if the previous run polled 1 or more messages.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: scheduler
*/
default HdfsEndpointConsumerBuilder greedy(String greedy) {
doSetProperty("greedy", greedy);
return this;
}
/**
* Milliseconds before the first poll starts.
*
* The option is a: <code>long</code> type.
*
* Default: 1000
* Group: scheduler
*/
default HdfsEndpointConsumerBuilder initialDelay(long initialDelay) {
doSetProperty("initialDelay", initialDelay);
return this;
}
/**
* Milliseconds before the first poll starts.
*
* The option will be converted to a <code>long</code> type.
*
* Default: 1000
* Group: scheduler
*/
default HdfsEndpointConsumerBuilder initialDelay(String initialDelay) {
doSetProperty("initialDelay", initialDelay);
return this;
}
/**
* Specifies a maximum limit of number of fires. So if you set it to 1,
* the scheduler will only fire once. If you set it to 5, it will only
* fire five times. A value of zero or negative means fire forever.
*
* The option is a: <code>long</code> type.
*
* Default: 0
* Group: scheduler
*/
default HdfsEndpointConsumerBuilder repeatCount(long repeatCount) {
doSetProperty("repeatCount", repeatCount);
return this;
}
/**
* Specifies a maximum limit of number of fires. So if you set it to 1,
* the scheduler will only fire once. If you set it to 5, it will only
* fire five times. A value of zero or negative means fire forever.
*
* The option will be converted to a <code>long</code> type.
*
* Default: 0
* Group: scheduler
*/
default HdfsEndpointConsumerBuilder repeatCount(String repeatCount) {
doSetProperty("repeatCount", repeatCount);
return this;
}
/**
* The consumer logs a start/complete log line when it polls. This
* option allows you to configure the logging level for that.
*
* The option is a: <code>org.apache.camel.LoggingLevel</code> type.
*
* Default: TRACE
* Group: scheduler
*/
default HdfsEndpointConsumerBuilder runLoggingLevel(
LoggingLevel runLoggingLevel) {
doSetProperty("runLoggingLevel", runLoggingLevel);
return this;
}
/**
* The consumer logs a start/complete log line when it polls. This
* option allows you to configure the logging level for that.
*
* The option will be converted to a
* <code>org.apache.camel.LoggingLevel</code> type.
*
* Default: TRACE
* Group: scheduler
*/
default HdfsEndpointConsumerBuilder runLoggingLevel(
String runLoggingLevel) {
doSetProperty("runLoggingLevel", runLoggingLevel);
return this;
}
/**
* Allows for configuring a custom/shared thread pool to use for the
* consumer. By default each consumer has its own single threaded thread
* pool.
*
* The option is a:
* <code>java.util.concurrent.ScheduledExecutorService</code> type.
*
* Group: scheduler
*/
default HdfsEndpointConsumerBuilder scheduledExecutorService(
ScheduledExecutorService scheduledExecutorService) {
doSetProperty("scheduledExecutorService", scheduledExecutorService);
return this;
}
/**
* Allows for configuring a custom/shared thread pool to use for the
* consumer. By default each consumer has its own single threaded thread
* pool.
*
* The option will be converted to a
* <code>java.util.concurrent.ScheduledExecutorService</code> type.
*
* Group: scheduler
*/
default HdfsEndpointConsumerBuilder scheduledExecutorService(
String scheduledExecutorService) {
doSetProperty("scheduledExecutorService", scheduledExecutorService);
return this;
}
/**
* To use a cron scheduler from either camel-spring or camel-quartz
* component. Use value spring or quartz for built in scheduler.
*
* The option is a: <code>java.lang.Object</code> type.
*
* Default: none
* Group: scheduler
*/
default HdfsEndpointConsumerBuilder scheduler(Object scheduler) {
doSetProperty("scheduler", scheduler);
return this;
}
/**
* To use a cron scheduler from either camel-spring or camel-quartz
* component. Use value spring or quartz for built in scheduler.
*
* The option will be converted to a <code>java.lang.Object</code> type.
*
* Default: none
* Group: scheduler
*/
default HdfsEndpointConsumerBuilder scheduler(String scheduler) {
doSetProperty("scheduler", scheduler);
return this;
}
/**
* To configure additional properties when using a custom scheduler or
* any of the Quartz, Spring based scheduler.
*
* The option is a: <code>java.util.Map<java.lang.String,
* java.lang.Object></code> type.
* The option is multivalued, and you can use the
* schedulerProperties(String, Object) method to add a value (call the
* method multiple times to set more values).
*
* Group: scheduler
*/
default HdfsEndpointConsumerBuilder schedulerProperties(
String key,
Object value) {
doSetMultiValueProperty("schedulerProperties", "scheduler." + key, value);
return this;
}
/**
* To configure additional properties when using a custom scheduler or
* any of the Quartz, Spring based scheduler.
*
* The option is a: <code>java.util.Map<java.lang.String,
* java.lang.Object></code> type.
* The option is multivalued, and you can use the
* schedulerProperties(String, Object) method to add a value (call the
* method multiple times to set more values).
*
* Group: scheduler
*/
default HdfsEndpointConsumerBuilder schedulerProperties(Map values) {
doSetMultiValueProperties("schedulerProperties", "scheduler.", values);
return this;
}
/**
* Whether the scheduler should be auto started.
*
* The option is a: <code>boolean</code> type.
*
* Default: true
* Group: scheduler
*/
default HdfsEndpointConsumerBuilder startScheduler(
boolean startScheduler) {
doSetProperty("startScheduler", startScheduler);
return this;
}
/**
* Whether the scheduler should be auto started.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: true
* Group: scheduler
*/
default HdfsEndpointConsumerBuilder startScheduler(String startScheduler) {
doSetProperty("startScheduler", startScheduler);
return this;
}
/**
* Time unit for initialDelay and delay options.
*
* The option is a: <code>java.util.concurrent.TimeUnit</code> type.
*
* Default: MILLISECONDS
* Group: scheduler
*/
default HdfsEndpointConsumerBuilder timeUnit(TimeUnit timeUnit) {
doSetProperty("timeUnit", timeUnit);
return this;
}
/**
* Time unit for initialDelay and delay options.
*
* The option will be converted to a
* <code>java.util.concurrent.TimeUnit</code> type.
*
* Default: MILLISECONDS
* Group: scheduler
*/
default HdfsEndpointConsumerBuilder timeUnit(String timeUnit) {
doSetProperty("timeUnit", timeUnit);
return this;
}
/**
* Controls if fixed delay or fixed rate is used. See
* ScheduledExecutorService in JDK for details.
*
* The option is a: <code>boolean</code> type.
*
* Default: true
* Group: scheduler
*/
default HdfsEndpointConsumerBuilder useFixedDelay(boolean useFixedDelay) {
doSetProperty("useFixedDelay", useFixedDelay);
return this;
}
/**
* Controls if fixed delay or fixed rate is used. See
* ScheduledExecutorService in JDK for details.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: true
* Group: scheduler
*/
default HdfsEndpointConsumerBuilder useFixedDelay(String useFixedDelay) {
doSetProperty("useFixedDelay", useFixedDelay);
return this;
}
/**
* The location of the kerb5.conf file
* (https://web.mit.edu/kerberos/krb5-1.12/doc/admin/conf_files/krb5_conf.html).
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*/
default HdfsEndpointConsumerBuilder kerberosConfigFileLocation(
String kerberosConfigFileLocation) {
doSetProperty("kerberosConfigFileLocation", kerberosConfigFileLocation);
return this;
}
/**
* The location of the keytab file used to authenticate with the
* kerberos nodes (contains pairs of kerberos principals and encrypted
* keys (which are derived from the Kerberos password)).
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*/
default HdfsEndpointConsumerBuilder kerberosKeytabLocation(
String kerberosKeytabLocation) {
doSetProperty("kerberosKeytabLocation", kerberosKeytabLocation);
return this;
}
/**
* The username used to authenticate with the kerberos nodes.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*/
default HdfsEndpointConsumerBuilder kerberosUsername(
String kerberosUsername) {
doSetProperty("kerberosUsername", kerberosUsername);
return this;
}
}
/**
* Advanced builder for endpoint consumers for the HDFS component.
*/
public interface AdvancedHdfsEndpointConsumerBuilder
extends
EndpointConsumerBuilder {
default HdfsEndpointConsumerBuilder basic() {
return (HdfsEndpointConsumerBuilder) this;
}
/**
* To let the consumer use a custom ExceptionHandler. Notice if the
* option bridgeErrorHandler is enabled then this option is not in use.
* By default the consumer will deal with exceptions, that will be
* logged at WARN or ERROR level and ignored.
*
* The option is a: <code>org.apache.camel.spi.ExceptionHandler</code>
* type.
*
* Group: consumer (advanced)
*/
default AdvancedHdfsEndpointConsumerBuilder exceptionHandler(
ExceptionHandler exceptionHandler) {
doSetProperty("exceptionHandler", exceptionHandler);
return this;
}
/**
* To let the consumer use a custom ExceptionHandler. Notice if the
* option bridgeErrorHandler is enabled then this option is not in use.
* By default the consumer will deal with exceptions, that will be
* logged at WARN or ERROR level and ignored.
*
* The option will be converted to a
* <code>org.apache.camel.spi.ExceptionHandler</code> type.
*
* Group: consumer (advanced)
*/
default AdvancedHdfsEndpointConsumerBuilder exceptionHandler(
String exceptionHandler) {
doSetProperty("exceptionHandler", exceptionHandler);
return this;
}
/**
* Sets the exchange pattern when the consumer creates an exchange.
*
* The option is a: <code>org.apache.camel.ExchangePattern</code> type.
*
* Group: consumer (advanced)
*/
default AdvancedHdfsEndpointConsumerBuilder exchangePattern(
ExchangePattern exchangePattern) {
doSetProperty("exchangePattern", exchangePattern);
return this;
}
/**
* Sets the exchange pattern when the consumer creates an exchange.
*
* The option will be converted to a
* <code>org.apache.camel.ExchangePattern</code> type.
*
* Group: consumer (advanced)
*/
default AdvancedHdfsEndpointConsumerBuilder exchangePattern(
String exchangePattern) {
doSetProperty("exchangePattern", exchangePattern);
return this;
}
/**
* A pluggable org.apache.camel.PollingConsumerPollingStrategy allowing
* you to provide your custom implementation to control error handling
* usually occurred during the poll operation before an Exchange have
* been created and being routed in Camel.
*
* The option is a:
* <code>org.apache.camel.spi.PollingConsumerPollStrategy</code> type.
*
* Group: consumer (advanced)
*/
default AdvancedHdfsEndpointConsumerBuilder pollStrategy(
PollingConsumerPollStrategy pollStrategy) {
doSetProperty("pollStrategy", pollStrategy);
return this;
}
/**
* A pluggable org.apache.camel.PollingConsumerPollingStrategy allowing
* you to provide your custom implementation to control error handling
* usually occurred during the poll operation before an Exchange have
* been created and being routed in Camel.
*
* The option will be converted to a
* <code>org.apache.camel.spi.PollingConsumerPollStrategy</code> type.
*
* Group: consumer (advanced)
*/
default AdvancedHdfsEndpointConsumerBuilder pollStrategy(
String pollStrategy) {
doSetProperty("pollStrategy", pollStrategy);
return this;
}
/**
* The size of the HDFS blocks.
*
* The option is a: <code>long</code> type.
*
* Default: 67108864
* Group: advanced
*/
default AdvancedHdfsEndpointConsumerBuilder blockSize(long blockSize) {
doSetProperty("blockSize", blockSize);
return this;
}
/**
* The size of the HDFS blocks.
*
* The option will be converted to a <code>long</code> type.
*
* Default: 67108864
* Group: advanced
*/
default AdvancedHdfsEndpointConsumerBuilder blockSize(String blockSize) {
doSetProperty("blockSize", blockSize);
return this;
}
/**
* The buffer size used by HDFS.
*
* The option is a: <code>int</code> type.
*
* Default: 4096
* Group: advanced
*/
default AdvancedHdfsEndpointConsumerBuilder bufferSize(int bufferSize) {
doSetProperty("bufferSize", bufferSize);
return this;
}
/**
* The buffer size used by HDFS.
*
* The option will be converted to a <code>int</code> type.
*
* Default: 4096
* Group: advanced
*/
default AdvancedHdfsEndpointConsumerBuilder bufferSize(String bufferSize) {
doSetProperty("bufferSize", bufferSize);
return this;
}
/**
* How often (time in millis) in to run the idle checker background
* task. This option is only in use if the splitter strategy is IDLE.
*
* The option is a: <code>int</code> type.
*
* Default: 500
* Group: advanced
*/
default AdvancedHdfsEndpointConsumerBuilder checkIdleInterval(
int checkIdleInterval) {
doSetProperty("checkIdleInterval", checkIdleInterval);
return this;
}
/**
* How often (time in millis) in to run the idle checker background
* task. This option is only in use if the splitter strategy is IDLE.
*
* The option will be converted to a <code>int</code> type.
*
* Default: 500
* Group: advanced
*/
default AdvancedHdfsEndpointConsumerBuilder checkIdleInterval(
String checkIdleInterval) {
doSetProperty("checkIdleInterval", checkIdleInterval);
return this;
}
/**
* When reading a normal file, this is split into chunks producing a
* message per chunk.
*
* The option is a: <code>int</code> type.
*
* Default: 4096
* Group: advanced
*/
default AdvancedHdfsEndpointConsumerBuilder chunkSize(int chunkSize) {
doSetProperty("chunkSize", chunkSize);
return this;
}
/**
* When reading a normal file, this is split into chunks producing a
* message per chunk.
*
* The option will be converted to a <code>int</code> type.
*
* Default: 4096
* Group: advanced
*/
default AdvancedHdfsEndpointConsumerBuilder chunkSize(String chunkSize) {
doSetProperty("chunkSize", chunkSize);
return this;
}
/**
* The compression codec to use.
*
* The option is a:
* <code>org.apache.camel.component.hdfs.HdfsCompressionCodec</code>
* type.
*
* Default: DEFAULT
* Group: advanced
*/
default AdvancedHdfsEndpointConsumerBuilder compressionCodec(
HdfsCompressionCodec compressionCodec) {
doSetProperty("compressionCodec", compressionCodec);
return this;
}
/**
* The compression codec to use.
*
* The option will be converted to a
* <code>org.apache.camel.component.hdfs.HdfsCompressionCodec</code>
* type.
*
* Default: DEFAULT
* Group: advanced
*/
default AdvancedHdfsEndpointConsumerBuilder compressionCodec(
String compressionCodec) {
doSetProperty("compressionCodec", compressionCodec);
return this;
}
/**
* The compression type to use (is default not in use).
*
* The option is a:
* <code>org.apache.hadoop.io.SequenceFile$CompressionType</code> type.
*
* Default: NONE
* Group: advanced
*/
default AdvancedHdfsEndpointConsumerBuilder compressionType(
CompressionType compressionType) {
doSetProperty("compressionType", compressionType);
return this;
}
/**
* The compression type to use (is default not in use).
*
* The option will be converted to a
* <code>org.apache.hadoop.io.SequenceFile$CompressionType</code> type.
*
* Default: NONE
* Group: advanced
*/
default AdvancedHdfsEndpointConsumerBuilder compressionType(
String compressionType) {
doSetProperty("compressionType", compressionType);
return this;
}
/**
* When a file is opened for reading/writing the file is renamed with
* this suffix to avoid to read it during the writing phase.
*
* The option is a: <code>java.lang.String</code> type.
*
* Default: opened
* Group: advanced
*/
default AdvancedHdfsEndpointConsumerBuilder openedSuffix(
String openedSuffix) {
doSetProperty("openedSuffix", openedSuffix);
return this;
}
/**
* Once the file has been read is renamed with this suffix to avoid to
* read it again.
*
* The option is a: <code>java.lang.String</code> type.
*
* Default: read
* Group: advanced
*/
default AdvancedHdfsEndpointConsumerBuilder readSuffix(String readSuffix) {
doSetProperty("readSuffix", readSuffix);
return this;
}
/**
* The HDFS replication factor.
*
* The option is a: <code>short</code> type.
*
* Default: 3
* Group: advanced
*/
default AdvancedHdfsEndpointConsumerBuilder replication(
short replication) {
doSetProperty("replication", replication);
return this;
}
/**
* The HDFS replication factor.
*
* The option will be converted to a <code>short</code> type.
*
* Default: 3
* Group: advanced
*/
default AdvancedHdfsEndpointConsumerBuilder replication(
String replication) {
doSetProperty("replication", replication);
return this;
}
/**
* In the current version of Hadoop opening a file in append mode is
* disabled since it's not very reliable. So, for the moment, it's only
* possible to create new files. The Camel HDFS endpoint tries to solve
* this problem in this way: If the split strategy option has been
* defined, the hdfs path will be used as a directory and files will be
* created using the configured UuidGenerator. Every time a splitting
* condition is met, a new file is created. The splitStrategy option is
* defined as a string with the following syntax:
* splitStrategy=ST:value,ST:value,... where ST can be: BYTES a new file
* is created, and the old is closed when the number of written bytes is
* more than value MESSAGES a new file is created, and the old is closed
* when the number of written messages is more than value IDLE a new
* file is created, and the old is closed when no writing happened in
* the last value milliseconds.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: advanced
*/
default AdvancedHdfsEndpointConsumerBuilder splitStrategy(
String splitStrategy) {
doSetProperty("splitStrategy", splitStrategy);
return this;
}
/**
* Sets whether synchronous processing should be strictly used, or Camel
* is allowed to use asynchronous processing (if supported).
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: advanced
*/
default AdvancedHdfsEndpointConsumerBuilder synchronous(
boolean synchronous) {
doSetProperty("synchronous", synchronous);
return this;
}
/**
* Sets whether synchronous processing should be strictly used, or Camel
* is allowed to use asynchronous processing (if supported).
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: advanced
*/
default AdvancedHdfsEndpointConsumerBuilder synchronous(
String synchronous) {
doSetProperty("synchronous", synchronous);
return this;
}
}
/**
* Builder for endpoint producers for the HDFS component.
*/
public interface HdfsEndpointProducerBuilder
extends
EndpointProducerBuilder {
default AdvancedHdfsEndpointProducerBuilder advanced() {
return (AdvancedHdfsEndpointProducerBuilder) this;
}
/**
* Whether to connect to the HDFS file system on starting the
* producer/consumer. If false then the connection is created on-demand.
* Notice that HDFS may take up till 15 minutes to establish a
* connection, as it has hardcoded 45 x 20 sec redelivery. By setting
* this option to false allows your application to startup, and not
* block for up till 15 minutes.
*
* The option is a: <code>boolean</code> type.
*
* Default: true
* Group: common
*/
default HdfsEndpointProducerBuilder connectOnStartup(
boolean connectOnStartup) {
doSetProperty("connectOnStartup", connectOnStartup);
return this;
}
/**
* Whether to connect to the HDFS file system on starting the
* producer/consumer. If false then the connection is created on-demand.
* Notice that HDFS may take up till 15 minutes to establish a
* connection, as it has hardcoded 45 x 20 sec redelivery. By setting
* this option to false allows your application to startup, and not
* block for up till 15 minutes.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: true
* Group: common
*/
default HdfsEndpointProducerBuilder connectOnStartup(
String connectOnStartup) {
doSetProperty("connectOnStartup", connectOnStartup);
return this;
}
/**
* Set to LOCAL to not use HDFS but local java.io.File instead.
*
* The option is a:
* <code>org.apache.camel.component.hdfs.HdfsFileSystemType</code> type.
*
* Default: HDFS
* Group: common
*/
default HdfsEndpointProducerBuilder fileSystemType(
HdfsFileSystemType fileSystemType) {
doSetProperty("fileSystemType", fileSystemType);
return this;
}
/**
* Set to LOCAL to not use HDFS but local java.io.File instead.
*
* The option will be converted to a
* <code>org.apache.camel.component.hdfs.HdfsFileSystemType</code> type.
*
* Default: HDFS
* Group: common
*/
default HdfsEndpointProducerBuilder fileSystemType(String fileSystemType) {
doSetProperty("fileSystemType", fileSystemType);
return this;
}
/**
* The file type to use. For more details see Hadoop HDFS documentation
* about the various files types.
*
* The option is a:
* <code>org.apache.camel.component.hdfs.HdfsFileType</code> type.
*
* Default: NORMAL_FILE
* Group: common
*/
default HdfsEndpointProducerBuilder fileType(HdfsFileType fileType) {
doSetProperty("fileType", fileType);
return this;
}
/**
* The file type to use. For more details see Hadoop HDFS documentation
* about the various files types.
*
* The option will be converted to a
* <code>org.apache.camel.component.hdfs.HdfsFileType</code> type.
*
* Default: NORMAL_FILE
* Group: common
*/
default HdfsEndpointProducerBuilder fileType(String fileType) {
doSetProperty("fileType", fileType);
return this;
}
/**
* The type for the key in case of sequence or map files.
*
* The option is a:
* <code>org.apache.camel.component.hdfs.WritableType</code> type.
*
* Default: NULL
* Group: common
*/
default HdfsEndpointProducerBuilder keyType(WritableType keyType) {
doSetProperty("keyType", keyType);
return this;
}
/**
* The type for the key in case of sequence or map files.
*
* The option will be converted to a
* <code>org.apache.camel.component.hdfs.WritableType</code> type.
*
* Default: NULL
* Group: common
*/
default HdfsEndpointProducerBuilder keyType(String keyType) {
doSetProperty("keyType", keyType);
return this;
}
/**
* A comma separated list of named nodes (e.g.
* srv11.example.com:8020,srv12.example.com:8020).
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*/
default HdfsEndpointProducerBuilder namedNodes(String namedNodes) {
doSetProperty("namedNodes", namedNodes);
return this;
}
/**
* The file owner must match this owner for the consumer to pickup the
* file. Otherwise the file is skipped.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*/
default HdfsEndpointProducerBuilder owner(String owner) {
doSetProperty("owner", owner);
return this;
}
/**
* The type for the key in case of sequence or map files.
*
* The option is a:
* <code>org.apache.camel.component.hdfs.WritableType</code> type.
*
* Default: BYTES
* Group: common
*/
default HdfsEndpointProducerBuilder valueType(WritableType valueType) {
doSetProperty("valueType", valueType);
return this;
}
/**
* The type for the key in case of sequence or map files.
*
* The option will be converted to a
* <code>org.apache.camel.component.hdfs.WritableType</code> type.
*
* Default: BYTES
* Group: common
*/
default HdfsEndpointProducerBuilder valueType(String valueType) {
doSetProperty("valueType", valueType);
return this;
}
/**
* Append to existing file. Notice that not all HDFS file systems
* support the append option.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: producer
*/
default HdfsEndpointProducerBuilder append(boolean append) {
doSetProperty("append", append);
return this;
}
/**
* Append to existing file. Notice that not all HDFS file systems
* support the append option.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: producer
*/
default HdfsEndpointProducerBuilder append(String append) {
doSetProperty("append", append);
return this;
}
/**
* Whether the producer should be started lazy (on the first message).
* By starting lazy you can use this to allow CamelContext and routes to
* startup in situations where a producer may otherwise fail during
* starting and cause the route to fail being started. By deferring this
* startup to be lazy then the startup failure can be handled during
* routing messages via Camel's routing error handlers. Beware that when
* the first message is processed then creating and starting the
* producer may take a little time and prolong the total processing time
* of the processing.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: producer
*/
default HdfsEndpointProducerBuilder lazyStartProducer(
boolean lazyStartProducer) {
doSetProperty("lazyStartProducer", lazyStartProducer);
return this;
}
/**
* Whether the producer should be started lazy (on the first message).
* By starting lazy you can use this to allow CamelContext and routes to
* startup in situations where a producer may otherwise fail during
* starting and cause the route to fail being started. By deferring this
* startup to be lazy then the startup failure can be handled during
* routing messages via Camel's routing error handlers. Beware that when
* the first message is processed then creating and starting the
* producer may take a little time and prolong the total processing time
* of the processing.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: producer
*/
default HdfsEndpointProducerBuilder lazyStartProducer(
String lazyStartProducer) {
doSetProperty("lazyStartProducer", lazyStartProducer);
return this;
}
/**
* Whether to overwrite existing files with the same name.
*
* The option is a: <code>boolean</code> type.
*
* Default: true
* Group: producer
*/
default HdfsEndpointProducerBuilder overwrite(boolean overwrite) {
doSetProperty("overwrite", overwrite);
return this;
}
/**
* Whether to overwrite existing files with the same name.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: true
* Group: producer
*/
default HdfsEndpointProducerBuilder overwrite(String overwrite) {
doSetProperty("overwrite", overwrite);
return this;
}
/**
* The location of the kerb5.conf file
* (https://web.mit.edu/kerberos/krb5-1.12/doc/admin/conf_files/krb5_conf.html).
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*/
default HdfsEndpointProducerBuilder kerberosConfigFileLocation(
String kerberosConfigFileLocation) {
doSetProperty("kerberosConfigFileLocation", kerberosConfigFileLocation);
return this;
}
/**
* The location of the keytab file used to authenticate with the
* kerberos nodes (contains pairs of kerberos principals and encrypted
* keys (which are derived from the Kerberos password)).
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*/
default HdfsEndpointProducerBuilder kerberosKeytabLocation(
String kerberosKeytabLocation) {
doSetProperty("kerberosKeytabLocation", kerberosKeytabLocation);
return this;
}
/**
* The username used to authenticate with the kerberos nodes.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*/
default HdfsEndpointProducerBuilder kerberosUsername(
String kerberosUsername) {
doSetProperty("kerberosUsername", kerberosUsername);
return this;
}
}
/**
* Advanced builder for endpoint producers for the HDFS component.
*/
public interface AdvancedHdfsEndpointProducerBuilder
extends
EndpointProducerBuilder {
default HdfsEndpointProducerBuilder basic() {
return (HdfsEndpointProducerBuilder) this;
}
/**
* The size of the HDFS blocks.
*
* The option is a: <code>long</code> type.
*
* Default: 67108864
* Group: advanced
*/
default AdvancedHdfsEndpointProducerBuilder blockSize(long blockSize) {
doSetProperty("blockSize", blockSize);
return this;
}
/**
* The size of the HDFS blocks.
*
* The option will be converted to a <code>long</code> type.
*
* Default: 67108864
* Group: advanced
*/
default AdvancedHdfsEndpointProducerBuilder blockSize(String blockSize) {
doSetProperty("blockSize", blockSize);
return this;
}
/**
* The buffer size used by HDFS.
*
* The option is a: <code>int</code> type.
*
* Default: 4096
* Group: advanced
*/
default AdvancedHdfsEndpointProducerBuilder bufferSize(int bufferSize) {
doSetProperty("bufferSize", bufferSize);
return this;
}
/**
* The buffer size used by HDFS.
*
* The option will be converted to a <code>int</code> type.
*
* Default: 4096
* Group: advanced
*/
default AdvancedHdfsEndpointProducerBuilder bufferSize(String bufferSize) {
doSetProperty("bufferSize", bufferSize);
return this;
}
/**
* How often (time in millis) in to run the idle checker background
* task. This option is only in use if the splitter strategy is IDLE.
*
* The option is a: <code>int</code> type.
*
* Default: 500
* Group: advanced
*/
default AdvancedHdfsEndpointProducerBuilder checkIdleInterval(
int checkIdleInterval) {
doSetProperty("checkIdleInterval", checkIdleInterval);
return this;
}
/**
* How often (time in millis) in to run the idle checker background
* task. This option is only in use if the splitter strategy is IDLE.
*
* The option will be converted to a <code>int</code> type.
*
* Default: 500
* Group: advanced
*/
default AdvancedHdfsEndpointProducerBuilder checkIdleInterval(
String checkIdleInterval) {
doSetProperty("checkIdleInterval", checkIdleInterval);
return this;
}
/**
* When reading a normal file, this is split into chunks producing a
* message per chunk.
*
* The option is a: <code>int</code> type.
*
* Default: 4096
* Group: advanced
*/
default AdvancedHdfsEndpointProducerBuilder chunkSize(int chunkSize) {
doSetProperty("chunkSize", chunkSize);
return this;
}
/**
* When reading a normal file, this is split into chunks producing a
* message per chunk.
*
* The option will be converted to a <code>int</code> type.
*
* Default: 4096
* Group: advanced
*/
default AdvancedHdfsEndpointProducerBuilder chunkSize(String chunkSize) {
doSetProperty("chunkSize", chunkSize);
return this;
}
/**
* The compression codec to use.
*
* The option is a:
* <code>org.apache.camel.component.hdfs.HdfsCompressionCodec</code>
* type.
*
* Default: DEFAULT
* Group: advanced
*/
default AdvancedHdfsEndpointProducerBuilder compressionCodec(
HdfsCompressionCodec compressionCodec) {
doSetProperty("compressionCodec", compressionCodec);
return this;
}
/**
* The compression codec to use.
*
* The option will be converted to a
* <code>org.apache.camel.component.hdfs.HdfsCompressionCodec</code>
* type.
*
* Default: DEFAULT
* Group: advanced
*/
default AdvancedHdfsEndpointProducerBuilder compressionCodec(
String compressionCodec) {
doSetProperty("compressionCodec", compressionCodec);
return this;
}
/**
* The compression type to use (is default not in use).
*
* The option is a:
* <code>org.apache.hadoop.io.SequenceFile$CompressionType</code> type.
*
* Default: NONE
* Group: advanced
*/
default AdvancedHdfsEndpointProducerBuilder compressionType(
CompressionType compressionType) {
doSetProperty("compressionType", compressionType);
return this;
}
/**
* The compression type to use (is default not in use).
*
* The option will be converted to a
* <code>org.apache.hadoop.io.SequenceFile$CompressionType</code> type.
*
* Default: NONE
* Group: advanced
*/
default AdvancedHdfsEndpointProducerBuilder compressionType(
String compressionType) {
doSetProperty("compressionType", compressionType);
return this;
}
/**
* When a file is opened for reading/writing the file is renamed with
* this suffix to avoid to read it during the writing phase.
*
* The option is a: <code>java.lang.String</code> type.
*
* Default: opened
* Group: advanced
*/
default AdvancedHdfsEndpointProducerBuilder openedSuffix(
String openedSuffix) {
doSetProperty("openedSuffix", openedSuffix);
return this;
}
/**
* Once the file has been read is renamed with this suffix to avoid to
* read it again.
*
* The option is a: <code>java.lang.String</code> type.
*
* Default: read
* Group: advanced
*/
default AdvancedHdfsEndpointProducerBuilder readSuffix(String readSuffix) {
doSetProperty("readSuffix", readSuffix);
return this;
}
/**
* The HDFS replication factor.
*
* The option is a: <code>short</code> type.
*
* Default: 3
* Group: advanced
*/
default AdvancedHdfsEndpointProducerBuilder replication(
short replication) {
doSetProperty("replication", replication);
return this;
}
/**
* The HDFS replication factor.
*
* The option will be converted to a <code>short</code> type.
*
* Default: 3
* Group: advanced
*/
default AdvancedHdfsEndpointProducerBuilder replication(
String replication) {
doSetProperty("replication", replication);
return this;
}
/**
* In the current version of Hadoop opening a file in append mode is
* disabled since it's not very reliable. So, for the moment, it's only
* possible to create new files. The Camel HDFS endpoint tries to solve
* this problem in this way: If the split strategy option has been
* defined, the hdfs path will be used as a directory and files will be
* created using the configured UuidGenerator. Every time a splitting
* condition is met, a new file is created. The splitStrategy option is
* defined as a string with the following syntax:
* splitStrategy=ST:value,ST:value,... where ST can be: BYTES a new file
* is created, and the old is closed when the number of written bytes is
* more than value MESSAGES a new file is created, and the old is closed
* when the number of written messages is more than value IDLE a new
* file is created, and the old is closed when no writing happened in
* the last value milliseconds.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: advanced
*/
default AdvancedHdfsEndpointProducerBuilder splitStrategy(
String splitStrategy) {
doSetProperty("splitStrategy", splitStrategy);
return this;
}
/**
* Sets whether synchronous processing should be strictly used, or Camel
* is allowed to use asynchronous processing (if supported).
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: advanced
*/
default AdvancedHdfsEndpointProducerBuilder synchronous(
boolean synchronous) {
doSetProperty("synchronous", synchronous);
return this;
}
/**
* Sets whether synchronous processing should be strictly used, or Camel
* is allowed to use asynchronous processing (if supported).
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: advanced
*/
default AdvancedHdfsEndpointProducerBuilder synchronous(
String synchronous) {
doSetProperty("synchronous", synchronous);
return this;
}
}
/**
* Builder for endpoint for the HDFS component.
*/
public interface HdfsEndpointBuilder
extends
HdfsEndpointConsumerBuilder,
HdfsEndpointProducerBuilder {
default AdvancedHdfsEndpointBuilder advanced() {
return (AdvancedHdfsEndpointBuilder) this;
}
/**
* Whether to connect to the HDFS file system on starting the
* producer/consumer. If false then the connection is created on-demand.
* Notice that HDFS may take up till 15 minutes to establish a
* connection, as it has hardcoded 45 x 20 sec redelivery. By setting
* this option to false allows your application to startup, and not
* block for up till 15 minutes.
*
* The option is a: <code>boolean</code> type.
*
* Default: true
* Group: common
*/
default HdfsEndpointBuilder connectOnStartup(boolean connectOnStartup) {
doSetProperty("connectOnStartup", connectOnStartup);
return this;
}
/**
* Whether to connect to the HDFS file system on starting the
* producer/consumer. If false then the connection is created on-demand.
* Notice that HDFS may take up till 15 minutes to establish a
* connection, as it has hardcoded 45 x 20 sec redelivery. By setting
* this option to false allows your application to startup, and not
* block for up till 15 minutes.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: true
* Group: common
*/
default HdfsEndpointBuilder connectOnStartup(String connectOnStartup) {
doSetProperty("connectOnStartup", connectOnStartup);
return this;
}
/**
* Set to LOCAL to not use HDFS but local java.io.File instead.
*
* The option is a:
* <code>org.apache.camel.component.hdfs.HdfsFileSystemType</code> type.
*
* Default: HDFS
* Group: common
*/
default HdfsEndpointBuilder fileSystemType(
HdfsFileSystemType fileSystemType) {
doSetProperty("fileSystemType", fileSystemType);
return this;
}
/**
* Set to LOCAL to not use HDFS but local java.io.File instead.
*
* The option will be converted to a
* <code>org.apache.camel.component.hdfs.HdfsFileSystemType</code> type.
*
* Default: HDFS
* Group: common
*/
default HdfsEndpointBuilder fileSystemType(String fileSystemType) {
doSetProperty("fileSystemType", fileSystemType);
return this;
}
/**
* The file type to use. For more details see Hadoop HDFS documentation
* about the various files types.
*
* The option is a:
* <code>org.apache.camel.component.hdfs.HdfsFileType</code> type.
*
* Default: NORMAL_FILE
* Group: common
*/
default HdfsEndpointBuilder fileType(HdfsFileType fileType) {
doSetProperty("fileType", fileType);
return this;
}
/**
* The file type to use. For more details see Hadoop HDFS documentation
* about the various files types.
*
* The option will be converted to a
* <code>org.apache.camel.component.hdfs.HdfsFileType</code> type.
*
* Default: NORMAL_FILE
* Group: common
*/
default HdfsEndpointBuilder fileType(String fileType) {
doSetProperty("fileType", fileType);
return this;
}
/**
* The type for the key in case of sequence or map files.
*
* The option is a:
* <code>org.apache.camel.component.hdfs.WritableType</code> type.
*
* Default: NULL
* Group: common
*/
default HdfsEndpointBuilder keyType(WritableType keyType) {
doSetProperty("keyType", keyType);
return this;
}
/**
* The type for the key in case of sequence or map files.
*
* The option will be converted to a
* <code>org.apache.camel.component.hdfs.WritableType</code> type.
*
* Default: NULL
* Group: common
*/
default HdfsEndpointBuilder keyType(String keyType) {
doSetProperty("keyType", keyType);
return this;
}
/**
* A comma separated list of named nodes (e.g.
* srv11.example.com:8020,srv12.example.com:8020).
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*/
default HdfsEndpointBuilder namedNodes(String namedNodes) {
doSetProperty("namedNodes", namedNodes);
return this;
}
/**
* The file owner must match this owner for the consumer to pickup the
* file. Otherwise the file is skipped.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*/
default HdfsEndpointBuilder owner(String owner) {
doSetProperty("owner", owner);
return this;
}
/**
* The type for the key in case of sequence or map files.
*
* The option is a:
* <code>org.apache.camel.component.hdfs.WritableType</code> type.
*
* Default: BYTES
* Group: common
*/
default HdfsEndpointBuilder valueType(WritableType valueType) {
doSetProperty("valueType", valueType);
return this;
}
/**
* The type for the key in case of sequence or map files.
*
* The option will be converted to a
* <code>org.apache.camel.component.hdfs.WritableType</code> type.
*
* Default: BYTES
* Group: common
*/
default HdfsEndpointBuilder valueType(String valueType) {
doSetProperty("valueType", valueType);
return this;
}
/**
* The location of the kerb5.conf file
* (https://web.mit.edu/kerberos/krb5-1.12/doc/admin/conf_files/krb5_conf.html).
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*/
default HdfsEndpointBuilder kerberosConfigFileLocation(
String kerberosConfigFileLocation) {
doSetProperty("kerberosConfigFileLocation", kerberosConfigFileLocation);
return this;
}
/**
* The location of the keytab file used to authenticate with the
* kerberos nodes (contains pairs of kerberos principals and encrypted
* keys (which are derived from the Kerberos password)).
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*/
default HdfsEndpointBuilder kerberosKeytabLocation(
String kerberosKeytabLocation) {
doSetProperty("kerberosKeytabLocation", kerberosKeytabLocation);
return this;
}
/**
* The username used to authenticate with the kerberos nodes.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*/
default HdfsEndpointBuilder kerberosUsername(String kerberosUsername) {
doSetProperty("kerberosUsername", kerberosUsername);
return this;
}
}
/**
* Advanced builder for endpoint for the HDFS component.
*/
public interface AdvancedHdfsEndpointBuilder
extends
AdvancedHdfsEndpointConsumerBuilder,
AdvancedHdfsEndpointProducerBuilder {
default HdfsEndpointBuilder basic() {
return (HdfsEndpointBuilder) this;
}
/**
* The size of the HDFS blocks.
*
* The option is a: <code>long</code> type.
*
* Default: 67108864
* Group: advanced
*/
default AdvancedHdfsEndpointBuilder blockSize(long blockSize) {
doSetProperty("blockSize", blockSize);
return this;
}
/**
* The size of the HDFS blocks.
*
* The option will be converted to a <code>long</code> type.
*
* Default: 67108864
* Group: advanced
*/
default AdvancedHdfsEndpointBuilder blockSize(String blockSize) {
doSetProperty("blockSize", blockSize);
return this;
}
/**
* The buffer size used by HDFS.
*
* The option is a: <code>int</code> type.
*
* Default: 4096
* Group: advanced
*/
default AdvancedHdfsEndpointBuilder bufferSize(int bufferSize) {
doSetProperty("bufferSize", bufferSize);
return this;
}
/**
* The buffer size used by HDFS.
*
* The option will be converted to a <code>int</code> type.
*
* Default: 4096
* Group: advanced
*/
default AdvancedHdfsEndpointBuilder bufferSize(String bufferSize) {
doSetProperty("bufferSize", bufferSize);
return this;
}
/**
* How often (time in millis) in to run the idle checker background
* task. This option is only in use if the splitter strategy is IDLE.
*
* The option is a: <code>int</code> type.
*
* Default: 500
* Group: advanced
*/
default AdvancedHdfsEndpointBuilder checkIdleInterval(
int checkIdleInterval) {
doSetProperty("checkIdleInterval", checkIdleInterval);
return this;
}
/**
* How often (time in millis) in to run the idle checker background
* task. This option is only in use if the splitter strategy is IDLE.
*
* The option will be converted to a <code>int</code> type.
*
* Default: 500
* Group: advanced
*/
default AdvancedHdfsEndpointBuilder checkIdleInterval(
String checkIdleInterval) {
doSetProperty("checkIdleInterval", checkIdleInterval);
return this;
}
/**
* When reading a normal file, this is split into chunks producing a
* message per chunk.
*
* The option is a: <code>int</code> type.
*
* Default: 4096
* Group: advanced
*/
default AdvancedHdfsEndpointBuilder chunkSize(int chunkSize) {
doSetProperty("chunkSize", chunkSize);
return this;
}
/**
* When reading a normal file, this is split into chunks producing a
* message per chunk.
*
* The option will be converted to a <code>int</code> type.
*
* Default: 4096
* Group: advanced
*/
default AdvancedHdfsEndpointBuilder chunkSize(String chunkSize) {
doSetProperty("chunkSize", chunkSize);
return this;
}
/**
* The compression codec to use.
*
* The option is a:
* <code>org.apache.camel.component.hdfs.HdfsCompressionCodec</code>
* type.
*
* Default: DEFAULT
* Group: advanced
*/
default AdvancedHdfsEndpointBuilder compressionCodec(
HdfsCompressionCodec compressionCodec) {
doSetProperty("compressionCodec", compressionCodec);
return this;
}
/**
* The compression codec to use.
*
* The option will be converted to a
* <code>org.apache.camel.component.hdfs.HdfsCompressionCodec</code>
* type.
*
* Default: DEFAULT
* Group: advanced
*/
default AdvancedHdfsEndpointBuilder compressionCodec(
String compressionCodec) {
doSetProperty("compressionCodec", compressionCodec);
return this;
}
/**
* The compression type to use (is default not in use).
*
* The option is a:
* <code>org.apache.hadoop.io.SequenceFile$CompressionType</code> type.
*
* Default: NONE
* Group: advanced
*/
default AdvancedHdfsEndpointBuilder compressionType(
CompressionType compressionType) {
doSetProperty("compressionType", compressionType);
return this;
}
/**
* The compression type to use (is default not in use).
*
* The option will be converted to a
* <code>org.apache.hadoop.io.SequenceFile$CompressionType</code> type.
*
* Default: NONE
* Group: advanced
*/
default AdvancedHdfsEndpointBuilder compressionType(
String compressionType) {
doSetProperty("compressionType", compressionType);
return this;
}
/**
* When a file is opened for reading/writing the file is renamed with
* this suffix to avoid to read it during the writing phase.
*
* The option is a: <code>java.lang.String</code> type.
*
* Default: opened
* Group: advanced
*/
default AdvancedHdfsEndpointBuilder openedSuffix(String openedSuffix) {
doSetProperty("openedSuffix", openedSuffix);
return this;
}
/**
* Once the file has been read is renamed with this suffix to avoid to
* read it again.
*
* The option is a: <code>java.lang.String</code> type.
*
* Default: read
* Group: advanced
*/
default AdvancedHdfsEndpointBuilder readSuffix(String readSuffix) {
doSetProperty("readSuffix", readSuffix);
return this;
}
/**
* The HDFS replication factor.
*
* The option is a: <code>short</code> type.
*
* Default: 3
* Group: advanced
*/
default AdvancedHdfsEndpointBuilder replication(short replication) {
doSetProperty("replication", replication);
return this;
}
/**
* The HDFS replication factor.
*
* The option will be converted to a <code>short</code> type.
*
* Default: 3
* Group: advanced
*/
default AdvancedHdfsEndpointBuilder replication(String replication) {
doSetProperty("replication", replication);
return this;
}
/**
* In the current version of Hadoop opening a file in append mode is
* disabled since it's not very reliable. So, for the moment, it's only
* possible to create new files. The Camel HDFS endpoint tries to solve
* this problem in this way: If the split strategy option has been
* defined, the hdfs path will be used as a directory and files will be
* created using the configured UuidGenerator. Every time a splitting
* condition is met, a new file is created. The splitStrategy option is
* defined as a string with the following syntax:
* splitStrategy=ST:value,ST:value,... where ST can be: BYTES a new file
* is created, and the old is closed when the number of written bytes is
* more than value MESSAGES a new file is created, and the old is closed
* when the number of written messages is more than value IDLE a new
* file is created, and the old is closed when no writing happened in
* the last value milliseconds.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: advanced
*/
default AdvancedHdfsEndpointBuilder splitStrategy(String splitStrategy) {
doSetProperty("splitStrategy", splitStrategy);
return this;
}
/**
* Sets whether synchronous processing should be strictly used, or Camel
* is allowed to use asynchronous processing (if supported).
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: advanced
*/
default AdvancedHdfsEndpointBuilder synchronous(boolean synchronous) {
doSetProperty("synchronous", synchronous);
return this;
}
/**
* Sets whether synchronous processing should be strictly used, or Camel
* is allowed to use asynchronous processing (if supported).
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: advanced
*/
default AdvancedHdfsEndpointBuilder synchronous(String synchronous) {
doSetProperty("synchronous", synchronous);
return this;
}
}
/**
* Proxy enum for
* <code>org.apache.camel.component.hdfs.HdfsFileSystemType</code> enum.
*/
enum HdfsFileSystemType {
LOCAL,
HDFS;
}
/**
* Proxy enum for <code>org.apache.camel.component.hdfs.HdfsFileType</code>
* enum.
*/
enum HdfsFileType {
NORMAL_FILE,
SEQUENCE_FILE,
MAP_FILE,
BLOOMMAP_FILE,
ARRAY_FILE;
}
/**
* Proxy enum for <code>org.apache.camel.component.hdfs.WritableType</code>
* enum.
*/
enum WritableType {
NULL,
BOOLEAN,
BYTE,
INT,
FLOAT,
LONG,
DOUBLE,
TEXT,
BYTES;
}
/**
* Proxy enum for
* <code>org.apache.camel.component.hdfs.HdfsCompressionCodec</code> enum.
*/
enum HdfsCompressionCodec {
DEFAULT,
GZIP,
BZIP2;
}
/**
* Proxy enum for
* <code>org.apache.hadoop.io.SequenceFile$CompressionType</code> enum.
*/
enum CompressionType {
NONE,
RECORD,
BLOCK;
}
    /**
     * Mix-in interface exposing the DSL entry points that create endpoint
     * builders for the "hdfs" scheme (or a custom component name).
     */
    public interface HdfsBuilders {
        /**
         * HDFS (camel-hdfs)
         * Read and write from/to an HDFS filesystem using Hadoop 2.x.
         *
         * Category: bigdata,hadoop,file
         * Since: 2.14
         * Maven coordinates: org.apache.camel:camel-hdfs
         *
         * Syntax: <code>hdfs:hostName:port/path</code>
         *
         * Path parameter: hostName (required)
         * HDFS host to use
         *
         * Path parameter: port
         * HDFS port to use
         * Default value: 8020
         *
         * Path parameter: path (required)
         * The directory path to use
         *
         * @param path hostName:port/path
         */
        default HdfsEndpointBuilder hdfs(String path) {
            return HdfsEndpointBuilderFactory.endpointBuilder("hdfs", path);
        }
        /**
         * HDFS (camel-hdfs)
         * Read and write from/to an HDFS filesystem using Hadoop 2.x.
         *
         * Category: bigdata,hadoop,file
         * Since: 2.14
         * Maven coordinates: org.apache.camel:camel-hdfs
         *
         * Syntax: <code>hdfs:hostName:port/path</code>
         *
         * Path parameter: hostName (required)
         * HDFS host to use
         *
         * Path parameter: port
         * HDFS port to use
         * Default value: 8020
         *
         * Path parameter: path (required)
         * The directory path to use
         *
         * @param componentName to use a custom component name for the endpoint
         * instead of the default name
         * @param path hostName:port/path
         */
        default HdfsEndpointBuilder hdfs(String componentName, String path) {
            return HdfsEndpointBuilderFactory.endpointBuilder(componentName, path);
        }
    }
static HdfsEndpointBuilder endpointBuilder(String componentName, String path) {
class HdfsEndpointBuilderImpl extends AbstractEndpointBuilder implements HdfsEndpointBuilder, AdvancedHdfsEndpointBuilder {
public HdfsEndpointBuilderImpl(String path) {
super(componentName, path);
}
}
return new HdfsEndpointBuilderImpl(path);
}
} | apache-2.0 |
vybs/sqoop-on-spark | common/src/main/java/org/apache/sqoop/model/MMapInput.java | 3766 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.sqoop.model;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import org.apache.sqoop.classification.InterfaceAudience;
import org.apache.sqoop.classification.InterfaceStability;
import org.apache.sqoop.utils.UrlSafeUtils;
@InterfaceAudience.Public
@InterfaceStability.Unstable
public final class MMapInput extends MInput<Map<String, String>> {

  public MMapInput(String name, boolean sensitive, InputEditable editable, String overrides) {
    super(name, sensitive, editable, overrides);
  }

  /**
   * Serializes the map value into a URL-safe {@code key=value&key=value}
   * string. Keys and values are URL-encoded so that {@code =}, {@code &} and
   * other reserved characters occurring inside them cannot corrupt the
   * encoding.
   *
   * @return the encoded string, or null when no value is set
   */
  @Override
  public String getUrlSafeValueString() {
    Map<String, String> valueMap = getValue();
    if (valueMap == null) {
      return null;
    }
    boolean first = true;
    StringBuilder vsb = new StringBuilder();
    for (Map.Entry<String, String> entry : valueMap.entrySet()) {
      if (first) {
        first = false;
      } else {
        vsb.append("&");
      }
      vsb.append(UrlSafeUtils.urlEncode(entry.getKey())).append("=");
      // NOTE(review): a null value is appended as the literal string "null"
      // by StringBuilder and therefore does not round-trip back to null in
      // restoreFromUrlSafeValueString() — kept for backward compatibility.
      vsb.append(entry.getValue() != null ? UrlSafeUtils.urlEncode(entry.getValue()): null);
    }
    return vsb.toString();
  }

  /**
   * Rebuilds the map value from a string produced by
   * {@link #getUrlSafeValueString()}.
   * <p>
   * Fix: keys and values are now URL-decoded, mirroring the encoding applied
   * during serialization. Previously no decoding was performed, so any key or
   * value containing an encoded character (e.g. {@code %3D} for '=') was
   * restored in its escaped form.
   *
   * @param valueString encoded form, or null to clear the value
   */
  @Override
  public void restoreFromUrlSafeValueString(String valueString) {
    if (valueString == null) {
      setValue(null);
    } else {
      Map<String, String> valueMap = new HashMap<String, String>();
      if (valueString.trim().length() > 0) {
        String[] valuePairs = valueString.split("&");
        for (String pair : valuePairs) {
          String[] nameAndVal = pair.split("=");
          if (nameAndVal.length > 0) {
            // Undo the urlEncode() applied when the string was produced.
            String name = UrlSafeUtils.urlDecode(nameAndVal[0]);
            String value = null;
            if (nameAndVal.length > 1) {
              value = UrlSafeUtils.urlDecode(nameAndVal[1]);
            }
            valueMap.put(name, value);
          }
        }
      }
      setValue(valueMap);
    }
  }

  @Override
  public MInputType getType() {
    return MInputType.MAP;
  }

  /** Equality is based on the input name only, consistent with hashCode(). */
  @Override
  public boolean equals(Object other) {
    if (other == this) {
      return true;
    }
    if (!(other instanceof MMapInput)) {
      return false;
    }
    MMapInput mmi = (MMapInput) other;
    return getName().equals(mmi.getName());
  }

  @Override
  public int hashCode() {
    return 23 + 31 * getName().hashCode();
  }

  /** An unset (null) map counts as empty. */
  @Override
  public boolean isEmpty() {
    return getValue() == null;
  }

  @Override
  public void setEmpty() {
    setValue(null);
  }

  /**
   * Creates a copy of this input.
   *
   * @param cloneWithValue when true, the map value (if any) is deep-copied
   *                       into the clone; otherwise the clone has no value
   * @return the cloned input
   */
  @Override
  public MMapInput clone(boolean cloneWithValue) {
    MMapInput copy = new MMapInput(getName(), isSensitive(), getEditable(), getOverrides());
    copy.setPersistenceId(getPersistenceId());
    if(cloneWithValue && this.getValue() != null) {
      Map<String, String> copyMap = new HashMap<String, String>();
      Set<Map.Entry<String, String>> entry = this.getValue().entrySet();
      for(Map.Entry<String, String> itr : entry) {
        copyMap.put(itr.getKey(), itr.getValue());
      }
      copy.setValue(copyMap);
    }
    return copy;
  }
}
| apache-2.0 |
Kyakujin/AutoEco | src/com/kyakujin/android/autoeco/DebugUtils.java | 825 | package com.kyakujin.android.autoeco;
import android.content.Context;
import android.content.pm.ApplicationInfo;
import android.content.pm.PackageManager;
import android.content.pm.PackageManager.NameNotFoundException;
/**
* デバッグの設定に関するクラス
*/
/**
 * Utility for querying the application's debug configuration.
 */
public class DebugUtils {

    /**
     * Returns true when the running application was built with the
     * {@code android:debuggable} flag set in its manifest; false when the
     * flag is absent or the package cannot be resolved.
     */
    public static boolean isDebuggable(Context context) {
        final PackageManager pm = context.getPackageManager();
        final ApplicationInfo info;
        try {
            info = pm.getApplicationInfo(context.getPackageName(), 0);
        } catch (NameNotFoundException e) {
            // Own package not found: treat as a non-debuggable build.
            return false;
        }
        return (info.flags & ApplicationInfo.FLAG_DEBUGGABLE)
                == ApplicationInfo.FLAG_DEBUGGABLE;
    }
}
| apache-2.0 |
zibhub/GNDMS | logic/src/de/zib/gndms/logic/action/NoSuchActionException.java | 1012 | /*
* Copyright 2008-${YEAR} Zuse Institute Berlin (ZIB)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.zib.gndms.logic.action;
/**
 * Signals that an action was requested by a name that is not registered.
 *
 * @author Maik Jorra
 * @email jorra@zib.de
 * @date 19.07.11 16:07
 */
public class NoSuchActionException extends IllegalArgumentException {

    private static final long serialVersionUID = -3233268902144500877L;

    /**
     * @param actionName the name of the unknown action, included in the message
     */
    public NoSuchActionException(String actionName) {
        super("No action named " + actionName);
    }
}
| apache-2.0 |
kamaci/gora | gora-mongodb/src/main/java/org/apache/gora/mongodb/store/MongoStore.java | 37396 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gora.mongodb.store;
import static com.mongodb.AuthenticationMechanism.GSSAPI;
import static com.mongodb.AuthenticationMechanism.MONGODB_CR;
import static com.mongodb.AuthenticationMechanism.MONGODB_X509;
import static com.mongodb.AuthenticationMechanism.PLAIN;
import static com.mongodb.AuthenticationMechanism.SCRAM_SHA_1;
import java.io.IOException;
import java.net.UnknownHostException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Collection;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Properties;
import java.util.TimeZone;
import java.util.concurrent.ConcurrentHashMap;
import javax.xml.bind.DatatypeConverter;
import org.apache.avro.Schema;
import org.apache.avro.Schema.Field;
import org.apache.avro.Schema.Type;
import org.apache.avro.generic.GenericArray;
import org.apache.avro.util.Utf8;
import org.apache.gora.mongodb.filters.MongoFilterUtil;
import org.apache.gora.mongodb.query.MongoDBQuery;
import org.apache.gora.mongodb.query.MongoDBResult;
import org.apache.gora.mongodb.store.MongoMapping.DocumentFieldType;
import org.apache.gora.mongodb.utils.BSONDecorator;
import org.apache.gora.mongodb.utils.GoraDBEncoder;
import org.apache.gora.persistency.impl.BeanFactoryImpl;
import org.apache.gora.persistency.impl.DirtyListWrapper;
import org.apache.gora.persistency.impl.DirtyMapWrapper;
import org.apache.gora.persistency.impl.PersistentBase;
import org.apache.gora.query.PartitionQuery;
import org.apache.gora.query.Query;
import org.apache.gora.query.Result;
import org.apache.gora.query.impl.PartitionQueryImpl;
import org.apache.gora.store.impl.DataStoreBase;
import org.apache.gora.util.AvroUtils;
import org.apache.gora.util.ClassLoadingUtils;
import org.apache.gora.util.GoraException;
import org.bson.types.ObjectId;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.base.Splitter;
import com.mongodb.BasicDBList;
import com.mongodb.BasicDBObject;
import com.mongodb.Bytes;
import com.mongodb.DB;
import com.mongodb.DBCollection;
import com.mongodb.DBCursor;
import com.mongodb.DBObject;
import com.mongodb.Mongo;
import com.mongodb.MongoClient;
import com.mongodb.MongoClientOptions;
import com.mongodb.MongoCredential;
import com.mongodb.ReadPreference;
import com.mongodb.ServerAddress;
import com.mongodb.WriteConcern;
import com.mongodb.WriteResult;
/**
* Implementation of a MongoDB data store to be used by gora.
*
* @param <K>
* class to be used for the key
* @param <T>
* class to be persisted within the store
* @author Fabien Poulard fpoulard@dictanova.com
* @author Damien Raude-Morvan draudemorvan@dictanova.com
*/
public class MongoStore<K, T extends PersistentBase> extends
DataStoreBase<K, T> {
public static final Logger LOG = LoggerFactory.getLogger(MongoStore.class);
/**
* Default value for mapping file
*/
public static final String DEFAULT_MAPPING_FILE = "/gora-mongodb-mapping.xml";
/**
* MongoDB client
*/
private static ConcurrentHashMap<String, MongoClient> mapsOfClients = new ConcurrentHashMap<>();
private DB mongoClientDB;
private DBCollection mongoClientColl;
/**
* Mapping definition for MongoDB
*/
private MongoMapping mapping;
private MongoFilterUtil<K, T> filterUtil;
  /**
   * Creates the store with an empty default mapping; the real mapping is
   * loaded from the mapping file in {@code initialize(...)}.
   */
  public MongoStore() {
    // Default mapping, overridden in initialize(...) once the mapping file is read
    this.mapping = new MongoMapping();
  }
  /**
   * Initialize the data store by reading the credentials, setting the client's
   * properties up and reading the mapping file.
   * <p>
   * Order matters here: store parameters are loaded first, then the base class
   * is initialized, then the mapping is built, and only then are the MongoDB
   * database/collection handles acquired.
   *
   * @param keyClass class used for row keys
   * @param pPersistentClass class of the persistent beans kept in this store
   * @param properties Gora properties carrying connection and mapping settings
   * @throws GoraException if parameter loading, mapping parsing or the MongoDB
   *         connection fails; internal GoraExceptions are rethrown unwrapped
   */
  public void initialize(final Class<K> keyClass,
      final Class<T> pPersistentClass, final Properties properties) throws GoraException {
    try {
      LOG.debug("Initializing MongoDB store");
      MongoStoreParameters parameters = MongoStoreParameters.load(properties, getConf());
      super.initialize(keyClass, pPersistentClass, properties);
      filterUtil = new MongoFilterUtil<>(getConf());
      // Load the collection/field mapping from the configured mapping file
      MongoMappingBuilder<K, T> builder = new MongoMappingBuilder<>(this);
      LOG.debug("Initializing Mongo store with mapping {}.",
          new Object[] { parameters.getMappingFile() });
      builder.fromFile(parameters.getMappingFile());
      mapping = builder.build();
      // Prepare MongoDB connection (client is cached/shared per server list)
      mongoClientDB = getDB(parameters);
      mongoClientColl = mongoClientDB
          .getCollection(mapping.getCollectionName());
      LOG.info("Initialized Mongo store for database {} of {}.", new Object[] {
          parameters.getDbname(), parameters.getServers() });
    } catch (GoraException e) {
      // Already a GoraException: propagate without double-wrapping
      throw e;
    } catch (IOException e) {
      LOG.error("Error while initializing MongoDB store", e);
      throw new GoraException(e);
    }
  }
  /**
   * Retrieve a client connected to the MongoDB server to be used.
   *
   * @param params This value should specify the host:port (at least one) for
   *               connecting to remote MongoDB.
   * @return a {@link Mongo} instance connected to the server
   * @throws UnknownHostException if a configured host cannot be resolved
   */
  private MongoClient getClient(MongoStoreParameters params)
      throws UnknownHostException {
    // Configure options; the Gora encoder is mandatory so Utf8 values
    // serialize correctly.
    MongoClientOptions.Builder optBuilder = new MongoClientOptions.Builder()
        .dbEncoderFactory(GoraDBEncoder.FACTORY); // Utf8 serialization!
    if (params.getReadPreference() != null) {
      optBuilder.readPreference(ReadPreference.valueOf(params.getReadPreference()));
    }
    if (params.getWriteConcern() != null) {
      optBuilder.writeConcern(WriteConcern.valueOf(params.getWriteConcern()));
    }
    // If configuration contains a login + secret, authenticate with the DB
    List<MongoCredential> credentials = new ArrayList<>();
    if (params.getLogin() != null && params.getSecret() != null) {
      credentials.add(createCredential(params.getAuthenticationType(), params.getLogin(), params.getDbname(), params.getSecret()));
    }
    // Build the server address list from the comma-separated "servers" setting.
    // Each entry is "host", "host:port", or empty (-> driver default address).
    List<ServerAddress> addrs = new ArrayList<>();
    Iterable<String> serversArray = Splitter.on(",").split(params.getServers());
    // NOTE(review): the null check is defensive; Guava's Splitter.split does
    // not return null for a non-null input.
    if (serversArray != null) {
      for (String server : serversArray) {
        Iterator<String> paramsIterator = Splitter.on(":").trimResults().split(server).iterator();
        if (!paramsIterator.hasNext()) {
          // No server, use default
          addrs.add(new ServerAddress());
        } else {
          String host = paramsIterator.next();
          if (paramsIterator.hasNext()) {
            String port = paramsIterator.next();
            addrs.add(new ServerAddress(host, Integer.parseInt(port)));
          } else {
            addrs.add(new ServerAddress(host));
          }
        }
      }
    }
    // Connect to the Mongo server
    return new MongoClient(addrs, credentials, optBuilder.build());
  }
/**
* This method creates credentials according to the Authentication type.
*
* @param authenticationType authentication Type (Authentication Mechanism)
* @param username username
* @param database database
* @param password password
* @return Mongo Crendential
* @see <a href="http://api.mongodb.com/java/current/com/mongodb/AuthenticationMechanism.html">AuthenticationMechanism in MongoDB Java Driver</a>
*/
private MongoCredential createCredential(String authenticationType, String username, String database, String password) {
MongoCredential credential = null;
if (PLAIN.getMechanismName().equals(authenticationType)) {
credential = MongoCredential.createPlainCredential(username, database, password.toCharArray());
} else if (SCRAM_SHA_1.getMechanismName().equals(authenticationType)) {
credential = MongoCredential.createScramSha1Credential(username, database, password.toCharArray());
} else if (MONGODB_CR.getMechanismName().equals(authenticationType)) {
credential = MongoCredential.createMongoCRCredential(username, database, password.toCharArray());
} else if (GSSAPI.getMechanismName().equals(authenticationType)) {
credential = MongoCredential.createGSSAPICredential(username);
} else if (MONGODB_X509.getMechanismName().equals(authenticationType)) {
credential = MongoCredential.createMongoX509Credential(username);
} else {
credential = MongoCredential.createCredential(username, database, password.toCharArray());
}
return credential;
}
/**
* Get reference to Mongo DB, using credentials if not null.
*/
private DB getDB(MongoStoreParameters parameters) throws UnknownHostException {
// Get reference to Mongo DB
if (!mapsOfClients.containsKey(parameters.getServers()))
mapsOfClients.put(parameters.getServers(), getClient(parameters));
DB db = mapsOfClients.get(parameters.getServers()).getDB(parameters.getDbname());
return db;
}
  /**
   * Returns the MongoDB mapping (collection name and document/field layout)
   * currently used by this store.
   */
  public MongoMapping getMapping() {
    return mapping;
  }
  /**
   * Accessor to the name of the collection used.
   */
  @Override
  public String getSchemaName() {
    return mapping.getCollectionName();
  }
  // Widens visibility of the protected base-class helper so mapping code in
  // this package can resolve the effective schema name.
  @Override
  public String getSchemaName(final String mappingSchemaName,
      final Class<?> persistentClass) {
    return super.getSchemaName(mappingSchemaName, persistentClass);
  }
  /**
   * Create a new collection in MongoDB if necessary.
   * <p>
   * No-op when the collection already exists.
   *
   * @throws GoraException if no database is selected or creation fails
   */
  @Override
  public void createSchema() throws GoraException {
    if (mongoClientDB == null)
      throw new GoraException(
          "Impossible to create the schema as no database has been selected.");
    if (schemaExists()) {
      return;
    }
    try {
      // Pass an explicit (empty) options DBObject to force immediate creation;
      // otherwise the driver defers creating the collection until first write.
      mongoClientColl = mongoClientDB.createCollection(
          mapping.getCollectionName(), new BasicDBObject()); // send a DBObject to
      // force creation
      // otherwise creation is deferred
      mongoClientColl.setDBEncoderFactory(GoraDBEncoder.FACTORY);
      LOG.debug("Collection {} has been created for Mongo instance {}.",
          new Object[] { mapping.getCollectionName(), mongoClientDB.getMongo() });
    } catch (Exception e) {
      throw new GoraException(e);
    }
  }
  /**
   * Drop the collection backing this store, discarding all of its documents.
   *
   * @throws GoraException if no collection is selected or the drop fails
   */
  @Override
  public void deleteSchema() throws GoraException {
    if (mongoClientColl == null)
      throw new GoraException(
          "Impossible to delete the schema as no schema is selected.");
    try {
      // If initialized, simply drop the collection
      mongoClientColl.drop();
      LOG.debug(
          "Collection {} has been dropped for Mongo instance {}.",
          new Object[] { mongoClientColl.getFullName(), mongoClientDB.getMongo() });
    } catch (Exception e) {
      throw new GoraException(e);
    }
  }
  /**
   * Check if the collection already exists or should be created.
   *
   * @return true when the mapped collection exists in the selected database
   * @throws GoraException if the existence check fails
   */
  @Override
  public boolean schemaExists() throws GoraException {
    try {
      return mongoClientDB.collectionExists(mapping.getCollectionName());
    } catch (Exception e) {
      throw new GoraException(e);
    }
  }
  /**
   * Ensure the data is synced to disk.
   * <p>
   * Issues a non-blocking fsync ({@code fsync(false)}) on every cached client
   * in the static registry — i.e. this flushes all MongoStore instances in
   * the JVM, not only this one.
   *
   * @throws GoraException if the fsync command fails on any client
   */
  @Override
  public void flush() throws GoraException {
    try {
      for (MongoClient client : mapsOfClients.values()) {
        client.fsync(false);
        LOG.debug("Forced synced of database for Mongo instance {}.",
            new Object[] { client });
      }
    } catch (Exception e) {
      throw new GoraException(e);
    }
  }
  /**
   * Release the resources linked to this collection.
   * <p>
   * Intentionally empty: the underlying MongoClient instances live in the
   * static {@code mapsOfClients} registry and are shared between stores, so
   * closing them here would break other stores using the same servers.
   */
  @Override
  public void close() {
  }
/**
* Retrieve an entry from the store with only selected fields.
*
* @param key
* identifier of the document in the database
* @param fields
* list of fields to be loaded from the database
*/
@Override
public T get(final K key, final String[] fields) throws GoraException {
try {
String[] dbFields = getFieldsToQuery(fields);
// Prepare the MongoDB query
BasicDBObject q = new BasicDBObject("_id", key);
BasicDBObject proj = new BasicDBObject();
for (String field : dbFields) {
String docf = mapping.getDocumentField(field);
if (docf != null) {
proj.put(docf, true);
}
}
// Execute the query
DBObject res = mongoClientColl.findOne(q, proj);
// Build the corresponding persistent
return newInstance(res, dbFields);
} catch (Exception e) {
throw new GoraException(e);
}
}
@Override
public boolean exists(final K key) throws GoraException {
try {
// Prepare the MongoDB query
BasicDBObject q = new BasicDBObject("_id", key);
BasicDBObject proj = new BasicDBObject();
// Execute the query
DBObject res = mongoClientColl.findOne(q, proj);
return res != null;
} catch (Exception e) {
throw new GoraException(e);
}
}
/**
* Persist an object into the store.
*
* @param key
* identifier of the object in the store
* @param obj
* the object to be inserted
*/
@Override
public void put(final K key, final T obj) throws GoraException {
try {
// Save the object in the database
if (obj.isDirty()) {
performPut(key, obj);
} else {
LOG.info("Ignored putting object {} in the store as it is neither "
+ "new, neither dirty.", new Object[] { obj });
}
} catch (Exception e) {
throw new GoraException(e);
}
}
  /**
   * Update a object that already exists in the store. The object must exist
   * already or the update may fail.
   *
   * @param key
   *          identifier of the object in the store
   * @param obj
   *          the object to be inserted
   */
  private void performPut(final K key, final T obj) {
    // Build the query to select the object to be updated
    DBObject qSel = new BasicDBObject("_id", key);
    // Build the update query: dirty non-null fields go into a $set document,
    // dirty null fields into a $unset document.
    BasicDBObject qUpdate = new BasicDBObject();
    BasicDBObject qUpdateSet = newUpdateSetInstance(obj);
    if (qUpdateSet.size() > 0) {
      qUpdate.put("$set", qUpdateSet);
    }
    BasicDBObject qUpdateUnset = newUpdateUnsetInstance(obj);
    if (qUpdateUnset.size() > 0) {
      qUpdate.put("$unset", qUpdateUnset);
    }
    // Execute the update (if there is at least one $set or $unset);
    // upsert=true so missing documents are created, multi=false.
    if (!qUpdate.isEmpty()) {
      mongoClientColl.update(qSel, qUpdate, true, false);
      // Writes are now persisted; reset the dirty flags on the bean.
      obj.clearDirty();
    } else {
      LOG.debug("No update to perform, skip {}", key);
    }
  }
@Override
public boolean delete(final K key) throws GoraException {
try {
DBObject removeKey = new BasicDBObject("_id", key);
WriteResult writeResult = mongoClientColl.remove(removeKey);
return writeResult != null && writeResult.getN() > 0;
} catch (Exception e) {
throw new GoraException(e);
}
}
@Override
public long deleteByQuery(final Query<K, T> query) throws GoraException {
try {
// Build the actual MongoDB query
DBObject q = MongoDBQuery.toDBQuery(query);
WriteResult writeResult = mongoClientColl.remove(q);
if (writeResult != null) {
return writeResult.getN();
}
return 0;
} catch (Exception e) {
throw new GoraException(e);
}
}
  /**
   * Execute the query and return the result.
   */
  @Override
  public Result<K, T> execute(final Query<K, T> query) throws GoraException {
    try {
      String[] fields = getFieldsToQuery(query.getFields());
      // Build the actual MongoDB query
      DBObject q = MongoDBQuery.toDBQuery(query);
      DBObject p = MongoDBQuery.toProjection(fields, mapping);
      if (query.getFilter() != null) {
        // Try to push the Gora filter down into the MongoDB selector itself.
        boolean succeeded = filterUtil.setFilter(q, query.getFilter(), this);
        if (succeeded) {
          // don't need local filter
          query.setLocalFilterEnabled(false);
        }
      }
      // Execute the query on the collection
      DBCursor cursor = mongoClientColl.find(q, p);
      if (query.getLimit() > 0)
        cursor = cursor.limit((int) query.getLimit());
      cursor.batchSize(100);
      // Long-running scans must not be killed by the server-side cursor
      // idle timeout.
      cursor.addOption(Bytes.QUERYOPTION_NOTIMEOUT);
      // Build the result: the cursor is consumed lazily by MongoDBResult.
      MongoDBResult<K, T> mongoResult = new MongoDBResult<>(this, query);
      mongoResult.setCursor(cursor);
      return mongoResult;
    } catch(Exception e) {
      throw new GoraException(e);
    }
  }
/**
* Create a new {@link Query} to query the datastore.
*/
@Override
public Query<K, T> newQuery() {
MongoDBQuery<K, T> query = new MongoDBQuery<>(this);
query.setFields(getFieldsToQuery(null));
return query;
}
/**
* Partitions the given query and returns a list of PartitionQuerys, which
* will execute on local data.
*/
@Override
public List<PartitionQuery<K, T>> getPartitions(final Query<K, T> query)
throws IOException {
// FIXME: for now, there is only one partition as we do not handle
// MongoDB sharding configuration
List<PartitionQuery<K, T>> partitions = new ArrayList<>();
PartitionQueryImpl<K, T> partitionQuery = new PartitionQueryImpl<>(
query);
partitionQuery.setConf(getConf());
partitions.add(partitionQuery);
return partitions;
}
  // //////////////////////////////////////////////////////// DESERIALIZATION
  /**
   * Build a new instance of the persisted class from the {@link DBObject}
   * retrieved from the database.
   *
   * @param obj
   *          the {@link DBObject} that results from the query to the database
   * @param fields
   *          the list of fields to be mapped to the persistence class
   *          instance; null loads all mapped fields
   * @return a persistence class instance which content was deserialized from
   *         the {@link DBObject}, or null when {@code obj} is null
   * @throws GoraException if a field fails to deserialize
   */
  public T newInstance(final DBObject obj, final String[] fields) throws GoraException {
    if (obj == null)
      return null;
    BSONDecorator easybson = new BSONDecorator(obj);
    // Create new empty persistent bean instance
    T persistent = newPersistent();
    String[] dbFields = getFieldsToQuery(fields);
    // Populate each field
    for (String f : dbFields) {
      // Check the field exists in the mapping and in the db
      String docf = mapping.getDocumentField(f);
      if (docf == null || !easybson.containsField(docf))
        continue;
      DocumentFieldType storeType = mapping.getDocumentFieldType(docf);
      Field field = fieldMap.get(f);
      Schema fieldSchema = field.schema();
      LOG.debug(
          "Load from DBObject (MAIN), field:{}, schemaType:{}, docField:{}, storeType:{}",
          new Object[] { field.name(), fieldSchema.getType(), docf, storeType });
      Object result = fromDBObject(fieldSchema, storeType, field, docf,
          easybson);
      persistent.put(field.pos(), result);
    }
    // A freshly loaded bean carries no pending changes.
    persistent.clearDirty();
    return persistent;
  }
  /**
   * Dispatch on the Avro schema type and deserialize a single document field
   * into its Avro value.
   *
   * @param fieldSchema Avro schema of the field being read
   * @param storeType MongoDB storage type declared in the mapping (may be null)
   * @param field the Avro field descriptor
   * @param docf name/path of the document field to read
   * @param easybson decorated BSON document to read from
   * @return the deserialized value; null for NULL schemas, missing records,
   *         or unsupported types
   * @throws GoraException if a nested structure fails to deserialize
   */
  private Object fromDBObject(final Schema fieldSchema,
      final DocumentFieldType storeType, final Field field, final String docf,
      final BSONDecorator easybson) throws GoraException {
    Object result = null;
    switch (fieldSchema.getType()) {
    case MAP:
      result = fromMongoMap(docf, fieldSchema, easybson, field);
      break;
    case ARRAY:
      result = fromMongoList(docf, fieldSchema, easybson, field);
      break;
    case RECORD:
      DBObject rec = easybson.getDBObject(docf);
      if (rec == null) {
        result = null;
        break;
      }
      result = fromMongoRecord(fieldSchema, docf, rec);
      break;
    case BOOLEAN:
      result = easybson.getBoolean(docf);
      break;
    case DOUBLE:
      result = easybson.getDouble(docf);
      break;
    case FLOAT:
      // Stored as a BSON double; narrowed to float on the way out.
      result = easybson.getDouble(docf).floatValue();
      break;
    case INT:
      result = easybson.getInt(docf);
      break;
    case LONG:
      result = easybson.getLong(docf);
      break;
    case STRING:
      result = fromMongoString(storeType, docf, easybson);
      break;
    case ENUM:
      // Enum symbols are stored by name.
      result = AvroUtils.getEnumValue(fieldSchema, easybson.getUtf8String(docf)
          .toString());
      break;
    case BYTES:
    case FIXED:
      result = easybson.getBytes(docf);
      break;
    case NULL:
      result = null;
      break;
    case UNION:
      result = fromMongoUnion(fieldSchema, storeType, field, docf, easybson);
      break;
    default:
      LOG.warn("Unable to read {}", docf);
      break;
    }
    return result;
  }
  /**
   * Deserialize a union-typed field. Only two-branch unions of the shape
   * ["null","type"] or ["type","null"] are supported; the value is read as
   * if the schema were just ["type"].
   *
   * @throws IllegalStateException for unions with more than two branches or
   *           without a null branch
   * @throws GoraException if the inner value fails to deserialize
   */
  private Object fromMongoUnion(final Schema fieldSchema,
      final DocumentFieldType storeType, final Field field, final String docf,
      final BSONDecorator easybson) throws GoraException {
    Object result;// schema [type0, type1]
    Type type0 = fieldSchema.getTypes().get(0).getType();
    Type type1 = fieldSchema.getTypes().get(1).getType();
    // Check if types are different and there's a "null", like ["null","type"]
    // or ["type","null"]
    if (!type0.equals(type1)
        && (type0.equals(Type.NULL) || type1.equals(Type.NULL))) {
      // NOTE(review): the non-null branch is assumed to sit at index 1,
      // i.e. a ["null","type"] ordering — confirm against schema generation.
      Schema innerSchema = fieldSchema.getTypes().get(1);
      LOG.debug(
          "Load from DBObject (UNION), schemaType:{}, docField:{}, storeType:{}",
          new Object[] { innerSchema.getType(), docf, storeType });
      // Deserialize as if schema was ["type"]
      result = fromDBObject(innerSchema, storeType, field, docf, easybson);
    } else {
      throw new IllegalStateException(
          "MongoStore doesn't support 3 types union field yet. Please update your mapping");
    }
    return result;
  }
@SuppressWarnings({ "unchecked", "rawtypes" })
private Object fromMongoRecord(final Schema fieldSchema, final String docf,
final DBObject rec) throws GoraException {
Object result;
BSONDecorator innerBson = new BSONDecorator(rec);
Class<?> clazz = null;
try {
clazz = ClassLoadingUtils.loadClass(fieldSchema.getFullName());
} catch (ClassNotFoundException e) {
}
PersistentBase record = (PersistentBase) new BeanFactoryImpl(keyClass, clazz).newPersistent();
for (Field recField : fieldSchema.getFields()) {
Schema innerSchema = recField.schema();
DocumentFieldType innerStoreType = mapping
.getDocumentFieldType(innerSchema.getName());
String innerDocField = mapping.getDocumentField(recField.name()) != null ? mapping
.getDocumentField(recField.name()) : recField.name();
String fieldPath = docf + "." + innerDocField;
LOG.debug(
"Load from DBObject (RECORD), field:{}, schemaType:{}, docField:{}, storeType:{}",
new Object[] { recField.name(), innerSchema.getType(), fieldPath,
innerStoreType });
record.put(
recField.pos(),
fromDBObject(innerSchema, innerStoreType, recField, innerDocField,
innerBson));
}
result = record;
return result;
}
  /**
   * Deserialize a BSON list into a {@link DirtyListWrapper}. Each element is
   * wrapped in a one-key temporary document so the generic
   * {@code fromDBObject} dispatcher can be reused.
   *
   * @return a dirty-tracking list, empty (never null) when the document field
   *         is absent
   * @throws GoraException if an element fails to deserialize
   */
  /* pp */ Object fromMongoList(final String docf, final Schema fieldSchema,
      final BSONDecorator easybson, final Field f) throws GoraException {
    List<Object> list = easybson.getDBList(docf);
    List<Object> rlist = new ArrayList<>();
    if (list == null) {
      return new DirtyListWrapper(rlist);
    }
    for (Object item : list) {
      DocumentFieldType storeType = mapping.getDocumentFieldType(docf);
      // Wrap each element under the synthetic key "item" to reuse the
      // generic per-field deserializer.
      Object o = fromDBObject(fieldSchema.getElementType(), storeType, f,
          "item", new BSONDecorator(new BasicDBObject("item", item)));
      rlist.add(o);
    }
    return new DirtyListWrapper<>(rlist);
  }
  /**
   * Deserialize a BSON sub-document into a {@link DirtyMapWrapper} keyed by
   * {@link Utf8}. Keys are decoded back from their MongoDB-safe form (middle
   * dots become dots again, see {@code decodeFieldKey}).
   *
   * @return a dirty-tracking map, empty (never null) when the document field
   *         is absent
   * @throws GoraException if a value fails to deserialize
   */
  /* pp */ Object fromMongoMap(final String docf, final Schema fieldSchema,
      final BSONDecorator easybson, final Field f) throws GoraException {
    BasicDBObject map = easybson.getDBObject(docf);
    Map<Utf8, Object> rmap = new HashMap<>();
    if (map == null) {
      return new DirtyMapWrapper(rmap);
    }
    for (Entry<String, Object> e : map.entrySet()) {
      String mapKey = e.getKey();
      // Keys were encoded on the way in; decode for the Avro-side map.
      String decodedMapKey = decodeFieldKey(mapKey);
      DocumentFieldType storeType = mapping.getDocumentFieldType(docf);
      // Values are read with the still-encoded key, which is the actual
      // field name inside the sub-document.
      Object o = fromDBObject(fieldSchema.getValueType(), storeType, f, mapKey,
          new BSONDecorator(map));
      rmap.put(new Utf8(decodedMapKey), o);
    }
    return new DirtyMapWrapper<>(rmap);
  }
  /**
   * Deserialize a document field into a {@link Utf8} string, honoring the
   * mapping's declared storage type: OBJECTID and DATE values are rendered
   * to their string form, anything else is read as a plain UTF-8 string.
   *
   * @return a {@link Utf8} value; dates are formatted as ISO-8601 in UTC
   */
  private Object fromMongoString(final DocumentFieldType storeType,
      final String docf, final BSONDecorator easybson) {
    Object result;
    if (storeType == DocumentFieldType.OBJECTID) {
      // Try auto-conversion of BSON data to ObjectId
      // It will work if data is stored as String or as ObjectId
      Object bin = easybson.get(docf);
      if (bin instanceof String) {
        ObjectId id = new ObjectId((String) bin);
        result = new Utf8(id.toString());
      } else {
        result = new Utf8(bin.toString());
      }
    } else if (storeType == DocumentFieldType.DATE) {
      Object bin = easybson.get(docf);
      if (bin instanceof Date) {
        // Render stored dates in UTC so the output is timezone-stable.
        Calendar calendar = Calendar.getInstance(TimeZone.getTimeZone("UTC"), Locale.getDefault());
        calendar.setTime((Date) bin);
        result = new Utf8(DatatypeConverter.printDateTime(calendar));
      } else {
        result = new Utf8(bin.toString());
      }
    } else {
      result = easybson.getUtf8String(docf);
    }
    return result;
  }
  // ////////////////////////////////////////////////////////// SERIALIZATION
  /**
   * Build a new instance of {@link DBObject} from the persistence class
   * instance in parameter. Limit the {@link DBObject} to the fields that are
   * dirty and not null, that is the fields that will need to be updated in the
   * store.
   *
   * @param persistent
   *          a persistence class instance which content is to be serialized as
   *          a {@link DBObject} for use as parameter of a $set operator
   * @return a {@link DBObject} which content corresponds to the fields that
   *         have to be updated... and formatted to be passed in parameter of a
   *         $set operator
   */
  private BasicDBObject newUpdateSetInstance(final T persistent) {
    BasicDBObject result = new BasicDBObject();
    for (Field f : persistent.getSchema().getFields()) {
      // Only dirty, non-null fields belong in $set (null ones go to $unset).
      if (persistent.isDirty(f.pos()) && (persistent.get(f.pos()) != null)) {
        String docf = mapping.getDocumentField(f.name());
        Object value = persistent.get(f.pos());
        DocumentFieldType storeType = mapping.getDocumentFieldType(docf);
        LOG.debug(
            "Transform value to DBObject (MAIN), docField:{}, schemaType:{}, storeType:{}",
            new Object[] { docf, f.schema().getType(), storeType });
        Object o = toDBObject(docf, f.schema(), f.schema().getType(),
            storeType, value);
        result.put(docf, o);
      }
    }
    return result;
  }
  /**
   * Build a new instance of {@link DBObject} from the persistence class
   * instance in parameter. Limit the {@link DBObject} to the fields that are
   * dirty and null, that is the fields that will need to be updated in the
   * store by being removed.
   *
   * @param persistent
   *          a persistence class instance which content is to be serialized as
   *          a {@link DBObject} for use as parameter of a $set operator
   * @return a {@link DBObject} which content corresponds to the fields that
   *         have to be updated... and formatted to be passed in parameter of a
   *         $unset operator
   */
  private BasicDBObject newUpdateUnsetInstance(final T persistent) {
    BasicDBObject result = new BasicDBObject();
    for (Field f : persistent.getSchema().getFields()) {
      // Mirror image of newUpdateSetInstance: dirty AND null means "remove".
      if (persistent.isDirty(f.pos()) && (persistent.get(f.pos()) == null)) {
        String docf = mapping.getDocumentField(f.name());
        Object value = persistent.get(f.pos());
        DocumentFieldType storeType = mapping.getDocumentFieldType(docf);
        LOG.debug(
            "Transform value to DBObject (MAIN), docField:{}, schemaType:{}, storeType:{}",
            new Object[] { docf, f.schema().getType(), storeType });
        Object o = toDBObject(docf, f.schema(), f.schema().getType(),
            storeType, value);
        result.put(docf, o);
      }
    }
    return result;
  }
  /**
   * Dispatch on the Avro schema type and serialize a single Avro value into
   * its MongoDB representation. Inverse of {@code fromDBObject}.
   *
   * @param docf target document field name (used for nested type lookups)
   * @param fieldSchema Avro schema of the value
   * @param fieldType Avro type of the value (normally fieldSchema.getType())
   * @param storeType MongoDB storage type declared in the mapping (may be null)
   * @param value the Avro value, possibly null
   * @return the MongoDB-ready value, or null for null/unsupported input
   * @throws IllegalStateException when the mapping's store type is
   *           incompatible with the Avro type (map vs document, array vs list)
   */
  @SuppressWarnings("unchecked")
  private Object toDBObject(final String docf, final Schema fieldSchema,
      final Type fieldType, final DocumentFieldType storeType,
      final Object value) {
    Object result = null;
    switch (fieldType) {
    case MAP:
      if (storeType != null && storeType != DocumentFieldType.DOCUMENT) {
        throw new IllegalStateException(
            "Field "
                + fieldSchema.getType()
                + ": to store a Gora 'map', target Mongo mapping have to be of 'document' type");
      }
      Schema valueSchema = fieldSchema.getValueType();
      result = mapToMongo(docf, (Map<CharSequence, ?>) value, valueSchema,
          valueSchema.getType());
      break;
    case ARRAY:
      if (storeType != null && storeType != DocumentFieldType.LIST) {
        throw new IllegalStateException(
            "Field "
                + fieldSchema.getType()
                + ": To store a Gora 'array', target Mongo mapping have to be of 'list' type");
      }
      Schema elementSchema = fieldSchema.getElementType();
      result = listToMongo(docf, (List<?>) value, elementSchema,
          elementSchema.getType());
      break;
    case BYTES:
      // Store raw bytes; null stays null.
      if (value != null) {
        result = ((ByteBuffer) value).array();
      }
      break;
    case INT:
    case LONG:
    case FLOAT:
    case DOUBLE:
    case BOOLEAN:
      // Primitives map directly onto BSON types.
      result = value;
      break;
    case STRING:
      result = stringToMongo(fieldSchema, storeType, value);
      break;
    case ENUM:
      // Enum symbols are stored by name.
      if (value != null)
        result = value.toString();
      break;
    case RECORD:
      if (value == null)
        break;
      result = recordToMongo(docf, fieldSchema, value);
      break;
    case UNION:
      result = unionToMongo(docf, fieldSchema, storeType, value);
      break;
    case FIXED:
      result = value;
      break;
    default:
      LOG.error("Unknown field type: {}", fieldSchema.getType());
      break;
    }
    return result;
  }
  /**
   * Serialize a union-typed value. Only two-branch unions containing a null
   * branch are supported; the value is written as if the schema were just the
   * non-null branch. Mirror of {@code fromMongoUnion}.
   *
   * @throws IllegalStateException for unions with more than two branches or
   *           without a null branch
   */
  private Object unionToMongo(final String docf, final Schema fieldSchema,
      final DocumentFieldType storeType, final Object value) {
    Object result;// schema [type0, type1]
    Type type0 = fieldSchema.getTypes().get(0).getType();
    Type type1 = fieldSchema.getTypes().get(1).getType();
    // Check if types are different and there's a "null", like ["null","type"]
    // or ["type","null"]
    if (!type0.equals(type1)
        && (type0.equals(Type.NULL) || type1.equals(Type.NULL))) {
      // NOTE(review): the non-null branch is assumed at index 1, matching
      // fromMongoUnion — confirm against schema generation.
      Schema innerSchema = fieldSchema.getTypes().get(1);
      LOG.debug(
          "Transform value to DBObject (UNION), schemaType:{}, type1:{}, storeType:{}",
          new Object[] { innerSchema.getType(), type1, storeType });
      // Deserialize as if schema was ["type"]
      result = toDBObject(docf, innerSchema, type1, storeType, value);
    } else {
      throw new IllegalStateException(
          "MongoStore doesn't support 3 types union field yet. Please update your mapping");
    }
    return result;
  }
  /**
   * Serialize an Avro record value into a BSON sub-document, field by field.
   * Mirror of {@code fromMongoRecord}.
   *
   * @param docf parent document field name, forwarded for nested lookups
   * @param fieldSchema Avro RECORD schema of the value
   * @param value a {@link PersistentBase} instance to serialize
   * @return the populated sub-document
   */
  private BasicDBObject recordToMongo(final String docf,
      final Schema fieldSchema, final Object value) {
    BasicDBObject record = new BasicDBObject();
    for (Field member : fieldSchema.getFields()) {
      Object innerValue = ((PersistentBase) value).get(member.pos());
      String innerDoc = mapping.getDocumentField(member.name());
      Type innerType = member.schema().getType();
      DocumentFieldType innerStoreType = mapping.getDocumentFieldType(innerDoc);
      LOG.debug(
          "Transform value to DBObject (RECORD), docField:{}, schemaType:{}, storeType:{}",
          new Object[] { member.name(), member.schema().getType(),
              innerStoreType });
      // NOTE(review): the sub-document key is the Avro field name, not the
      // mapped innerDoc name — verify this matches fromMongoRecord's reads.
      record.put(
          member.name(),
          toDBObject(docf, member.schema(), innerType, innerStoreType,
              innerValue));
    }
    return record;
  }
private Object stringToMongo(final Schema fieldSchema,
final DocumentFieldType storeType, final Object value) {
Object result = null;
if (storeType == DocumentFieldType.OBJECTID) {
if (value != null) {
ObjectId id;
try {
id = new ObjectId(value.toString());
} catch (IllegalArgumentException e1) {
// Unable to parse anything from Utf8 value, throw error
throw new IllegalStateException("Field " + fieldSchema.getType()
+ ": Invalid string: unable to convert to ObjectId");
}
result = id;
}
} else if (storeType == DocumentFieldType.DATE) {
if (value != null) {
// Try to parse date from Utf8 value
Calendar calendar = null;
try {
// Parse as date + time
calendar = DatatypeConverter.parseDateTime(value.toString());
} catch (IllegalArgumentException e1) {
try {
// Parse as date only
calendar = DatatypeConverter.parseDate(value.toString());
} catch (IllegalArgumentException e2) {
// No-op
}
}
if (calendar == null) {
// Unable to parse anything from Utf8 value, throw error
throw new IllegalStateException("Field " + fieldSchema.getType()
+ ": Invalid date format '" + value + "'");
}
result = calendar.getTime();
}
} else {
if (value != null) {
result = value.toString();
}
}
return result;
}
  /**
   * Convert a Java Map as used in Gora generated classes to a Map that can
   * safely be serialized into MongoDB.
   *
   * @param docf
   *          target document field name, forwarded for nested type lookups
   * @param value
   *          the Java Map that must be serialized into a MongoDB object
   * @param fieldSchema
   *          Avro schema of the map's values
   * @param fieldType
   *          type of the values within the map
   * @return a {@link BasicDBObject} version of the {@link Map} that can be
   *         safely serialized into MongoDB; empty (never null) for null input
   */
  private BasicDBObject mapToMongo(final String docf,
      final Map<CharSequence, ?> value, final Schema fieldSchema,
      final Type fieldType) {
    BasicDBObject map = new BasicDBObject();
    // Handle null case
    if (value == null)
      return map;
    // Handle regular cases
    for (Entry<CharSequence, ?> e : value.entrySet()) {
      String mapKey = e.getKey().toString();
      // MongoDB forbids dots in field names; encode them (see encodeFieldKey).
      String encodedMapKey = encodeFieldKey(mapKey);
      Object mapValue = e.getValue();
      DocumentFieldType storeType = mapping.getDocumentFieldType(docf);
      Object result = toDBObject(docf, fieldSchema, fieldType, storeType,
          mapValue);
      map.put(encodedMapKey, result);
    }
    return map;
  }
  /**
   * Convert a Java {@link GenericArray} as used in Gora generated classes to a
   * List that can safely be serialized into MongoDB.
   *
   * @param docf
   *          target document field name, forwarded for nested type lookups
   * @param array
   *          the {@link GenericArray} to be serialized
   * @param fieldSchema
   *          Avro schema of the array's elements
   * @param fieldType
   *          type of the elements within the array
   * @return a {@link BasicDBList} version of the {@link GenericArray} that can
   *         be safely serialized into MongoDB; empty (never null) for null
   *         input
   */
  private BasicDBList listToMongo(final String docf, final Collection<?> array,
      final Schema fieldSchema, final Type fieldType) {
    BasicDBList list = new BasicDBList();
    // Handle null case
    if (array == null)
      return list;
    // Handle regular cases
    for (Object item : array) {
      DocumentFieldType storeType = mapping.getDocumentFieldType(docf);
      Object result = toDBObject(docf, fieldSchema, fieldType, storeType, item);
      list.add(result);
    }
    return list;
  }
// //////////////////////////////////////////////////////// CLEANUP
/**
* Ensure Key encoding -> dots replaced with middle dots
*
* @param key
* char with only dots.
* @return encoded string with "\u00B7" chars..
*/
public String encodeFieldKey(final String key) {
if (key == null) {
return null;
}
return key.replace(".", "\u00B7");
}
/**
* Ensure Key decoding -> middle dots replaced with dots
*
* @param key
* encoded string with "\u00B7" chars.
* @return Cleanup up char with only dots.
*/
public String decodeFieldKey(final String key) {
if (key == null) {
return null;
}
return key.replace("\u00B7", ".");
}
}
| apache-2.0 |
ilya-moskovtsev/imoskovtsev | junior/chapter_001_collections_pro/src/main/java/ru/job4j/list/DynamicArrayList.java | 1733 | package ru.job4j.list;
import java.util.ConcurrentModificationException;
import java.util.Iterator;
import java.util.NoSuchElementException;
/**
 * A minimal growable list with fail-fast iteration, similar in spirit to
 * {@link java.util.ArrayList} but intentionally bare-bones.
 *
 * @param <E> element type
 */
public class DynamicArrayList<E> implements Iterable<E> {
    // Backing storage; grows by 50% whenever it fills up.
    @SuppressWarnings("unchecked")
    private E[] array = (E[]) new Object[10];
    // Number of stored elements; also the next insertion slot.
    private int index = 0;
    // Structural modification count used by iterators for fail-fast checks.
    private int modificationCounter = 0;

    /**
     * Appends an element to the end of the list, growing the backing array
     * when necessary.
     *
     * @param element value to append (null is allowed)
     */
    public void add(E element) {
        modificationCounter++;
        enlargeIfNeeded();
        array[index++] = element;
    }

    /**
     * Returns the element at the given position.
     *
     * @param index 0-based position of the element
     * @return the stored element
     * @throws IndexOutOfBoundsException if {@code index} is negative or not
     *         smaller than the current size. (Previously such calls could
     *         silently return null or stale slots while the position was
     *         still within the backing array's capacity.)
     */
    public E get(int index) {
        if (index < 0 || index >= this.index) {
            throw new IndexOutOfBoundsException(
                    "Index: " + index + ", Size: " + this.index);
        }
        return array[index];
    }

    /**
     * Returns a fail-fast iterator over the elements in insertion order.
     * The iterator throws {@link ConcurrentModificationException} when the
     * list is structurally modified after the iterator's creation.
     */
    public Iterator<E> iterator() {
        return new Iterator<>() {
            // Snapshot of the modification count taken at creation time.
            private int expectedModificationCounter = modificationCounter;
            private int iteratorIndex = 0;

            @Override
            public boolean hasNext() {
                if (modificationCounter != expectedModificationCounter) {
                    throw new ConcurrentModificationException();
                }
                return iteratorIndex < index;
            }

            @Override
            public E next() {
                if (!hasNext()) {
                    throw new NoSuchElementException();
                }
                if (modificationCounter != expectedModificationCounter) {
                    throw new ConcurrentModificationException();
                }
                return array[iteratorIndex++];
            }
        };
    }

    // Grows the backing array by 50% once it is full.
    private void enlargeIfNeeded() {
        if (array.length <= index) {
            @SuppressWarnings("unchecked")
            E[] longerArray = (E[]) new Object[(int) (array.length * 1.5)];
            System.arraycopy(array, 0, longerArray, 0, array.length);
            this.array = longerArray;
        }
    }
}
haikuowuya/android_system_code | src/javax/swing/Timer.java | 19660 | /*
* Copyright (c) 1997, 2011, Oracle and/or its affiliates. All rights reserved.
* ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*/
package javax.swing;
import java.util.*;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.locks.*;
import java.awt.*;
import java.awt.event.*;
import java.io.Serializable;
import java.io.*;
import java.security.AccessControlContext;
import java.security.AccessController;
import java.security.PrivilegedAction;
import javax.swing.event.EventListenerList;
/**
* Fires one or more {@code ActionEvent}s at specified
* intervals. An example use is an animation object that uses a
* <code>Timer</code> as the trigger for drawing its frames.
*<p>
* Setting up a timer
* involves creating a <code>Timer</code> object,
* registering one or more action listeners on it,
* and starting the timer using
* the <code>start</code> method.
* For example,
* the following code creates and starts a timer
* that fires an action event once per second
* (as specified by the first argument to the <code>Timer</code> constructor).
* The second argument to the <code>Timer</code> constructor
* specifies a listener to receive the timer's action events.
*
*<pre>
* int delay = 1000; //milliseconds
* ActionListener taskPerformer = new ActionListener() {
* public void actionPerformed(ActionEvent evt) {
* <em>//...Perform a task...</em>
* }
* };
* new Timer(delay, taskPerformer).start();</pre>
*
* <p>
* {@code Timers} are constructed by specifying both a delay parameter
* and an {@code ActionListener}. The delay parameter is used
* to set both the initial delay and the delay between event
* firing, in milliseconds. Once the timer has been started,
* it waits for the initial delay before firing its
* first <code>ActionEvent</code> to registered listeners.
* After this first event, it continues to fire events
* every time the between-event delay has elapsed, until it
* is stopped.
* <p>
* After construction, the initial delay and the between-event
* delay can be changed independently, and additional
* <code>ActionListeners</code> may be added.
* <p>
* If you want the timer to fire only the first time and then stop,
* invoke <code>setRepeats(false)</code> on the timer.
* <p>
* Although all <code>Timer</code>s perform their waiting
* using a single, shared thread
* (created by the first <code>Timer</code> object that executes),
* the action event handlers for <code>Timer</code>s
* execute on another thread -- the event-dispatching thread.
* This means that the action handlers for <code>Timer</code>s
* can safely perform operations on Swing components.
* However, it also means that the handlers must execute quickly
* to keep the GUI responsive.
*
* <p>
* In v 1.3, another <code>Timer</code> class was added
* to the Java platform: <code>java.util.Timer</code>.
* Both it and <code>javax.swing.Timer</code>
* provide the same basic functionality,
* but <code>java.util.Timer</code>
* is more general and has more features.
* The <code>javax.swing.Timer</code> has two features
* that can make it a little easier to use with GUIs.
* First, its event handling metaphor is familiar to GUI programmers
* and can make dealing with the event-dispatching thread
* a bit simpler.
* Second, its
* automatic thread sharing means that you don't have to
* take special steps to avoid spawning
* too many threads.
* Instead, your timer uses the same thread
* used to make cursors blink,
* tool tips appear,
* and so on.
*
* <p>
* You can find further documentation
* and several examples of using timers by visiting
* <a href="http://java.sun.com/docs/books/tutorial/uiswing/misc/timer.html"
* target = "_top">How to Use Timers</a>,
* a section in <em>The Java Tutorial.</em>
* For more examples and help in choosing between
* this <code>Timer</code> class and
* <code>java.util.Timer</code>,
* see
* <a href="http://java.sun.com/products/jfc/tsc/articles/timer/"
* target="_top">Using Timers in Swing Applications</a>,
* an article in <em>The Swing Connection.</em>
* <p>
* <strong>Warning:</strong>
* Serialized objects of this class will not be compatible with
* future Swing releases. The current serialization support is
* appropriate for short term storage or RMI between applications running
* the same version of Swing. As of 1.4, support for long term storage
* of all JavaBeans<sup><font size="-2">TM</font></sup>
* has been added to the <code>java.beans</code> package.
* Please see {@link java.beans.XMLEncoder}.
*
* @see java.util.Timer <code>java.util.Timer</code>
*
*
* @author Dave Moore
*/
public class Timer implements Serializable
{
/*
* NOTE: all fields need to be handled in readResolve
*/
protected EventListenerList listenerList = new EventListenerList();
// The following field strives to maintain the following:
// If coalesce is true, only allow one Runnable to be queued on the
// EventQueue and be pending (ie in the process of notifying the
// ActionListener). If we didn't do this it would allow for a
// situation where the app is taking too long to process the
    // actionPerformed, and thus we'd end up queuing a bunch of Runnables
// and the app would never return: not good. This of course implies
// you can get dropped events, but such is life.
// notify is used to indicate if the ActionListener can be notified, when
// the Runnable is processed if this is true it will notify the listeners.
// notify is set to true when the Timer fires and the Runnable is queued.
// It will be set to false after notifying the listeners (if coalesce is
// true) or if the developer invokes stop.
private transient final AtomicBoolean notify = new AtomicBoolean(false);
private volatile int initialDelay, delay;
private volatile boolean repeats = true, coalesce = true;
private transient final Runnable doPostEvent;
private static volatile boolean logTimers;
private transient final Lock lock = new ReentrantLock();
// This field is maintained by TimerQueue.
// eventQueued can also be reset by the TimerQueue, but will only ever
// happen in applet case when TimerQueues thread is destroyed.
// access to this field is synchronized on getLock() lock.
transient TimerQueue.DelayedTimer delayedTimer = null;
private volatile String actionCommand;
/**
* Creates a {@code Timer} and initializes both the initial delay and
* between-event delay to {@code delay} milliseconds. If {@code delay}
* is less than or equal to zero, the timer fires as soon as it
* is started. If <code>listener</code> is not <code>null</code>,
* it's registered as an action listener on the timer.
*
* @param delay milliseconds for the initial and between-event delay
* @param listener an initial listener; can be <code>null</code>
*
* @see #addActionListener
* @see #setInitialDelay
* @see #setRepeats
*/
public Timer(int delay, ActionListener listener) {
super();
this.delay = delay;
this.initialDelay = delay;
doPostEvent = new DoPostEvent();
if (listener != null) {
addActionListener(listener);
}
}
/*
* The timer's AccessControlContext.
*/
private transient volatile AccessControlContext acc =
AccessController.getContext();
/**
* Returns the acc this timer was constructed with.
*/
final AccessControlContext getAccessControlContext() {
if (acc == null) {
throw new SecurityException(
"Timer is missing AccessControlContext");
}
return acc;
}
    /**
     * DoPostEvent is a runnable class that fires actionEvents to
     * the listeners on the EventDispatchThread, via invokeLater.
     * @see Timer#post
     */
    class DoPostEvent implements Runnable
    {
        public void run() {
            if (logTimers) {
                System.out.println("Timer ringing: " + Timer.this);
            }
            // Only fire when the timer is still armed: stop() clears the
            // notify flag so a queued-but-not-yet-run event is dropped.
            if(notify.get()) {
                fireActionPerformed(new ActionEvent(Timer.this, 0, getActionCommand(),
                                                    System.currentTimeMillis(),
                                                    0));
                // With coalescing on, clear the pending-event flag so the
                // next tick may queue a fresh Runnable.
                if (coalesce) {
                    cancelEvent();
                }
            }
        }

        // Back-reference used by the shared TimerQueue.
        Timer getTimer() {
            return Timer.this;
        }
    }
/**
* Adds an action listener to the <code>Timer</code>.
*
* @param listener the listener to add
*
* @see #Timer
*/
public void addActionListener(ActionListener listener) {
listenerList.add(ActionListener.class, listener);
}
/**
* Removes the specified action listener from the <code>Timer</code>.
*
* @param listener the listener to remove
*/
public void removeActionListener(ActionListener listener) {
listenerList.remove(ActionListener.class, listener);
}
/**
* Returns an array of all the action listeners registered
* on this timer.
*
* @return all of the timer's <code>ActionListener</code>s or an empty
* array if no action listeners are currently registered
*
* @see #addActionListener
* @see #removeActionListener
*
* @since 1.4
*/
public ActionListener[] getActionListeners() {
return listenerList.getListeners(ActionListener.class);
}
/**
 * Notifies every registered {@code ActionListener} that this timer has
 * fired. Listeners are notified in last-to-first registration order,
 * following {@code EventListenerList}'s standard dispatch pattern.
 *
 * @param e the action event to deliver
 * @see EventListenerList
 */
protected void fireActionPerformed(ActionEvent e) {
    // getListenerList() is guaranteed non-null; layout is
    // {Class, listener, Class, listener, ...}.
    final Object[] pairs = listenerList.getListenerList();
    for (int i = pairs.length - 2; i >= 0; i -= 2) {
        if (pairs[i] == ActionListener.class) {
            ((ActionListener) pairs[i + 1]).actionPerformed(e);
        }
    }
}
/**
 * Returns an array of all the objects currently registered as
 * <code><em>Foo</em>Listener</code>s upon this <code>Timer</code>.
 * <code><em>Foo</em>Listener</code>s are registered using the
 * <code>add<em>Foo</em>Listener</code> method.
 * <p>
 * You can specify the <code>listenerType</code> argument
 * with a class literal, such as <code><em>Foo</em>Listener.class</code>.
 * For example, you can query a <code>Timer</code> instance <code>t</code>
 * for its action listeners with the following code:
 *
 * <pre>ActionListener[] als = t.getListeners(ActionListener.class);</pre>
 *
 * If no such listeners exist, this method returns an empty array.
 *
 * @param <T> the type of {@code EventListener} being requested
 * @param listenerType the type of listeners requested;
 *          this parameter should specify an interface
 *          that descends from <code>java.util.EventListener</code>
 * @return an array of all objects registered as
 *          <code><em>Foo</em>Listener</code>s on this timer,
 *          or an empty array if no such listeners have been added
 * @exception ClassCastException if <code>listenerType</code> doesn't
 *          specify a class or interface that implements
 *          <code>java.util.EventListener</code>
 *
 * @see #getActionListeners
 * @see #addActionListener
 * @see #removeActionListener
 *
 * @since 1.3
 */
public <T extends EventListener> T[] getListeners(Class<T> listenerType) {
    return listenerList.getListeners(listenerType);
}
/**
 * Returns the shared timer queue on which this timer is scheduled.
 *
 * @return the shared {@code TimerQueue} instance
 */
private TimerQueue timerQueue() {
    return TimerQueue.sharedInstance();
}
/**
 * Enables or disables the timer log. When enabled, a message
 * is posted to <code>System.out</code> whenever the timer goes off.
 *
 * @param flag <code>true</code> to enable logging
 * @see #getLogTimers
 */
public static void setLogTimers(boolean flag) {
    logTimers = flag;
}
/**
 * Returns <code>true</code> if logging is enabled.
 *
 * @return <code>true</code> if logging is enabled; otherwise, false
 * @see #setLogTimers
 */
public static boolean getLogTimers() {
    return logTimers;
}
/**
 * Sets the <code>Timer</code>'s between-event delay, the number of
 * milliseconds between successive action events. This does not affect
 * the initial delay property, which can be set by the
 * {@code setInitialDelay} method.
 *
 * @param delay the delay in milliseconds; must not be negative
 * @throws IllegalArgumentException if {@code delay} is negative
 * @see #setInitialDelay
 */
public void setDelay(int delay) {
    // Guard clause instead of if/else: reject negative delays up front.
    if (delay < 0) {
        throw new IllegalArgumentException("Invalid delay: " + delay);
    }
    this.delay = delay;
}
/**
 * Returns the delay, in milliseconds,
 * between firings of action events.
 *
 * @return the between-event delay, in milliseconds
 * @see #setDelay
 * @see #getInitialDelay
 */
public int getDelay() {
    return delay;
}
/**
 * Sets the <code>Timer</code>'s initial delay, the time
 * in milliseconds to wait after the timer is started
 * before firing the first event. Upon construction, this
 * is set to be the same as the between-event delay,
 * but then its value is independent and remains unaffected
 * by changes to the between-event delay.
 *
 * @param initialDelay the initial delay, in milliseconds; must not be
 *        negative
 * @throws IllegalArgumentException if {@code initialDelay} is negative
 * @see #setDelay
 */
public void setInitialDelay(int initialDelay) {
    if (initialDelay < 0) {
        throw new IllegalArgumentException("Invalid initial delay: " +
                                           initialDelay);
    }
    else {
        this.initialDelay = initialDelay;
    }
}
/**
 * Returns the <code>Timer</code>'s initial delay.
 *
 * @return the initial delay, in milliseconds
 * @see #setInitialDelay
 * @see #setDelay
 */
public int getInitialDelay() {
    return initialDelay;
}
/**
 * If <code>flag</code> is <code>false</code>,
 * instructs the <code>Timer</code> to send only one
 * action event to its listeners.
 *
 * @param flag specify <code>false</code> to make the timer
 *             stop after sending its first action event
 * @see #isRepeats
 */
public void setRepeats(boolean flag) {
    repeats = flag;
}
/**
 * Returns <code>true</code> (the default)
 * if the <code>Timer</code> will send
 * an action event
 * to its listeners multiple times.
 *
 * @return <code>true</code> if the timer fires repeatedly
 * @see #setRepeats
 */
public boolean isRepeats() {
    return repeats;
}
/**
 * Sets whether the <code>Timer</code> coalesces multiple pending
 * <code>ActionEvent</code> firings.
 * A busy application may not be able
 * to keep up with a <code>Timer</code>'s event generation,
 * causing multiple
 * action events to be queued. When processed,
 * the application sends these events one after the other, causing the
 * <code>Timer</code>'s listeners to receive a sequence of
 * events with no delay between them. Coalescing avoids this situation
 * by reducing multiple pending events to a single event.
 * <code>Timer</code>s
 * coalesce events by default.
 *
 * @param flag specify <code>false</code> to turn off coalescing
 * @see #isCoalesce
 */
public void setCoalesce(boolean flag) {
    boolean old = coalesce;
    coalesce = flag;
    if (!old && coalesce) {
        // Transitioning from non-coalescing to coalescing: clear any
        // pending notification. Otherwise, if the Timer ever notified
        // while in non-coalescing mode, the 'notify' flag could be stuck
        // at true and never become false again.
        cancelEvent();
    }
}
/**
 * Returns <code>true</code> if the <code>Timer</code> coalesces
 * multiple pending action events.
 *
 * @return <code>true</code> if coalescing is enabled
 * @see #setCoalesce
 */
public boolean isCoalesce() {
    return coalesce;
}
/**
 * Sets the string that will be delivered as the action command
 * in <code>ActionEvent</code>s fired by this timer.
 * <code>null</code> is an acceptable value.
 *
 * @param command the action command
 * @see #getActionCommand
 * @since 1.6
 */
public void setActionCommand(String command) {
    this.actionCommand = command;
}
/**
 * Returns the string that will be delivered as the action command
 * in <code>ActionEvent</code>s fired by this timer. May be
 * <code>null</code>, which is also the default.
 *
 * @return the action command used in firing events
 * @see #setActionCommand
 * @since 1.6
 */
public String getActionCommand() {
    return actionCommand;
}
/**
 * Starts the <code>Timer</code>,
 * causing it to start sending action events
 * to its listeners. The first event fires after the initial delay.
 *
 * @see #stop
 * @see #setInitialDelay
 */
public void start() {
    timerQueue().addTimer(this, getInitialDelay());
}
/**
 * Returns <code>true</code> if the <code>Timer</code> is running,
 * i.e. it is currently scheduled on the shared timer queue.
 *
 * @return <code>true</code> if this timer is scheduled to fire
 * @see #start
 */
public boolean isRunning() {
    return timerQueue().containsTimer(this);
}
/**
 * Stops the <code>Timer</code>,
 * causing it to stop sending action events
 * to its listeners. Any already-queued notification is cancelled as
 * well, so no further events are delivered after this call returns.
 *
 * @see #start
 */
public void stop() {
    getLock().lock();
    try {
        cancelEvent();
        timerQueue().removeTimer(this);
    } finally {
        getLock().unlock();
    }
}
/**
 * Restarts the <code>Timer</code>,
 * canceling any pending firings and causing
 * it to fire with its initial delay.
 */
public void restart() {
    getLock().lock();
    try {
        // NOTE(review): stop() reacquires the same lock, so this relies on
        // the lock implementation being reentrant -- confirm against the
        // declaration of the 'lock' field.
        stop();
        start();
    } finally {
        getLock().unlock();
    }
}
/**
 * Resets the internal state to indicate this Timer shouldn't notify
 * any of its listeners. This does not stop a repeatable Timer from
 * firing again, use <code>stop</code> for that.
 */
void cancelEvent() {
    notify.set(false);
}
/**
 * Queues a notification for delivery on the Event Dispatch Thread.
 * When coalescing is enabled, a new runnable is posted only if no
 * notification is already pending (i.e. the compareAndSet succeeds);
 * in non-coalescing mode every call posts another runnable. The post
 * runs under the AccessControlContext captured at construction.
 */
void post() {
     if (notify.compareAndSet(false, true) || !coalesce) {
         AccessController.doPrivileged(new PrivilegedAction<Void>() {
             public Void run() {
                 SwingUtilities.invokeLater(doPostEvent);
                 return null;
            }
        }, getAccessControlContext());
    }
}
/**
 * Returns the lock guarding this timer's start/stop state.
 */
Lock getLock() {
    return lock;
}
/**
 * Restores this timer from a stream, refreshing the captured
 * AccessControlContext with the context of the deserializing thread
 * before the default field state is read.
 *
 * @param in the stream to read from
 * @throws ClassNotFoundException if a serialized class cannot be found
 * @throws IOException if an I/O error occurs while reading
 */
private void readObject(ObjectInputStream in)
    throws ClassNotFoundException, IOException
{
    this.acc = AccessController.getContext();
    in.defaultReadObject();
}
/*
 * We have to use readResolve because we can not initialize final
 * fields for deserialized object otherwise: a fresh Timer is
 * constructed and the serializable state is copied onto it.
 */
private Object readResolve() {
    Timer timer = new Timer(getDelay(), null);
    timer.listenerList = listenerList;
    timer.initialDelay = initialDelay;
    timer.delay = delay;
    timer.repeats = repeats;
    timer.coalesce = coalesce;
    timer.actionCommand = actionCommand;
    return timer;
}
}
| apache-2.0 |
eltonsalles/lojagames | src/main/java/br/senac/tads4/piiv/service/event/estoque/AtualizarEstoqueListener.java | 2475 | package br.senac.tads4.piiv.service.event.estoque;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.event.EventListener;
import org.springframework.stereotype.Component;
import br.senac.tads4.piiv.model.ItemPedido;
import br.senac.tads4.piiv.model.Pedido;
import br.senac.tads4.piiv.model.Produto;
import br.senac.tads4.piiv.repository.PedidoRepository;
import br.senac.tads4.piiv.repository.ProdutoRepository;
/**
 * Application-event listener that keeps product stock levels in sync
 * with inventory movements, sales and order cancellations.
 */
@Component
public class AtualizarEstoqueListener {
    @Autowired
    private ProdutoRepository produtoRepository;
    @Autowired
    private PedidoRepository pedidoRepository;
    /**
     * Handles a stock entry made through the movement form.
     *
     * @param evento the stock-update event
     */
    @EventListener(condition = "#evento.getEntrada()")
    public void movimentacaoEntrada(AtualizarEstoqueEvent evento) {
        this.entrarEstoque(evento.getHistoricoProduto().getProduto(), evento.getHistoricoProduto().getQuantidade());
    }
    /**
     * Handles a stock removal (breakage) made through the movement form.
     *
     * @param evento the stock-update event
     */
    @EventListener(condition = "#evento.getQuebra()")
    public void movimentacaoQuebra(AtualizarEstoqueEvent evento) {
        this.baixarEstoque(evento.getHistoricoProduto().getProduto(), evento.getHistoricoProduto().getQuantidade());
    }
    /**
     * Handles a sale: decrements stock for every item in the order.
     *
     * @param evento the stock-update event
     */
    @EventListener(condition = "#evento.getVenda()")
    public void venda(AtualizarEstoqueEvent evento) {
        for (ItemPedido item : evento.getItensPedido()) {
            this.baixarEstoque(item.getProduto(), item.getQuantidade());
        }
    }
    /**
     * Handles an order cancellation: returns every ordered item to stock.
     *
     * @param evento the stock-update event
     */
    @EventListener(condition = "#evento.getCancelarPedido()")
    public void cancelarPedido(AtualizarEstoqueEvent evento) {
        Pedido pedido = pedidoRepository.findOne(evento.getIdPedido());
        for (ItemPedido item : pedido.getItensPedido()) {
            this.entrarEstoque(item.getProduto(), item.getQuantidade());
        }
    }
    // Decrements the stored stock count for the given product.
    // NOTE(review): no guard against the count going negative, and findOne
    // may return null for an unknown id -- confirm callers validate both.
    private void baixarEstoque(Produto produto, Integer quantidade) {
        Produto p = produtoRepository.findOne(produto.getIdProduto());
        p.setEstoque(p.getEstoque() - quantidade);
        produtoRepository.save(p);
    }
    // Increments the stored stock count for the given product.
    private void entrarEstoque(Produto produto, Integer quantidade) {
        Produto p = produtoRepository.findOne(produto.getIdProduto());
        p.setEstoque(p.getEstoque() + quantidade);
        produtoRepository.save(p);
    }
}
| apache-2.0 |
wmm387/wmm | app/src/main/java/com/wangyuanwmm/wmm/BasePresenter.java | 202 | package com.wangyuanwmm.wmm;
/** Base contract for presenters in this app's MVP architecture. */
public interface BasePresenter {
    // Fetches data and updates the view. In the todo-mvp sample this is
    // invoked from the Fragment's onResume() method.
    void start();
}
| apache-2.0 |
juhalindfors/bazel-patches | src/main/java/com/google/devtools/build/lib/sandbox/SandboxHelpers.java | 5417 | // Copyright 2016 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.sandbox;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSet.Builder;
import com.google.devtools.build.lib.actions.ActionExecutionContext;
import com.google.devtools.build.lib.actions.ActionInput;
import com.google.devtools.build.lib.actions.Artifact;
import com.google.devtools.build.lib.actions.ExecException;
import com.google.devtools.build.lib.actions.Executor;
import com.google.devtools.build.lib.actions.Spawn;
import com.google.devtools.build.lib.analysis.config.BuildConfiguration;
import com.google.devtools.build.lib.buildtool.BuildRequest;
import com.google.devtools.build.lib.exec.SpawnInputExpander;
import com.google.devtools.build.lib.rules.fileset.FilesetActionContext;
import com.google.devtools.build.lib.standalone.StandaloneSpawnStrategy;
import com.google.devtools.build.lib.util.Preconditions;
import com.google.devtools.build.lib.vfs.Path;
import com.google.devtools.build.lib.vfs.PathFragment;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
/** Helper methods that are shared by the different sandboxing strategies in this package. */
public final class SandboxHelpers {
  /**
   * Executes the spawn with the standalone (non-sandboxed) strategy registered
   * on the executor, as a fallback when sandboxed execution is not possible.
   */
  static void fallbackToNonSandboxedExecution(
      Spawn spawn, ActionExecutionContext actionExecutionContext, Executor executor)
      throws ExecException, InterruptedException {
    StandaloneSpawnStrategy standaloneStrategy =
        Preconditions.checkNotNull(executor.getContext(StandaloneSpawnStrategy.class));
    standaloneStrategy.exec(spawn, actionExecutionContext);
  }
  /** Reports the spawn's subcommand to the executor, if subcommand reporting is enabled. */
  static void reportSubcommand(Executor executor, Spawn spawn) {
    if (executor.reportsSubcommands()) {
      executor.reportSubcommand(spawn);
    }
  }
  /**
   * Returns the inputs of a Spawn as a map of PathFragments relative to an execRoot to paths in the
   * host filesystem where the input files can be found.
   */
  public static Map<PathFragment, Path> getInputFiles(
      SpawnInputExpander spawnInputExpander,
      Path execRoot,
      Spawn spawn,
      ActionExecutionContext executionContext)
      throws IOException {
    Map<PathFragment, ActionInput> inputMap =
        spawnInputExpander.getInputMapping(
            spawn,
            executionContext.getArtifactExpander(),
            executionContext.getActionInputFileCache(),
            executionContext.getExecutor().getContext(FilesetActionContext.class));
    // SpawnInputExpander#getInputMapping uses ArtifactExpander#expandArtifacts to expand
    // middlemen and tree artifacts, which expands empty tree artifacts to no entry. However,
    // actions that accept TreeArtifacts as inputs generally expect that the empty directory is
    // created. So we add those explicitly here.
    // TODO(ulfjack): Move this code to SpawnInputExpander.
    for (ActionInput input : spawn.getInputFiles()) {
      if (input instanceof Artifact && ((Artifact) input).isTreeArtifact()) {
        List<Artifact> containedArtifacts = new ArrayList<>();
        executionContext.getArtifactExpander().expand((Artifact) input, containedArtifacts);
        // Attempting to mount a non-empty directory results in ERR_DIRECTORY_NOT_EMPTY, so we
        // only mount empty TreeArtifacts as directories.
        if (containedArtifacts.isEmpty()) {
          inputMap.put(input.getExecPath(), input);
        }
      }
    }
    // Translate ActionInputs to host paths; EMPTY_FILE entries map to null.
    Map<PathFragment, Path> inputFiles = new TreeMap<>();
    for (Map.Entry<PathFragment, ActionInput> e : inputMap.entrySet()) {
      Path inputPath =
          e.getValue() == SpawnInputExpander.EMPTY_FILE
              ? null
              : execRoot.getRelative(e.getValue().getExecPath());
      inputFiles.put(e.getKey(), inputPath);
    }
    return inputFiles;
  }
  /** Returns the exec-root-relative paths of the spawn's declared output files. */
  public static ImmutableSet<PathFragment> getOutputFiles(Spawn spawn) {
    Builder<PathFragment> outputFiles = ImmutableSet.builder();
    for (ActionInput output : spawn.getOutputFiles()) {
      outputFiles.add(PathFragment.create(output.getExecPathString()));
    }
    return outputFiles.build();
  }
  /**
   * Decides whether the sandboxed spawn should be granted network access.
   * Network is allowed by default and denied only via the "block-network"
   * execution tag, which is in turn overridden by --java_debug test runs.
   */
  static boolean shouldAllowNetwork(BuildRequest buildRequest, Spawn spawn) {
    // Allow network access, when --java_debug is specified, otherwise we can't connect to the
    // remote debug server of the test. This intentionally overrides the "block-network" execution
    // tag.
    if (buildRequest
        .getOptions(BuildConfiguration.Options.class)
        .testArguments
        .contains("--wrapper_script_flag=--debug")) {
      return true;
    }
    // If the Spawn requests to block network access, do so.
    if (spawn.getExecutionInfo().containsKey("block-network")) {
      return false;
    }
    // Network access is allowed by default.
    return true;
  }
}
| apache-2.0 |
greghogan/flink | flink-runtime/src/test/java/org/apache/flink/runtime/executiongraph/failover/flip1/RestartPipelinedRegionFailoverStrategyTest.java | 15905 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.runtime.executiongraph.failover.flip1;
import org.apache.flink.runtime.execution.ExecutionState;
import org.apache.flink.runtime.executiongraph.ExecutionAttemptID;
import org.apache.flink.runtime.io.network.partition.ResultPartitionID;
import org.apache.flink.runtime.io.network.partition.ResultPartitionType;
import org.apache.flink.runtime.io.network.partition.consumer.PartitionConnectionException;
import org.apache.flink.runtime.jobgraph.IntermediateResultPartitionID;
import org.apache.flink.runtime.scheduler.strategy.SchedulingExecutionVertex;
import org.apache.flink.runtime.scheduler.strategy.SchedulingResultPartition;
import org.apache.flink.runtime.scheduler.strategy.TestingSchedulingExecutionVertex;
import org.apache.flink.runtime.scheduler.strategy.TestingSchedulingResultPartition;
import org.apache.flink.runtime.scheduler.strategy.TestingSchedulingTopology;
import org.apache.flink.util.TestLogger;
import org.junit.Test;
import java.util.HashSet;
import java.util.Iterator;
import java.util.stream.Stream;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsInAnyOrder;
/**
 * Tests the failure handling logic of the {@link RestartPipelinedRegionFailoverStrategy}.
 */
public class RestartPipelinedRegionFailoverStrategyTest extends TestLogger {
    /**
     * Tests for scenes that a task fails for its own error, in which case the
     * region containing the failed task and its consumer regions should be restarted.
     * <pre>
     *     (v1) -+-> (v4)
     *           x
     *     (v2) -+-> (v5)
     *
     *     (v3) -+-> (v6)
     *
     *           ^
     *           |
     *       (blocking)
     * </pre>
     * Each vertex is in an individual region.
     */
    @Test
    public void testRegionFailoverForRegionInternalErrors() {
        final TestingSchedulingTopology topology = new TestingSchedulingTopology();
        TestingSchedulingExecutionVertex v1 = topology.newExecutionVertex(ExecutionState.FINISHED);
        TestingSchedulingExecutionVertex v2 = topology.newExecutionVertex(ExecutionState.FINISHED);
        TestingSchedulingExecutionVertex v3 = topology.newExecutionVertex(ExecutionState.FINISHED);
        TestingSchedulingExecutionVertex v4 = topology.newExecutionVertex(ExecutionState.FINISHED);
        TestingSchedulingExecutionVertex v5 = topology.newExecutionVertex(ExecutionState.SCHEDULED);
        TestingSchedulingExecutionVertex v6 = topology.newExecutionVertex(ExecutionState.RUNNING);
        topology.connect(v1, v4, ResultPartitionType.BLOCKING);
        topology.connect(v1, v5, ResultPartitionType.BLOCKING);
        topology.connect(v2, v4, ResultPartitionType.BLOCKING);
        topology.connect(v2, v5, ResultPartitionType.BLOCKING);
        topology.connect(v3, v6, ResultPartitionType.BLOCKING);
        RestartPipelinedRegionFailoverStrategy strategy = new RestartPipelinedRegionFailoverStrategy(topology);
        verifyThatFailedExecution(strategy, v1).restarts(v1, v4, v5);
        verifyThatFailedExecution(strategy, v2).restarts(v2, v4, v5);
        verifyThatFailedExecution(strategy, v3).restarts(v3, v6);
        verifyThatFailedExecution(strategy, v4).restarts(v4);
        verifyThatFailedExecution(strategy, v5).restarts(v5);
        verifyThatFailedExecution(strategy, v6).restarts(v6);
    }
    /**
     * Tests for scenes that a task fails for data consumption error, in which case the
     * region containing the failed task, the region containing the unavailable result partition
     * and all their consumer regions should be restarted.
     * <pre>
     *     (v1) -+-> (v4)
     *           x
     *     (v2) -+-> (v5)
     *
     *     (v3) -+-> (v6)
     *
     *           ^
     *           |
     *       (blocking)
     * </pre>
     * Each vertex is in an individual region.
     */
    @Test
    public void testRegionFailoverForDataConsumptionErrors() throws Exception {
        TestingSchedulingTopology topology = new TestingSchedulingTopology();
        TestingSchedulingExecutionVertex v1 = topology.newExecutionVertex(ExecutionState.FINISHED);
        TestingSchedulingExecutionVertex v2 = topology.newExecutionVertex(ExecutionState.FINISHED);
        TestingSchedulingExecutionVertex v3 = topology.newExecutionVertex(ExecutionState.FINISHED);
        TestingSchedulingExecutionVertex v4 = topology.newExecutionVertex(ExecutionState.RUNNING);
        TestingSchedulingExecutionVertex v5 = topology.newExecutionVertex(ExecutionState.RUNNING);
        TestingSchedulingExecutionVertex v6 = topology.newExecutionVertex(ExecutionState.RUNNING);
        topology.connect(v1, v4, ResultPartitionType.BLOCKING);
        topology.connect(v1, v5, ResultPartitionType.BLOCKING);
        topology.connect(v2, v4, ResultPartitionType.BLOCKING);
        topology.connect(v2, v5, ResultPartitionType.BLOCKING);
        topology.connect(v3, v6, ResultPartitionType.BLOCKING);
        RestartPipelinedRegionFailoverStrategy strategy = new RestartPipelinedRegionFailoverStrategy(topology);
        // Failing v4 with a connection error on each of its two inputs should
        // restart the corresponding producer region plus all its consumers.
        Iterator<TestingSchedulingResultPartition> v4InputEdgeIterator = v4.getConsumedResults().iterator();
        TestingSchedulingResultPartition v1out = v4InputEdgeIterator.next();
        verifyThatFailedExecution(strategy, v4)
            .partitionConnectionCause(v1out)
            .restarts(v1, v4, v5);
        TestingSchedulingResultPartition v2out = v4InputEdgeIterator.next();
        verifyThatFailedExecution(strategy, v4)
            .partitionConnectionCause(v2out)
            .restarts(v2, v4, v5);
        Iterator<TestingSchedulingResultPartition> v5InputEdgeIterator = v5.getConsumedResults().iterator();
        v1out = v5InputEdgeIterator.next();
        verifyThatFailedExecution(strategy, v5)
            .partitionConnectionCause(v1out)
            .restarts(v1, v4, v5);
        v2out = v5InputEdgeIterator.next();
        verifyThatFailedExecution(strategy, v5)
            .partitionConnectionCause(v2out)
            .restarts(v2, v4, v5);
        TestingSchedulingResultPartition v3out = v6.getConsumedResults().iterator().next();
        verifyThatFailedExecution(strategy, v6)
            .partitionConnectionCause(v3out)
            .restarts(v3, v6);
    }
    /**
     * Tests to verify region failover results regarding different input result partition availability combinations.
     * <pre>
     *     (v1) --rp1--\
     *                 (v3)
     *     (v2) --rp2--/
     *
     *             ^
     *             |
     *         (blocking)
     * </pre>
     * Each vertex is in an individual region.
     * rp1, rp2 are result partitions.
     */
    @Test
    public void testRegionFailoverForVariousResultPartitionAvailabilityCombinations() throws Exception {
        TestingSchedulingTopology topology = new TestingSchedulingTopology();
        TestingSchedulingExecutionVertex v1 = topology.newExecutionVertex(ExecutionState.FINISHED);
        TestingSchedulingExecutionVertex v2 = topology.newExecutionVertex(ExecutionState.FINISHED);
        TestingSchedulingExecutionVertex v3 = topology.newExecutionVertex(ExecutionState.RUNNING);
        topology.connect(v1, v3, ResultPartitionType.BLOCKING);
        topology.connect(v2, v3, ResultPartitionType.BLOCKING);
        TestResultPartitionAvailabilityChecker availabilityChecker = new TestResultPartitionAvailabilityChecker();
        RestartPipelinedRegionFailoverStrategy strategy = new RestartPipelinedRegionFailoverStrategy(topology, availabilityChecker);
        IntermediateResultPartitionID rp1ID = v1.getProducedResults().iterator().next().getId();
        IntermediateResultPartitionID rp2ID = v2.getProducedResults().iterator().next().getId();
        // -------------------------------------------------
        // Combination1: (rp1 == available, rp2 == available)
        // -------------------------------------------------
        availabilityChecker.failedPartitions.clear();
        verifyThatFailedExecution(strategy, v1).restarts(v1, v3);
        verifyThatFailedExecution(strategy, v2).restarts(v2, v3);
        verifyThatFailedExecution(strategy, v3).restarts(v3);
        // -------------------------------------------------
        // Combination2: (rp1 == unavailable, rp2 == available)
        // -------------------------------------------------
        availabilityChecker.failedPartitions.clear();
        availabilityChecker.markResultPartitionFailed(rp1ID);
        verifyThatFailedExecution(strategy, v1).restarts(v1, v3);
        verifyThatFailedExecution(strategy, v2).restarts(v1, v2, v3);
        verifyThatFailedExecution(strategy, v3).restarts(v1, v3);
        // -------------------------------------------------
        // Combination3: (rp1 == available, rp2 == unavailable)
        // -------------------------------------------------
        availabilityChecker.failedPartitions.clear();
        availabilityChecker.markResultPartitionFailed(rp2ID);
        verifyThatFailedExecution(strategy, v1).restarts(v1, v2, v3);
        verifyThatFailedExecution(strategy, v2).restarts(v2, v3);
        verifyThatFailedExecution(strategy, v3).restarts(v2, v3);
        // -------------------------------------------------
        // Combination4: (rp1 == unavailable, rp == unavailable)
        // -------------------------------------------------
        availabilityChecker.failedPartitions.clear();
        availabilityChecker.markResultPartitionFailed(rp1ID);
        availabilityChecker.markResultPartitionFailed(rp2ID);
        verifyThatFailedExecution(strategy, v1).restarts(v1, v2, v3);
        verifyThatFailedExecution(strategy, v2).restarts(v1, v2, v3);
        verifyThatFailedExecution(strategy, v3).restarts(v1, v2, v3);
    }
    /**
     * Tests region failover scenes for topology with multiple vertices.
     * <pre>
     *     (v1) ---> (v2) --|--> (v3) ---> (v4) --|--> (v5) ---> (v6)
     *
     *           ^          ^          ^          ^          ^
     *           |          |          |          |          |
     *     (pipelined) (blocking) (pipelined) (blocking) (pipelined)
     * </pre>
     * Component 1: 1,2; component 2: 3,4; component 3: 5,6
     */
    @Test
    public void testRegionFailoverForMultipleVerticesRegions() throws Exception {
        TestingSchedulingTopology topology = new TestingSchedulingTopology();
        TestingSchedulingExecutionVertex v1 = topology.newExecutionVertex(ExecutionState.FINISHED);
        TestingSchedulingExecutionVertex v2 = topology.newExecutionVertex(ExecutionState.FINISHED);
        TestingSchedulingExecutionVertex v3 = topology.newExecutionVertex(ExecutionState.RUNNING);
        TestingSchedulingExecutionVertex v4 = topology.newExecutionVertex(ExecutionState.RUNNING);
        TestingSchedulingExecutionVertex v5 = topology.newExecutionVertex(ExecutionState.FAILED);
        TestingSchedulingExecutionVertex v6 = topology.newExecutionVertex(ExecutionState.CANCELED);
        topology.connect(v1, v2, ResultPartitionType.PIPELINED);
        topology.connect(v2, v3, ResultPartitionType.BLOCKING);
        topology.connect(v3, v4, ResultPartitionType.PIPELINED);
        topology.connect(v4, v5, ResultPartitionType.BLOCKING);
        topology.connect(v5, v6, ResultPartitionType.PIPELINED);
        RestartPipelinedRegionFailoverStrategy strategy = new RestartPipelinedRegionFailoverStrategy(topology);
        // An internal error in v3 restarts its region and all downstream regions.
        verifyThatFailedExecution(strategy, v3).restarts(v3, v4, v5, v6);
        // A connection error on v3's input additionally restarts the producer region.
        TestingSchedulingResultPartition v2out = v3.getConsumedResults().iterator().next();
        verifyThatFailedExecution(strategy, v3)
            .partitionConnectionCause(v2out)
            .restarts(v1, v2, v3, v4, v5, v6);
    }
    /**
     * Tests region failover does not restart vertexes which are already in initial CREATED state.
     * <pre>
     *     (v1) --|--> (v2)
     *
     *             ^
     *             |
     *         (blocking)
     * </pre>
     * Component 1: 1; component 2: 2
     */
    @Test
    public void testRegionFailoverDoesNotRestartCreatedExecutions() {
        TestingSchedulingTopology topology = new TestingSchedulingTopology();
        TestingSchedulingExecutionVertex v1 = topology.newExecutionVertex(ExecutionState.CREATED);
        TestingSchedulingExecutionVertex v2 = topology.newExecutionVertex(ExecutionState.CREATED);
        topology.connect(v1, v2, ResultPartitionType.BLOCKING);
        FailoverStrategy strategy = new RestartPipelinedRegionFailoverStrategy(topology);
        verifyThatFailedExecution(strategy, v2).restarts();
        TestingSchedulingResultPartition v1out = v2.getConsumedResults().iterator().next();
        verifyThatFailedExecution(strategy, v2).partitionConnectionCause(v1out).restarts();
    }
    /**
     * Tests approximate local recovery downstream failover .
     * <pre>
     *     (v1) -----> (v2) -----> (v4)
     *      |                       ^
     *      |--------> (v3) --------|
     * </pre>
     */
    @Test
    public void testRegionFailoverForPipelinedApproximate() {
        final TestingSchedulingTopology topology = new TestingSchedulingTopology();
        TestingSchedulingExecutionVertex v1 = topology.newExecutionVertex(ExecutionState.RUNNING);
        TestingSchedulingExecutionVertex v2 = topology.newExecutionVertex(ExecutionState.RUNNING);
        TestingSchedulingExecutionVertex v3 = topology.newExecutionVertex(ExecutionState.RUNNING);
        TestingSchedulingExecutionVertex v4 = topology.newExecutionVertex(ExecutionState.RUNNING);
        topology.connect(v1, v2, ResultPartitionType.PIPELINED_APPROXIMATE);
        topology.connect(v1, v3, ResultPartitionType.PIPELINED_APPROXIMATE);
        topology.connect(v2, v4, ResultPartitionType.PIPELINED_APPROXIMATE);
        topology.connect(v3, v4, ResultPartitionType.PIPELINED_APPROXIMATE);
        RestartPipelinedRegionFailoverStrategy strategy = new RestartPipelinedRegionFailoverStrategy(topology);
        verifyThatFailedExecution(strategy, v1).restarts(v1, v2, v3, v4);
        verifyThatFailedExecution(strategy, v2).restarts(v2, v4);
        verifyThatFailedExecution(strategy, v3).restarts(v3, v4);
        verifyThatFailedExecution(strategy, v4).restarts(v4);
    }
    /**
     * Entry point of the small verification DSL used above: captures the
     * strategy and the vertex whose failure is being simulated.
     */
    private static VerificationContext verifyThatFailedExecution(
            FailoverStrategy strategy,
            SchedulingExecutionVertex executionVertex) {
        return new VerificationContext(strategy, executionVertex);
    }
    /**
     * Fluent helper that simulates a failure of one vertex (with an optional
     * partition-connection cause) and asserts the set of vertices the
     * strategy decides to restart.
     */
    private static class VerificationContext {
        private final FailoverStrategy strategy;
        private final SchedulingExecutionVertex executionVertex;
        // Default failure cause: a plain exception, i.e. a region-internal error.
        private Throwable cause = new Exception("Test failure");
        private VerificationContext(
                FailoverStrategy strategy,
                SchedulingExecutionVertex executionVertex) {
            this.strategy = strategy;
            this.executionVertex = executionVertex;
        }
        /** Makes the failure a connection error to the given producer partition. */
        private VerificationContext partitionConnectionCause(
                SchedulingResultPartition failedProducer) {
            return cause(new PartitionConnectionException(
                new ResultPartitionID(failedProducer.getId(), new ExecutionAttemptID()),
                new Exception("Test failure")));
        }
        /** Overrides the failure cause used for the simulated failure. */
        private VerificationContext cause(Throwable cause) {
            this.cause = cause;
            return this;
        }
        /** Asserts that exactly the given vertices are selected for restart. */
        private void restarts(SchedulingExecutionVertex ... expectedResult) {
            assertThat(
                strategy.getTasksNeedingRestart(executionVertex.getId(), cause),
                containsInAnyOrder(Stream.of(expectedResult).map(SchedulingExecutionVertex::getId).toArray()));
        }
    }
    /**
     * Availability checker stub: a partition is available unless it has been
     * explicitly marked as failed.
     */
    private static class TestResultPartitionAvailabilityChecker implements ResultPartitionAvailabilityChecker {
        private final HashSet<IntermediateResultPartitionID> failedPartitions;
        public TestResultPartitionAvailabilityChecker() {
            this.failedPartitions = new HashSet<>();
        }
        @Override
        public boolean isAvailable(IntermediateResultPartitionID resultPartitionID) {
            return !failedPartitions.contains(resultPartitionID);
        }
        /** Marks the given partition as unavailable. */
        public void markResultPartitionFailed(IntermediateResultPartitionID resultPartitionID) {
            failedPartitions.add(resultPartitionID);
        }
        /** Marks the given partition as available again. Currently unused by the tests above. */
        public void removeResultPartitionFromFailedState(IntermediateResultPartitionID resultPartitionID) {
            failedPartitions.remove(resultPartitionID);
        }
    }
}
| apache-2.0 |
marc-ashman/500px | 500px-gallery/500px-App/src/main/java/com/ashman/fivehundredpx/FullPictureViewActivity.java | 5182 | package com.ashman.fivehundredpx;
import android.content.res.Configuration;
import android.graphics.Bitmap;
import android.graphics.Point;
import android.os.Build;
import android.os.Bundle;
import android.view.View;
import android.widget.TextView;
import com.android.volley.Response;
import com.android.volley.VolleyError;
import com.android.volley.toolbox.ImageRequest;
import com.ashman.fivehundredpx.net.json.replies.Photo;
public class FullPictureViewActivity extends BaseFragmentActivity {
// Suffix appended to the app package name to form the intent-extra key
// under which the Photo parcelable is passed to this activity.
public static final String BUNDLE_PHOTO = ".photo";
// Downloaded full-size image; null until the first successful download.
private Bitmap bitmap;
// Zoom/pan container that displays the full-size image.
private FullImageViewContainer container;
// Label shown when the download fails.
private TextView errorText;
// Spinner layout shown while the image is downloading.
private View loadingLayout;
// The photo whose full-size image this activity displays (from the intent extras).
private Photo photo;
// Volley success callback: hands the downloaded bitmap to setImage().
private Response.Listener<Bitmap> requestListener = new Response.Listener<Bitmap>() {
    @Override
    public void onResponse(Bitmap response) {
        setImage(response);
    }
};
// Volley failure callback for the normal-resolution request: shows the error label.
private Response.ErrorListener normalErrorListener = new Response.ErrorListener() {
    @Override
    public void onErrorResponse(VolleyError error) {
        errorText.setText(getString(R.string.fullpic_error));
        showView(errorText);
    }
};
/**
 * Reads the required Photo extra from the launching intent and looks up
 * the view references. Fails fast with IllegalStateException when the
 * extra is missing, since the activity cannot function without it.
 */
@Override
public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_full_picture_view);
    String packageName = FiveHundredApplication.get().getPackageName();
    Bundle bundle = getIntent().getExtras();
    if(bundle == null)
        throw new IllegalStateException("Bundle with photo object required by this activity");
    photo = bundle.getParcelable(packageName + BUNDLE_PHOTO);
    if(bundle == null)
        throw new IllegalStateException("Bundle with photo object required by this activity");
    container = (FullImageViewContainer) findViewById(R.id.fullpic_container);
    loadingLayout = findViewById(R.id.fullpic_loadingLayout);
    errorText = (TextView) findViewById(R.id.fullpic_errorText);
}
@Override
public void onResume() {
super.onResume();
if(bitmap == null)
downloadImage(photo);
else
setImage(bitmap);
}
@Override
public void onConfigurationChanged(Configuration newConfig) {
super.onConfigurationChanged(newConfig);
if(bitmap != null)
setImage(bitmap);
else
downloadImage(photo);
}
private void downloadImage(final Photo photoToDownload) {
showView(loadingLayout);
final String largestUrl = photoToDownload.getLargestSizeUrl();
if(largestUrl.length() > 5 &&
largestUrl.endsWith("1.jpg") ||
largestUrl.endsWith("2.jpg") ||
largestUrl.endsWith("3.jpg") ||
largestUrl.endsWith("4.jpg")) {
//I found out that there exists a 5.jpg image on the same photo which
// is higher res, so try to download that one first
String highResUrl = largestUrl.substring(0, largestUrl.length() - 5) + "5.jpg";
Response.ErrorListener highResErrorListener = new Response.ErrorListener() {
@Override
public void onErrorResponse(VolleyError error) {
downloadImage(largestUrl, requestListener, normalErrorListener);
}
};
downloadImage(highResUrl, requestListener, highResErrorListener);
} else {
downloadImage(largestUrl, requestListener, normalErrorListener);
}
}
private void downloadImage(String url, Response.Listener<Bitmap> listener,
Response.ErrorListener errorListener) {
ImageRequest request = new ImageRequest(url, listener, 0, 0,
Bitmap.Config.ARGB_8888, errorListener);
getRequestQueue().add(request).setTag(getRequestQueue());
}
private void showView(View view) {
if(view != errorText && view != loadingLayout && view != container)
throw new IllegalArgumentException(
"Must be one of errorText, loadingLayout or container");
errorText.setVisibility(View.GONE);
loadingLayout.setVisibility(View.GONE);
container.setVisibility(View.GONE);
view.setVisibility(View.VISIBLE);
}
private void setImage(Bitmap bitmap) {
this.bitmap = bitmap;
int width, height;
if(Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB_MR2) {
Point point = new Point();
getWindowManager().getDefaultDisplay().getSize(point);
width = point.x;
height = point.y;
} else {
width = getWindowManager().getDefaultDisplay().getWidth();
height = getWindowManager().getDefaultDisplay().getHeight();
}
float scaleWidth = (float)width / (float)bitmap.getWidth();
float scaleHeight = (float)height / (float)bitmap.getHeight();
float minZoom = (scaleHeight > scaleWidth) ? scaleWidth : scaleHeight;
container.setImage(bitmap, minZoom, 50);
showView(container);
}
}
| apache-2.0 |
VibyJocke/gocd | common/src/com/thoughtworks/go/config/materials/ScmMaterial.java | 7928 | /*
* Copyright 2015 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thoughtworks.go.config.materials;
import com.thoughtworks.go.config.CaseInsensitiveString;
import com.thoughtworks.go.config.PipelineConfig;
import com.thoughtworks.go.domain.MaterialRevision;
import com.thoughtworks.go.domain.materials.*;
import com.thoughtworks.go.util.command.EnvironmentVariableContext;
import com.thoughtworks.go.util.command.InMemoryStreamConsumer;
import com.thoughtworks.go.util.command.ProcessOutputStreamConsumer;
import com.thoughtworks.go.util.command.UrlArgument;
import org.apache.commons.lang.StringUtils;
import java.io.File;
import java.util.Map;
import static com.thoughtworks.go.util.command.EnvironmentVariableContext.escapeEnvironmentVariable;
/**
* @understands a source control repository and its configuration
*/
public abstract class ScmMaterial extends AbstractMaterial {
    // Names of the environment variables exposed to build jobs for this material's revisions.
    public static final String GO_REVISION = "GO_REVISION";
    public static final String GO_TO_REVISION = "GO_TO_REVISION";
    public static final String GO_FROM_REVISION = "GO_FROM_REVISION";
    // Ignore-filter applied to modifications of this material.
    protected Filter filter;
    // Destination sub-folder (relative to the pipeline working directory); may be null.
    protected String folder;
    // Whether changes in this material automatically trigger pipelines.
    protected boolean autoUpdate = true;
    public ScmMaterial(String typeName) {
        super(typeName);
    }
    // The destination folder is what distinguishes two otherwise-identical materials in one pipeline.
    @Override protected void appendPipelineUniqueCriteria(Map<String, Object> basicCriteria) {
        basicCriteria.put("dest", folder);
    }
    /** Returns the checkout directory: {@code baseFolder} itself, or the configured sub-folder under it. */
    public File workingdir(File baseFolder) {
        if (getFolder() == null) {
            return baseFolder;
        }
        return new File(baseFolder, getFolder());
    }
    /** Human-readable description of what is being updated; "files" when no folder is configured. */
    public String updatingTarget() {
        return StringUtils.isEmpty(getFolder()) ? "files" : getFolder();
    }
    /** Populates a JSON map describing this material for UI consumption. */
    public void toJson(Map json, Revision revision) {
        json.put("folder", getFolder() == null ? "" : getFolder());
        json.put("scmType", getTypeForDisplay());
        json.put("location", getLocation());
        if (!CaseInsensitiveString.isBlank(getName())) {
            json.put("materialName", CaseInsensitiveString.str(getName()));
        }
        json.put("action", "Modified");
    }
    // Most materials such as hg, git, p4 all print the file from the root without '/',
    // but subversion prints it with '/'; we standardize it here (see the subversion implementation too).
    public boolean matches(String name, String regex) {
        if (regex.startsWith("/")) {
            regex = regex.substring(1);
        }
        return name.matches(regex);
    }
    /** Checks out the given revision into {@code baseDir}, discarding the command output in memory. */
    public void checkout(File baseDir, Revision revision, SubprocessExecutionContext execCtx) {
        InMemoryStreamConsumer output = ProcessOutputStreamConsumer.inMemoryConsumer();
        this.updateTo(output, baseDir, new RevisionContext(revision), execCtx);
    }
    public abstract String getUserName();
    public abstract String getPassword();
    public abstract String getEncryptedPassword();
    public abstract boolean isCheckExternals();
    public abstract String getUrl();
    protected abstract UrlArgument getUrlArgument();
    protected abstract String getLocation();
    public void setFilter(Filter filter) {
        this.filter = filter;
    }
    /** Appends a plain-text summary of one modification (for notification e-mails). */
    public void emailContent(StringBuilder content, Modification modification) {
        content.append(getTypeForDisplay() + ": " + getLocation()).append('\n').append(
                String.format("revision: %s, modified by %s on %s", modification.getRevision(),
                        modification.getUserName(), modification.getModifiedTime()))
                .append('\n')
                .append(modification.getComment());
    }
    public String getDescription() {
        return getUriForDisplay();
    }
    /** URL with credentials masked, suitable for display. */
    public String getUriForDisplay() {
        return getUrlArgument().forDisplay();
    }
    /** Exports GO_REVISION / GO_TO_REVISION / GO_FROM_REVISION for the given revision range. */
    public void populateEnvironmentContext(EnvironmentVariableContext environmentVariableContext, MaterialRevision materialRevision, File workingDir) {
        String toRevision = materialRevision.getRevision().getRevision();
        String fromRevision = materialRevision.getOldestRevision().getRevision();
        setGoRevisionVariables(environmentVariableContext, fromRevision, toRevision);
    }
    private void setGoRevisionVariables(EnvironmentVariableContext environmentVariableContext, String fromRevision, String toRevision) {
        setVariableWithName(environmentVariableContext, toRevision, GO_REVISION);
        setVariableWithName(environmentVariableContext, toRevision, GO_TO_REVISION);
        setVariableWithName(environmentVariableContext, fromRevision, GO_FROM_REVISION);
    }
    // Suffixes the variable with the material name (upper-cased) when one is set,
    // else with the escaped destination folder, else uses the bare property name.
    protected void setVariableWithName(EnvironmentVariableContext environmentVariableContext, String value, String propertyName) {
        if (!CaseInsensitiveString.isBlank(this.name)) {
            environmentVariableContext.setProperty(propertyName + "_" + escapeEnvironmentVariable(this.name.toUpper()), value, false);
            return;
        }
        String scrubbedFolder = escapeEnvironmentVariable(folder);
        if (!StringUtils.isEmpty(scrubbedFolder)) {
            environmentVariableContext.setProperty(propertyName + "_" + scrubbedFolder, value, false);
        } else {
            environmentVariableContext.setProperty(propertyName, value, false);
        }
    }
    public String getFolder() {
        return folder;
    }
    /** Material name if configured, otherwise the display URL. */
    public String getDisplayName() {
        return name == null ? getUriForDisplay() : CaseInsensitiveString.str(name);
    }
    public boolean isAutoUpdate() {
        return autoUpdate;
    }
    public boolean getAutoUpdate() {
        return autoUpdate;
    }
    public void setAutoUpdate(boolean value) {
        autoUpdate = value;
    }
    public final MatchedRevision createMatchedRevision(Modification modification, String searchString) {
        return new MatchedRevision(searchString, getShortRevision(modification.getRevision()), modification.getRevision(), modification.getUserName(), modification.getModifiedTime(),
                modification.getComment());
    }
    // Equality considers the superclass criteria plus the destination folder only.
    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        if (!super.equals(o)) {
            return false;
        }
        ScmMaterial that = (ScmMaterial) o;
        if (folder != null ? !folder.equals(that.folder) : that.folder != null) {
            return false;
        }
        return true;
    }
    @Override
    public int hashCode() {
        int result = super.hashCode();
        result = 31 * result + (folder != null ? folder.hashCode() : 0);
        return result;
    }
    /** Builds the REST URL of the changeset XML for the given modification. */
    public static String changesetUrl(Modification modification, String baseUrl, final long id) {
        return baseUrl + "/api/materials/" + id + "/changeset/" + modification.getRevision() + ".xml";
    }
    // SCM materials are never the source of a fetch-artifact task.
    public Boolean isUsedInFetchArtifact(PipelineConfig pipelineConfig) {
        return false;
    }
    // TODO: Consider renaming this to dest since we use that word in the UI & Config
    public void setFolder(String folder) {
        this.folder = folder;
    }
    public Revision oldestRevision(Modifications modifications) {
        return Modification.oldestRevision(modifications);
    }
    @Override
    public boolean supportsDestinationFolder() {
        return true;
    }
}
| apache-2.0 |
googleads/googleads-java-lib | modules/dfp_axis/src/main/java/com/google/api/ads/admanager/axis/v202111/CreativeNativeStylePreview.java | 6526 | // Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/**
* CreativeNativeStylePreview.java
*
* This file was auto-generated from WSDL
* by the Apache Axis 1.4 Mar 02, 2009 (07:08:06 PST) WSDL2Java emitter.
*/
package com.google.api.ads.admanager.axis.v202111;
/**
* Represents the {@link NativeStyle} of a {@link Creative} and its
* corresponding preview URL.
*/
// NOTE: This class is generated by Axis WSDL2Java (see file header). The unusual
// __equalsCalc/__hashCodeCalc guards below are the generator's protection against
// infinite recursion on cyclic object graphs — do not "clean them up" by hand;
// regenerate from the WSDL instead.
public class CreativeNativeStylePreview implements java.io.Serializable {
    /* The id of the {@link NativeStyle}. */
    private java.lang.Long nativeStyleId;
    /* The URL for previewing this creative using this particular
     * {@link NativeStyle} */
    private java.lang.String previewUrl;
    public CreativeNativeStylePreview() {
    }
    public CreativeNativeStylePreview(
           java.lang.Long nativeStyleId,
           java.lang.String previewUrl) {
           this.nativeStyleId = nativeStyleId;
           this.previewUrl = previewUrl;
    }
    @Override
    public String toString() {
        return com.google.common.base.MoreObjects.toStringHelper(this.getClass())
            .omitNullValues()
            .add("nativeStyleId", getNativeStyleId())
            .add("previewUrl", getPreviewUrl())
            .toString();
    }
    /**
     * Gets the nativeStyleId value for this CreativeNativeStylePreview.
     *
     * @return nativeStyleId * The id of the {@link NativeStyle}.
     */
    public java.lang.Long getNativeStyleId() {
        return nativeStyleId;
    }
    /**
     * Sets the nativeStyleId value for this CreativeNativeStylePreview.
     *
     * @param nativeStyleId * The id of the {@link NativeStyle}.
     */
    public void setNativeStyleId(java.lang.Long nativeStyleId) {
        this.nativeStyleId = nativeStyleId;
    }
    /**
     * Gets the previewUrl value for this CreativeNativeStylePreview.
     *
     * @return previewUrl * The URL for previewing this creative using this particular
     * {@link NativeStyle}
     */
    public java.lang.String getPreviewUrl() {
        return previewUrl;
    }
    /**
     * Sets the previewUrl value for this CreativeNativeStylePreview.
     *
     * @param previewUrl * The URL for previewing this creative using this particular
     * {@link NativeStyle}
     */
    public void setPreviewUrl(java.lang.String previewUrl) {
        this.previewUrl = previewUrl;
    }
    // Re-entrancy guard used by equals(); holds the object currently being compared.
    private java.lang.Object __equalsCalc = null;
    // Field-by-field equality; synchronized + __equalsCalc break comparison cycles.
    public synchronized boolean equals(java.lang.Object obj) {
        if (!(obj instanceof CreativeNativeStylePreview)) return false;
        CreativeNativeStylePreview other = (CreativeNativeStylePreview) obj;
        if (obj == null) return false;
        if (this == obj) return true;
        if (__equalsCalc != null) {
            return (__equalsCalc == obj);
        }
        __equalsCalc = obj;
        boolean _equals;
        _equals = true &&
            ((this.nativeStyleId==null && other.getNativeStyleId()==null) ||
             (this.nativeStyleId!=null &&
              this.nativeStyleId.equals(other.getNativeStyleId()))) &&
            ((this.previewUrl==null && other.getPreviewUrl()==null) ||
             (this.previewUrl!=null &&
              this.previewUrl.equals(other.getPreviewUrl())));
        __equalsCalc = null;
        return _equals;
    }
    // Re-entrancy guard used by hashCode() to avoid infinite recursion on cycles.
    private boolean __hashCodeCalc = false;
    public synchronized int hashCode() {
        if (__hashCodeCalc) {
            return 0;
        }
        __hashCodeCalc = true;
        int _hashCode = 1;
        if (getNativeStyleId() != null) {
            _hashCode += getNativeStyleId().hashCode();
        }
        if (getPreviewUrl() != null) {
            _hashCode += getPreviewUrl().hashCode();
        }
        __hashCodeCalc = false;
        return _hashCode;
    }
    // Type metadata
    private static org.apache.axis.description.TypeDesc typeDesc =
        new org.apache.axis.description.TypeDesc(CreativeNativeStylePreview.class, true);
    // Registers the XML element/field mapping Axis uses for (de)serialization.
    static {
        typeDesc.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "CreativeNativeStylePreview"));
        org.apache.axis.description.ElementDesc elemField = new org.apache.axis.description.ElementDesc();
        elemField.setFieldName("nativeStyleId");
        elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "nativeStyleId"));
        elemField.setXmlType(new javax.xml.namespace.QName("http://www.w3.org/2001/XMLSchema", "long"));
        elemField.setMinOccurs(0);
        elemField.setNillable(false);
        typeDesc.addFieldDesc(elemField);
        elemField = new org.apache.axis.description.ElementDesc();
        elemField.setFieldName("previewUrl");
        elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "previewUrl"));
        elemField.setXmlType(new javax.xml.namespace.QName("http://www.w3.org/2001/XMLSchema", "string"));
        elemField.setMinOccurs(0);
        elemField.setNillable(false);
        typeDesc.addFieldDesc(elemField);
    }
    /**
     * Return type metadata object
     */
    public static org.apache.axis.description.TypeDesc getTypeDesc() {
        return typeDesc;
    }
    /**
     * Get Custom Serializer
     */
    public static org.apache.axis.encoding.Serializer getSerializer(
           java.lang.String mechType,
           java.lang.Class _javaType,
           javax.xml.namespace.QName _xmlType) {
        return
          new  org.apache.axis.encoding.ser.BeanSerializer(
            _javaType, _xmlType, typeDesc);
    }
    /**
     * Get Custom Deserializer
     */
    public static org.apache.axis.encoding.Deserializer getDeserializer(
           java.lang.String mechType,
           java.lang.Class _javaType,
           javax.xml.namespace.QName _xmlType) {
        return
          new  org.apache.axis.encoding.ser.BeanDeserializer(
            _javaType, _xmlType, typeDesc);
    }
}
manstis/drools | drools-test-coverage/test-compiler-integration/src/test/java/org/drools/mvel/model/MockObjectSource.java | 2732 | /*
* Copyright 2005 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.mvel.model;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import org.drools.core.common.InternalFactHandle;
import org.drools.core.common.InternalWorkingMemory;
import org.drools.core.common.RuleBasePartitionId;
import org.drools.core.reteoo.ObjectSink;
import org.drools.core.reteoo.ObjectSource;
import org.drools.core.reteoo.builder.BuildContext;
import org.drools.core.spi.PropagationContext;
import org.drools.core.util.bitmask.BitMask;
/**
 * Test double for an {@link ObjectSource} that records how often it was
 * attached/updated and replays a fixed list of fact handles to any sink
 * passed to {@link #updateSink}.
 */
public class MockObjectSource extends ObjectSource {
    private static final long serialVersionUID = 510l;
    // Number of times attach() was called.
    private int attached;
    // Number of times updateSink() was called.
    private int updated;
    // Handles to propagate on updateSink(); null when the no-arg constructor was used
    // (kept that way for serialization compatibility with existing behavior).
    private List<InternalFactHandle> facts;

    public MockObjectSource() {
    }

    public MockObjectSource( final int id) {
        super( id, RuleBasePartitionId.MAIN_PARTITION, false);
        this.facts = new ArrayList<>();
    }

    public void attach() {
        this.attached++;
    }

    public int getAttached() {
        return this.attached;
    }

    /**
     * @deprecated misspelled accessor kept for callers that already use it;
     *             prefer {@link #getUpdated()}.
     */
    @Deprecated
    public int getUdated() {
        return this.updated;
    }

    /** Returns how many times {@link #updateSink} has been invoked. */
    public int getUpdated() {
        return this.updated;
    }

    public void addFact(final InternalFactHandle handle) {
        this.facts.add( handle );
    }

    public void removeFact(final InternalFactHandle handle) {
        this.facts.remove( handle );
    }

    /** Counts the call, then asserts every recorded fact handle into the sink. */
    public void updateSink(final ObjectSink sink,
                           final PropagationContext context,
                           final InternalWorkingMemory workingMemory) {
        this.updated++;
        for ( final InternalFactHandle handle : this.facts ) {
            sink.assertObject( handle,
                               context,
                               workingMemory );
        }
    }

    public void doAttach(BuildContext context) {
    }

    public short getType() {
        return 0;
    }

    @Override
    public BitMask calculateDeclaredMask(Class modifiedClass, List<String> settableProperties) {
        throw new UnsupportedOperationException();
    }
}
| apache-2.0 |
wilsonsf/Caelum-FJ11 | banco/src/br/com/empresa/banco/sistema/AtualizadorDeContas.java | 570 | package br.com.empresa.banco.sistema;
import br.com.empresa.banco.conta.Conta;
/**
 * Applies the SELIC rate to accounts one at a time, printing the balance
 * before and after each update and accumulating the grand total of the
 * updated balances.
 */
public class AtualizadorDeContas {

    /** Running sum of all balances after update. */
    private double saldoTotal;
    /** Interest rate applied to each account. */
    private final double selic;

    public AtualizadorDeContas(double selic) {
        this.selic = selic;
        this.saldoTotal = 0;
    }

    public double getSaldoTotal() {
        return this.saldoTotal;
    }

    public double getSelic() {
        return this.selic;
    }

    /** Updates one account with the SELIC rate and adds its new balance to the total. */
    public void roda(Conta conta) {
        System.out.println("Saldo anterior: " + conta.getSaldo());
        conta.atualiza(this.selic);
        double saldoAtualizado = conta.getSaldo();
        System.out.println("Saldo final: " + saldoAtualizado);
        saldoTotal += saldoAtualizado;
    }
}
| apache-2.0 |
AlexanderSopov/datastruktur | src/lab3/SplayTreeSet.java | 4092 | package lab3;
/**
* Created by oskar on 2016-02-17.
*/
/**
 * Set implementation backed by a splay tree: every lookup/insert/remove splays
 * the accessed element (or its closest neighbour) to the root, so recently
 * used elements are cheap to reach again. Duplicates are rejected.
 */
public class SplayTreeSet<E extends Comparable<? super E>> implements SimpleSet<E> {

    // Number of elements currently in the set.
    private int size;
    // Root of the tree; null when the set is empty.
    private SplayNode root = null;

    // One tree node: the stored value plus left/right children.
    private class SplayNode{
        private E value;
        private SplayNode left, right;
        public SplayNode(E value){
            this.value = value;
        }
    }

    public SplayTreeSet(){
        size = 0;
    }

    /** @return the number of elements in the set */
    public int size(){
        return size;
    }

    /**
     * Adds {@code x} unless it is already present.
     * @return true if the set changed
     */
    public boolean add(E x){
        SplayNode newNode = new SplayNode(x);
        if(root == null){
            root = newNode;
            size++;
            return true;
        }
        // contains() below splays the tree, leaving either x itself or the
        // node closest to x at the root — no separate splay call is needed.
        if(this.contains(x)) {
            return false;
        }
        else{
            // Split the tree around the (splayed) root and make the new node the root:
            // everything smaller than x ends up in newNode.left, everything larger in newNode.right.
            if(x.compareTo(root.value) < 0){
                newNode.right = root;
                newNode.left = root.left;
                root.left = null;
                root = newNode;
            }
            else{
                newNode.left = root;
                newNode.right = root.right;
                root.right = null;
                root = newNode;
            }
        }
        size++;
        return true;
    }

    /**
     * Removes {@code x} if present.
     * @return true if the set changed
     */
    public boolean remove(E x){
        if(root == null) return false;
        // Splay x to the root; if the root is not x, the element is absent.
        root = splay(root, x);
        if(root.value.compareTo(x) != 0) return false;
        if(root.right == null && root.left == null)
            root = null;
        else if(root.right == null){
            root = root.left;
        }
        else if(root.left == null) {
            root = root.right;
        }
        else{
            // Two children: splaying the left subtree for x brings its largest
            // element (all of them are < x) to its root, whose right child is
            // then free to take the old right subtree.
            SplayNode temp = root.right;
            root = root.left;
            root = splay(root, x);
            root.right = temp;
        }
        size--;
        return true;
    }

    /** @return true if {@code x} is in the set; splays the accessed path as a side effect */
    public boolean contains(E x){
        if(root == null) return false;
        root = splay(root, x);
        return(root.value.compareTo(x) == 0);
    }

    // Recursive splay: restructures the subtree rooted at node so that value
    // (or, if absent, the last node on its search path) becomes the subtree root.
    // The nested comparisons implement the zig-zig / zig-zag cases via the two
    // rotation helpers below.
    private SplayNode splay(SplayNode node, E value){
        if(node == null){
            return null;
        }
        if(value.compareTo(node.value) < 0){
            // Target lies in the left subtree.
            if(node.left == null){
                return node;
            }
            if(value.compareTo(node.left.value) < 0){
                // left-left (zig-zig)
                node.left.left = splay(node.left.left, value);
                node = rotateRight(node);
            }
            else if(value.compareTo(node.left.value) > 0){
                // left-right (zig-zag)
                node.left.right = splay(node.left.right, value);
                if(node.left.right != null){
                    node.left = rotateLeft(node.left);
                }
            }
            if(node.left == null) return node;
            else return rotateRight(node);
        }
        else if(value.compareTo(node.value) > 0){
            // Target lies in the right subtree.
            if(node.right == null){
                return node;
            }
            if(value.compareTo(node.right.value) < 0){
                // right-left (zig-zag)
                node.right.left = splay(node.right.left, value);
                if(node.right.left != null)
                    node.right = rotateRight(node.right);
            }
            else if(value.compareTo(node.right.value) > 0){
                // right-right (zig-zig)
                node.right.right = splay(node.right.right, value);
                node = rotateLeft(node);
            }
            if(node.right == null){
                return node;
            }
            return rotateLeft(node);
        }
        else {
            // Found it — already at the subtree root.
            return node;
        }
    }

    // Single right rotation: promotes node.left; no-op when there is no left child.
    private SplayNode rotateRight(SplayNode node) {
        if (node.left == null){
            return node;
        }
        else{
            SplayNode newNode = node.left;
            node.left = newNode.right;
            newNode.right = node;
            return newNode;
        }
    }

    // Single left rotation: promotes node.right; no-op when there is no right child.
    private SplayNode rotateLeft(SplayNode node){
        if(node.right == null){
            return node;
        }
        else {
            SplayNode newNode = node.right;
            node.right = newNode.left;
            newNode.left = node;
            return newNode;
        }
    }
}
| apache-2.0 |
willitscale/oaigatewayrelay | src/uk/co/n3tw0rk/oaigatewayrelay/events/system/ExtensionChange.java | 2791 | package uk.co.n3tw0rk.oaigatewayrelay.events.system;
/**
*
* @author M00SEMARKTWO
EXTENSION CHANGE EC
NOTE: This event is available only in protocol versions 05.10 and later.
USE: Occurs whenever device extensions are changed in the database, or when a device is equipped
or unequipped.
MONITOR TYPE: System
SYNTAX: EC,<Resync_Code>,<Mon_Cross_Ref_ID>,<Network_Node_Number>,
<Old_Extension>,<New_Extension>,<Device_Type><CR><LF>
Where:
Network_Node_Number: Indicates the node number of the connected phone system. If
the system is not networked, this will be 1. When this event is sent to an application
by the CT Gateway, this field specifies the node number of the phone system that originally
generated the event.
Old_Extension: Displays the old extension number of the device. If blank, the device is
new.
New_Extension: Shows the new extension of the device. If blank, the device has been
removed.
Device_Type: Identifies the device type of the <Old_Extension> and/or
<New_Extension>. Possible values are listed in the table below.
Table 24: EC Device_Type Values
VALUE <DEVICE_TYPE> VALUE <DEVICE_TYPE>
0 Keyset Station Device or
Operator (11)1 18 Off-Node Single-Line Station
Device
1 Single-Line Station Device 19 Off-Node Hunt Group
2 ACD/Hunt Group 20 Off-Node Page Port
3 Loop Start Trunk with (4)1 or without
Caller ID
21 Off-Node Page Zone
5 Ground Start Trunk with (6)1 or
without Caller ID
22 Off-Node Voice Mail
7 DID Trunk 23 ACD Agent
8 E&M Trunk 243 Unassociated Mailbox
9 ISDN Trunk 253 Off-Node Unassociated Mailbox
10 Trunk Group 26 BRI Station
12 Voice Mail Application 27 Off-Node BRI Station
13 Other 28 MFC/R2 Trunk
142 Feature Code 294 Modem*
15 Page Zone 31 MGCP Endpoint
16 Page Port 32 MGCP Gateway and Endpoint
17 Off-Node Keyset Station Device 33 SIP Trunk
*Axxess only
1 Other commands and events separate these devices when reporting types. The number enclosed in
parentheses indicates the value other commands and events use to report the segregated type (i.e.,
the loop start trunk with Caller ID is reported as type 4).
2 Feature-code extensions cannot be removed or created, but they can be changed.
3 These values are valid only for protocol versions 5.20 and later.
4 This value is valid only for protocol versions 8.00 and later.
NOTE: As indicated, the Extension Change event does not always report the
most-detailed device type. To obtain the most-detailed device type, issue a Query
Device Info (_QI) command (see page 149).
EXAMPLES: The extension number for extension 1000 has been changed to 1001.
001,EC,,<Mon_Cross_Ref_ID>,<Network_Node_Number>,1000,1001,1
Extension 1000 changed to a modem*.
001,EC,,<Mon_Cross_Ref_ID>,<Network_Node_Number>,1000,1005,29
* Applies to Axxess only
*/
public class ExtensionChange {
    // Intentionally empty placeholder: the parsing/handling of the Extension
    // Change (EC) event described in the protocol notes above has not been
    // implemented yet.
}
| apache-2.0 |
CyanFlxy/CyanFlxyWidget | TestApp/src/main/java/com/cyanflxy/test/MainActivity.java | 2737 | package com.cyanflxy.test;
import android.app.Activity;
import android.content.Intent;
import android.os.Bundle;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.BaseAdapter;
import android.widget.ListView;
import android.widget.TextView;
import java.util.LinkedList;
import java.util.List;
/**
 * Launcher screen of the widget test app: shows a clickable list of demo
 * activities and starts the one the user taps.
 */
public class MainActivity extends Activity implements AdapterView.OnItemClickListener {
    // Demo activities offered in the list, in display order.
    private List<ActivityInfo> activityList;
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        initActivityList();
        ListView list = (ListView) findViewById(R.id.list);
        list.setAdapter(new MyAdapter());
        list.setOnItemClickListener(this);
        // NOTE(review): this launches SimulateTurnActivity immediately on every
        // start, bypassing the list UI — looks like a debugging leftover; confirm
        // before removing.
        startActivity(new Intent(this, SimulateTurnActivity.class));
    }
    // Registers every demo activity shown in the list.
    private void initActivityList() {
        activityList = new LinkedList<ActivityInfo>();
        activityList.add(new ActivityInfo(ColorPickerActivity.class));
        activityList.add(new ActivityInfo(SineWaveActivity.class));
        activityList.add(new ActivityInfo(CircleAnimationActivity.class));
        activityList.add(new ActivityInfo(SimulateTurnActivity.class));
    }
    // Starts the activity whose info was stashed in the clicked row's tag.
    @Override
    public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
        ActivityInfo info = (ActivityInfo) view.getTag();
        Intent intent = new Intent(this, info.clazz);
        startActivity(intent);
    }
    // Pairs an Activity class with the label derived from its simple name
    // (class name minus the trailing "Activity" suffix, 8 characters).
    private class ActivityInfo {
        public Class<? extends Activity> clazz;
        public String label;
        public ActivityInfo(Class<? extends Activity> activityClass) {
            clazz = activityClass;
            String name = clazz.getSimpleName();
            label = name.substring(0, name.length() - 8);
        }
    }
    // Adapter rendering one text row per registered demo activity.
    private class MyAdapter extends BaseAdapter {
        @Override
        public int getCount() {
            return activityList.size();
        }
        @Override
        public Object getItem(int position) {
            return activityList.get(position);
        }
        @Override
        public long getItemId(int position) {
            return position;
        }
        @Override
        public View getView(int position, View convertView, ViewGroup parent) {
            ActivityInfo info = activityList.get(position);
            if (convertView == null) {
                convertView = View.inflate(MainActivity.this, R.layout.list_item, null);
            }
            TextView view = (TextView) convertView;
            view.setText(info.label);
            // The row's tag carries the ActivityInfo read back in onItemClick().
            view.setTag(info);
            return view;
        }
    }
}
| apache-2.0 |
scorpionvicky/elasticsearch | server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java | 46642 | /*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexableField;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.Version;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.mapper.DynamicTemplate.XContentFieldType;
import java.io.IOException;
import java.time.format.DateTimeParseException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import static org.elasticsearch.index.mapper.FieldMapper.IGNORE_MALFORMED_SETTING;
/** A parser for documents, given mappings from a DocumentMapper */
final class DocumentParser {
private final IndexSettings indexSettings;
private final DocumentMapperParser docMapperParser;
private final DocumentMapper docMapper;
    /**
     * @param indexSettings   settings of the index the documents belong to
     * @param docMapperParser supplies the xcontent registry used to create parsers
     * @param docMapper       mapper whose mappings drive the parse
     */
    DocumentParser(IndexSettings indexSettings, DocumentMapperParser docMapperParser, DocumentMapper docMapper) {
        this.indexSettings = indexSettings;
        this.docMapperParser = docMapperParser;
        this.docMapper = docMapper;
    }
    /**
     * Parses the given source into a {@link ParsedDocument}: validates that the
     * content is a single well-formed object, runs the metadata mappers and the
     * root object mapper over it, and collects any dynamic mapping updates.
     *
     * @throws MapperParsingException if the source is malformed or any parse step fails
     */
    ParsedDocument parseDocument(SourceToParse source, MetadataFieldMapper[] metadataFieldsMappers) throws MapperParsingException {
        final Mapping mapping = docMapper.mapping();
        final ParseContext.InternalParseContext context;
        final XContentType xContentType = source.getXContentType();
        try (XContentParser parser = XContentHelper.createParser(docMapperParser.getXContentRegistry(),
            LoggingDeprecationHandler.INSTANCE, source.source(), xContentType)) {
            context = new ParseContext.InternalParseContext(indexSettings, docMapperParser, docMapper, source, parser);
            validateStart(parser);
            internalParseDocument(mapping, metadataFieldsMappers, context, parser);
            // No tokens may remain after the root object closes.
            validateEnd(parser);
        } catch (Exception e) {
            throw wrapInMapperParsingException(source, e);
        }
        // A non-empty path here means some object was opened but never closed during the parse.
        String remainingPath = context.path().pathAsText("");
        if (remainingPath.isEmpty() == false) {
            throw new IllegalStateException("found leftover path elements: " + remainingPath);
        }
        context.postParse();
        return parsedDocument(source, context, createDynamicUpdate(mapping, docMapper, context.getDynamicMappers()));
    }
private static boolean containsDisabledObjectMapper(ObjectMapper objectMapper, String[] subfields) {
for (int i = 0; i < subfields.length - 1; ++i) {
Mapper mapper = objectMapper.getMapper(subfields[i]);
if (mapper instanceof ObjectMapper == false) {
break;
}
objectMapper = (ObjectMapper) mapper;
if (objectMapper.isEnabled() == false) {
return true;
}
}
return false;
}
    /**
     * Runs the actual parse: metadata mappers' preParse, then the root object
     * mapper over the document body (skipped entirely when the root mapping is
     * disabled or the document is empty), then the metadata mappers' postParse.
     */
    private static void internalParseDocument(Mapping mapping, MetadataFieldMapper[] metadataFieldsMappers,
                                              ParseContext.InternalParseContext context, XContentParser parser) throws IOException {
        final boolean emptyDoc = isEmptyDoc(mapping, parser);
        for (MetadataFieldMapper metadataMapper : metadataFieldsMappers) {
            metadataMapper.preParse(context);
        }
        if (mapping.root.isEnabled() == false) {
            // entire type is disabled
            parser.skipChildren();
        } else if (emptyDoc == false) {
            parseObjectOrNested(context, mapping.root);
        }
        for (MetadataFieldMapper metadataMapper : metadataFieldsMappers) {
            metadataMapper.postParse(context);
        }
    }
private static void validateStart(XContentParser parser) throws IOException {
// will result in START_OBJECT
XContentParser.Token token = parser.nextToken();
if (token != XContentParser.Token.START_OBJECT) {
throw new MapperParsingException("Malformed content, must start with an object");
}
}
private static void validateEnd(XContentParser parser) throws IOException {
XContentParser.Token token;// only check for end of tokens if we created the parser here
// try to parse the next token, this should be null if the object is ended properly
// but will throw a JSON exception if the extra tokens is not valid JSON (this will be handled by the catch)
token = parser.nextToken();
if (token != null) {
throw new IllegalArgumentException("Malformed content, found extra data after parsing: " + token);
}
}
private static boolean isEmptyDoc(Mapping mapping, XContentParser parser) throws IOException {
if (mapping.root.isEnabled()) {
final XContentParser.Token token = parser.nextToken();
if (token == XContentParser.Token.END_OBJECT) {
// empty doc, we can handle it...
return true;
} else if (token != XContentParser.Token.FIELD_NAME) {
throw new MapperParsingException("Malformed content, after first object, either the type field"
+ " or the actual properties should exist");
}
}
return false;
}
private static ParsedDocument parsedDocument(SourceToParse source, ParseContext.InternalParseContext context, Mapping update) {
return new ParsedDocument(
context.version(),
context.seqID(),
context.sourceToParse().id(),
source.routing(),
context.docs(),
context.sourceToParse().source(),
context.sourceToParse().getXContentType(),
update
);
}
private static MapperParsingException wrapInMapperParsingException(SourceToParse source, Exception e) {
// if its already a mapper parsing exception, no need to wrap it...
if (e instanceof MapperParsingException) {
return (MapperParsingException) e;
}
// Throw a more meaningful message if the document is empty.
if (source.source() != null && source.source().length() == 0) {
return new MapperParsingException("failed to parse, document is empty");
}
return new MapperParsingException("failed to parse", e);
}
private static String[] splitAndValidatePath(String fullFieldPath) {
if (fullFieldPath.contains(".")) {
String[] parts = fullFieldPath.split("\\.");
for (String part : parts) {
if (Strings.hasText(part) == false) {
// check if the field name contains only whitespace
if (Strings.isEmpty(part) == false) {
throw new IllegalArgumentException(
"object field cannot contain only whitespace: ['" + fullFieldPath + "']");
}
throw new IllegalArgumentException(
"object field starting or ending with a [.] makes object resolution ambiguous: [" + fullFieldPath + "]");
}
}
return parts;
} else {
if (Strings.isEmpty(fullFieldPath)) {
throw new IllegalArgumentException("field name cannot be an empty string");
}
return new String[] {fullFieldPath};
}
}
    /**
     * Creates a Mapping containing any dynamically added fields, or returns null if there were no dynamic mappings.
     * <p>
     * Works by sorting the dynamic mappers by name so that mappers sharing a path
     * prefix are contiguous, then sweeping them once while maintaining a stack of
     * parent object mappers ({@code parentMappers}) that mirrors the path of the
     * mapper currently being processed. Index 0 of the stack is always the root.
     */
    static Mapping createDynamicUpdate(Mapping mapping, DocumentMapper docMapper, List<Mapper> dynamicMappers) {
        if (dynamicMappers.isEmpty()) {
            return null;
        }
        // We build a mapping by first sorting the mappers, so that all mappers containing a common prefix
        // will be processed in a contiguous block. When the prefix is no longer seen, we pop the extra elements
        // off the stack, merging them upwards into the existing mappers.
        Collections.sort(dynamicMappers, (Mapper o1, Mapper o2) -> o1.name().compareTo(o2.name()));
        Iterator<Mapper> dynamicMapperItr = dynamicMappers.iterator();
        List<ObjectMapper> parentMappers = new ArrayList<>();
        Mapper firstUpdate = dynamicMapperItr.next();
        // seed the stack with an update for the root containing the first mapper
        parentMappers.add(createUpdate(mapping.root(), splitAndValidatePath(firstUpdate.name()), 0, firstUpdate));
        Mapper previousMapper = null;
        while (dynamicMapperItr.hasNext()) {
            Mapper newMapper = dynamicMapperItr.next();
            if (previousMapper != null && newMapper.name().equals(previousMapper.name())) {
                // We can see the same mapper more than once, for example, if we had foo.bar and foo.baz, where
                // foo did not yet exist. This will create 2 copies in dynamic mappings, which should be identical.
                // Here we just skip over the duplicates, but we merge them to ensure there are no conflicts.
                newMapper.merge(previousMapper);
                continue;
            }
            previousMapper = newMapper;
            String[] nameParts = splitAndValidatePath(newMapper.name());

            // We first need the stack to only contain mappers in common with the previously processed mapper
            // For example, if the first mapper processed was a.b.c, and we now have a.d, the stack will contain
            // a.b, and we want to merge b back into the stack so it just contains a
            int i = removeUncommonMappers(parentMappers, nameParts);

            // Then we need to add back mappers that may already exist within the stack, but are not on it.
            // For example, if we processed a.b, followed by an object mapper a.c.d, and now are adding a.c.d.e
            // then the stack will only have a on it because we will have already merged a.c.d into the stack.
            // So we need to pull a.c, followed by a.c.d, onto the stack so e can be added to the end.
            i = expandCommonMappers(parentMappers, nameParts, i);

            // If there are still parents of the new mapper which are not on the stack, we need to pull them
            // from the existing mappings. In order to maintain the invariant that the stack only contains
            // fields which are updated, we cannot simply add the existing mappers to the stack, since they
            // may have other subfields which will not be updated. Instead, we pull the mapper from the existing
            // mappings, and build an update with only the new mapper and its parents. This then becomes our
            // "new mapper", and can be added to the stack.
            if (i < nameParts.length - 1) {
                newMapper = createExistingMapperUpdate(parentMappers, nameParts, i, docMapper, newMapper);
            }

            if (newMapper instanceof ObjectMapper) {
                parentMappers.add((ObjectMapper)newMapper);
            } else {
                addToLastMapper(parentMappers, newMapper, true);
            }
        }
        // collapse whatever remains on the stack into the root update
        popMappers(parentMappers, 1, true);
        assert parentMappers.size() == 1;

        return mapping.mappingUpdate(parentMappers.get(0));
    }
private static void popMappers(List<ObjectMapper> parentMappers, int keepBefore, boolean merge) {
assert keepBefore >= 1; // never remove the root mapper
// pop off parent mappers not needed by the current mapper,
// merging them backwards since they are immutable
for (int i = parentMappers.size() - 1; i >= keepBefore; --i) {
addToLastMapper(parentMappers, parentMappers.remove(i), merge);
}
}
/**
* Adds a mapper as an update into the last mapper. If merge is true, the new mapper
* will be merged in with other child mappers of the last parent, otherwise it will be a new update.
*/
private static void addToLastMapper(List<ObjectMapper> parentMappers, Mapper mapper, boolean merge) {
assert parentMappers.size() >= 1;
int lastIndex = parentMappers.size() - 1;
ObjectMapper withNewMapper = parentMappers.get(lastIndex).mappingUpdate(mapper);
if (merge) {
withNewMapper = parentMappers.get(lastIndex).merge(withNewMapper);
}
parentMappers.set(lastIndex, withNewMapper);
}
    /**
     * Removes mappers that exist on the stack, but are not part of the path of the
     * current nameParts. Returns the next unprocessed index from nameParts.
     */
    private static int removeUncommonMappers(List<ObjectMapper> parentMappers, String[] nameParts) {
        int keepBefore = 1;
        // parentMappers.get(0) is the root, so stack index i corresponds to nameParts[i - 1]
        while (keepBefore < parentMappers.size() &&
                parentMappers.get(keepBefore).simpleName().equals(nameParts[keepBefore - 1])) {
            ++keepBefore;
        }
        // merge everything above the common prefix back down into the stack
        popMappers(parentMappers, keepBefore, true);
        return keepBefore - 1;
    }
    /**
     * Adds mappers from the end of the stack that exist as updates within those mappers.
     * Returns the next unprocessed index from nameParts.
     */
    private static int expandCommonMappers(List<ObjectMapper> parentMappers, String[] nameParts, int i) {
        ObjectMapper last = parentMappers.get(parentMappers.size() - 1);
        // follow existing child object mappers down the path, pushing each one onto the stack
        while (i < nameParts.length - 1 && last.getMapper(nameParts[i]) != null) {
            Mapper newLast = last.getMapper(nameParts[i]);
            assert newLast instanceof ObjectMapper; // intermediate path elements are always objects
            last = (ObjectMapper) newLast;
            parentMappers.add(last);
            ++i;
        }
        return i;
    }
    /** Creates an update for intermediate object mappers that are not on the stack, but parents of newMapper. */
    private static ObjectMapper createExistingMapperUpdate(List<ObjectMapper> parentMappers, String[] nameParts, int i,
                                                           DocumentMapper docMapper, Mapper newMapper) {
        String updateParentName = nameParts[i];
        final ObjectMapper lastParent = parentMappers.get(parentMappers.size() - 1);
        if (parentMappers.size() > 1) {
            // only prefix with parent mapper if the parent mapper isn't the root (which has a fake name)
            updateParentName = lastParent.name() + '.' + nameParts[i];
        }
        // pull the existing object mapper from the mappings and build an update containing
        // only the new mapper and its not-yet-stacked parents
        ObjectMapper updateParent = docMapper.objectMappers().get(updateParentName);
        assert updateParent != null : updateParentName + " doesn't exist";
        return createUpdate(updateParent, nameParts, i + 1, newMapper);
    }
    /**
     * Build an update for the parent which will contain the given mapper and any
     * intermediate fields. Walks the intermediate object mappers named by
     * nameParts[i..length-2] under {@code parent}, then wraps {@code mapper} in
     * updates for each of them, innermost first.
     */
    private static ObjectMapper createUpdate(ObjectMapper parent, String[] nameParts, int i, Mapper mapper) {
        List<ObjectMapper> parentMappers = new ArrayList<>();
        ObjectMapper previousIntermediate = parent;
        // collect the chain of existing intermediate object mappers along the path
        for (; i < nameParts.length - 1; ++i) {
            Mapper intermediate = previousIntermediate.getMapper(nameParts[i]);
            assert intermediate != null : "Field " + previousIntermediate.name() + " does not have a subfield " + nameParts[i];
            assert intermediate instanceof ObjectMapper;
            parentMappers.add((ObjectMapper)intermediate);
            previousIntermediate = (ObjectMapper)intermediate;
        }
        if (parentMappers.isEmpty() == false) {
            // add the new mapper to the stack, and pop down to the original parent level
            addToLastMapper(parentMappers, mapper, false);
            popMappers(parentMappers, 1, false);
            mapper = parentMappers.get(0);
        }
        return parent.mappingUpdate(mapper);
    }
    /**
     * Parses the value at the parser's current position as an object through
     * {@code mapper}. Skips children for disabled mappers, bails on an explicit
     * {@code null}, rejects concrete values for object fields, switches to a
     * nested document context for nested mappers, and propagates nested fields
     * to parent/root documents after parsing.
     */
    static void parseObjectOrNested(ParseContext context, ObjectMapper mapper) throws IOException {
        if (mapper.isEnabled() == false) {
            context.parser().skipChildren();
            return;
        }
        XContentParser parser = context.parser();
        XContentParser.Token token = parser.currentToken();
        if (token == XContentParser.Token.VALUE_NULL) {
            // the object is null ("obj1" : null), simply bail
            return;
        }

        String currentFieldName = parser.currentName();
        if (token.isValue()) {
            throw new MapperParsingException("object mapping for [" + mapper.name() + "] tried to parse field [" + currentFieldName
                + "] as object, but found a concrete value");
        }

        ObjectMapper.Nested nested = mapper.nested();
        if (nested.isNested()) {
            // nested documents get their own Lucene document and parse context
            context = nestedContext(context, mapper);
        }

        // if we are at the end of the previous object, advance
        if (token == XContentParser.Token.END_OBJECT) {
            token = parser.nextToken();
        }

        if (token == XContentParser.Token.START_OBJECT) {
            // if we are just starting an OBJECT, advance, this is the object we are parsing, we need the name first
            token = parser.nextToken();
        }

        innerParseObject(context, mapper, parser, currentFieldName, token);
        // restore the enable path flag
        if (nested.isNested()) {
            nested(context, nested);
        }
    }
    /**
     * Iterates the fields of the current object, dispatching each value token to
     * the appropriate parse method (object, array, null, or leaf value) until the
     * object's {@code END_OBJECT} token is reached.
     */
    private static void innerParseObject(ParseContext context, ObjectMapper mapper, XContentParser parser,
                                         String currentFieldName, XContentParser.Token token) throws IOException {
        assert token == XContentParser.Token.FIELD_NAME || token == XContentParser.Token.END_OBJECT;
        // path elements of currentFieldName, recomputed on every FIELD_NAME token
        String[] paths = null;
        while (token != XContentParser.Token.END_OBJECT) {
            if (token == XContentParser.Token.FIELD_NAME) {
                currentFieldName = parser.currentName();
                paths = splitAndValidatePath(currentFieldName);
                if (containsDisabledObjectMapper(mapper, paths)) {
                    // a disabled ancestor object: consume and discard the whole value
                    parser.nextToken();
                    parser.skipChildren();
                }
            } else if (token == XContentParser.Token.START_OBJECT) {
                parseObject(context, mapper, currentFieldName, paths);
            } else if (token == XContentParser.Token.START_ARRAY) {
                parseArray(context, mapper, currentFieldName, paths);
            } else if (token == XContentParser.Token.VALUE_NULL) {
                parseNullValue(context, mapper, currentFieldName, paths);
            } else if (token == null) {
                throw new MapperParsingException("object mapping for [" + mapper.name() + "] tried to parse field [" + currentFieldName
                    + "] as object, but got EOF, has a concrete value been provided to it?");
            } else if (token.isValue()) {
                parseValue(context, mapper, currentFieldName, token, paths);
            }
            token = parser.nextToken();
        }
    }
private static void nested(ParseContext context, ObjectMapper.Nested nested) {
ParseContext.Document nestedDoc = context.doc();
ParseContext.Document parentDoc = nestedDoc.getParent();
Settings settings = context.indexSettings().getSettings();
if (nested.isIncludeInParent()) {
addFields(settings, nestedDoc, parentDoc);
}
if (nested.isIncludeInRoot()) {
ParseContext.Document rootDoc = context.rootDoc();
// don't add it twice, if its included in parent, and we are handling the master doc...
if (!nested.isIncludeInParent() || parentDoc != rootDoc) {
addFields(settings, nestedDoc, rootDoc);
}
}
}
private static void addFields(Settings settings, ParseContext.Document nestedDoc, ParseContext.Document rootDoc) {
String nestedPathFieldName = NestedPathFieldMapper.name(settings);
for (IndexableField field : nestedDoc.getFields()) {
if (field.name().equals(nestedPathFieldName) == false) {
rootDoc.add(field);
}
}
}
    /**
     * Creates a parse context for a nested document and seeds the new nested
     * Lucene document with the parent's _id plus the nested path field.
     */
    private static ParseContext nestedContext(ParseContext context, ObjectMapper mapper) {
        context = context.createNestedContext(mapper.fullPath());
        ParseContext.Document nestedDoc = context.doc();
        ParseContext.Document parentDoc = nestedDoc.getParent();

        // We need to add the uid or id to this nested Lucene document too,
        // If we do not do this then when a document gets deleted only the root Lucene document gets deleted and
        // not the nested Lucene documents! Besides the fact that we would have zombie Lucene documents, the ordering of
        // documents inside the Lucene index (document blocks) will be incorrect, as nested documents of different root
        // documents are then aligned with other root documents. This will lead to the nested query, sorting, aggregations
        // and inner hits to fail or yield incorrect results.
        IndexableField idField = parentDoc.getField(IdFieldMapper.NAME);
        if (idField != null) {
            // We just need to store the id as indexed field, so that IndexWriter#deleteDocuments(term) can then
            // delete it when the root document is deleted too.
            nestedDoc.add(new Field(IdFieldMapper.NAME, idField.binaryValue(), IdFieldMapper.Defaults.NESTED_FIELD_TYPE));
        } else {
            throw new IllegalStateException("The root document of a nested document should have an _id field");
        }

        nestedDoc.add(NestedPathFieldMapper.field(context.indexSettings().getSettings(), mapper.nestedTypePath()));
        return context;
    }
private static void parseObjectOrField(ParseContext context, Mapper mapper) throws IOException {
if (mapper instanceof ObjectMapper) {
parseObjectOrNested(context, (ObjectMapper) mapper);
} else if (mapper instanceof FieldMapper) {
FieldMapper fieldMapper = (FieldMapper) mapper;
fieldMapper.parse(context);
parseCopyFields(context, fieldMapper.copyTo().copyToFields());
} else if (mapper instanceof FieldAliasMapper) {
throw new IllegalArgumentException("Cannot write to a field alias [" + mapper.name() + "].");
} else {
throw new IllegalStateException("The provided mapper [" + mapper.name() + "] has an unrecognized type [" +
mapper.getClass().getSimpleName() + "].");
}
}
    /**
     * Parses an object-valued field. Uses the existing mapper when one matches the
     * field path; otherwise resolves/creates dynamic parent mappers and, depending
     * on the dynamic setting, creates a dynamic object mapper (TRUE), fails
     * (STRICT), or skips the object (FALSE/default).
     */
    private static void parseObject(final ParseContext context, ObjectMapper mapper, String currentFieldName,
                                    String[] paths) throws IOException {
        assert currentFieldName != null;
        Mapper objectMapper = getMapper(context, mapper, currentFieldName, paths);
        if (objectMapper != null) {
            context.path().add(currentFieldName);
            parseObjectOrField(context, objectMapper);
            context.path().remove();
        } else {
            // the leaf name; intermediate path elements are handled by getDynamicParentMapper
            currentFieldName = paths[paths.length - 1];
            Tuple<Integer, ObjectMapper> parentMapperTuple = getDynamicParentMapper(context, paths, mapper);
            ObjectMapper parentMapper = parentMapperTuple.v2();
            ObjectMapper.Dynamic dynamic = dynamicOrDefault(parentMapper, context);
            if (dynamic == ObjectMapper.Dynamic.STRICT) {
                throw new StrictDynamicMappingException(mapper.fullPath(), currentFieldName);
            } else if (dynamic == ObjectMapper.Dynamic.TRUE) {
                // prefer a matching dynamic template; fall back to a plain object mapper
                Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, XContentFieldType.OBJECT);
                if (builder == null) {
                    builder = new ObjectMapper.Builder(currentFieldName).enabled(true);
                }
                Mapper.BuilderContext builderContext = new Mapper.BuilderContext(context.indexSettings().getSettings(), context.path());
                objectMapper = builder.build(builderContext);
                context.addDynamicMapper(objectMapper);
                context.path().add(currentFieldName);
                parseObjectOrField(context, objectMapper);
                context.path().remove();
            } else {
                // not dynamic, read everything up to end object
                context.parser().skipChildren();
            }
            // undo the path elements pushed while resolving dynamic parents
            for (int i = 0; i < parentMapperTuple.v1(); i++) {
                context.path().remove();
            }
        }
    }
    /**
     * Parses an array-valued field. When an existing mapper consumes whole arrays
     * itself it is handed the context directly; otherwise each array element is
     * parsed individually. Unmapped arrays may create a dynamic mapper via
     * templates when the parent mapping allows it.
     */
    private static void parseArray(ParseContext context, ObjectMapper parentMapper, String lastFieldName,
                                   String[] paths) throws IOException {
        String arrayFieldName = lastFieldName;

        Mapper mapper = getMapper(context, parentMapper, lastFieldName, paths);
        if (mapper != null) {
            // There is a concrete mapper for this field already. Need to check if the mapper
            // expects an array, if so we pass the context straight to the mapper and if not
            // we serialize the array components
            if (parsesArrayValue(mapper)) {
                parseObjectOrField(context, mapper);
            } else {
                parseNonDynamicArray(context, parentMapper, lastFieldName, arrayFieldName);
            }
        } else {
            // no mapping yet: resolve/create dynamic parents, then decide by the dynamic setting
            arrayFieldName = paths[paths.length - 1];
            lastFieldName = arrayFieldName;
            Tuple<Integer, ObjectMapper> parentMapperTuple = getDynamicParentMapper(context, paths, parentMapper);
            parentMapper = parentMapperTuple.v2();
            ObjectMapper.Dynamic dynamic = dynamicOrDefault(parentMapper, context);
            if (dynamic == ObjectMapper.Dynamic.STRICT) {
                throw new StrictDynamicMappingException(parentMapper.fullPath(), arrayFieldName);
            } else if (dynamic == ObjectMapper.Dynamic.TRUE) {
                Mapper.Builder builder = context.root().findTemplateBuilder(context, arrayFieldName, XContentFieldType.OBJECT);
                if (builder == null) {
                    // no array-aware template: parse each element on its own
                    parseNonDynamicArray(context, parentMapper, lastFieldName, arrayFieldName);
                } else {
                    Mapper.BuilderContext builderContext = new Mapper.BuilderContext(context.indexSettings().getSettings(), context.path());
                    mapper = builder.build(builderContext);
                    assert mapper != null;
                    if (parsesArrayValue(mapper)) {
                        context.addDynamicMapper(mapper);
                        context.path().add(arrayFieldName);
                        parseObjectOrField(context, mapper);
                        context.path().remove();
                    } else {
                        parseNonDynamicArray(context, parentMapper, lastFieldName, arrayFieldName);
                    }
                }
            } else {
                // TODO: shouldn't this skip, not parse?
                parseNonDynamicArray(context, parentMapper, lastFieldName, arrayFieldName);
            }
            // undo the path elements pushed while resolving dynamic parents
            for (int i = 0; i < parentMapperTuple.v1(); i++) {
                context.path().remove();
            }
        }
    }
private static boolean parsesArrayValue(Mapper mapper) {
return mapper instanceof FieldMapper && ((FieldMapper) mapper).parsesArrayValue();
}
    /**
     * Parses an array element by element, dispatching each element token to the
     * object/array/null/value parse methods, until the array's {@code END_ARRAY}
     * token is reached.
     */
    private static void parseNonDynamicArray(ParseContext context, ObjectMapper mapper,
                                             final String lastFieldName, String arrayFieldName) throws IOException {
        XContentParser parser = context.parser();
        XContentParser.Token token;
        final String[] paths = splitAndValidatePath(lastFieldName);
        while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
            if (token == XContentParser.Token.START_OBJECT) {
                parseObject(context, mapper, lastFieldName, paths);
            } else if (token == XContentParser.Token.START_ARRAY) {
                // nested arrays are flattened by recursing with the same field name
                parseArray(context, mapper, lastFieldName, paths);
            } else if (token == XContentParser.Token.VALUE_NULL) {
                parseNullValue(context, mapper, lastFieldName, paths);
            } else if (token == null) {
                throw new MapperParsingException("object mapping for [" + mapper.name() + "] with array for [" + arrayFieldName
                    + "] tried to parse as array, but got EOF, is there a mismatch in types for the same field?");
            } else {
                assert token.isValue();
                parseValue(context, mapper, lastFieldName, token, paths);
            }
        }
    }
    /**
     * Parses a leaf value. Uses the existing mapper for the field path when one
     * exists; otherwise resolves/creates dynamic parents and hands off to
     * {@link #parseDynamicValue}.
     */
    private static void parseValue(final ParseContext context, ObjectMapper parentMapper,
                                   String currentFieldName, XContentParser.Token token, String[] paths) throws IOException {
        if (currentFieldName == null) {
            throw new MapperParsingException("object mapping [" + parentMapper.name() + "] trying to serialize a value with"
                + " no field associated with it, current value [" + context.parser().textOrNull() + "]");
        }

        Mapper mapper = getMapper(context, parentMapper, currentFieldName, paths);
        if (mapper != null) {
            parseObjectOrField(context, mapper);
        } else {
            // the leaf name; intermediate path elements are handled by getDynamicParentMapper
            currentFieldName = paths[paths.length - 1];
            Tuple<Integer, ObjectMapper> parentMapperTuple = getDynamicParentMapper(context, paths, parentMapper);
            parentMapper = parentMapperTuple.v2();
            parseDynamicValue(context, parentMapper, currentFieldName, token);
            // undo the path elements pushed while resolving dynamic parents
            for (int i = 0; i < parentMapperTuple.v1(); i++) {
                context.path().remove();
            }
        }
    }
private static void parseNullValue(ParseContext context, ObjectMapper parentMapper, String lastFieldName,
String[] paths) throws IOException {
// we can only handle null values if we have mappings for them
Mapper mapper = getMapper(context, parentMapper, lastFieldName, paths);
if (mapper != null) {
// TODO: passing null to an object seems bogus?
parseObjectOrField(context, mapper);
} else if (parentMapper.dynamic() == ObjectMapper.Dynamic.STRICT) {
throw new StrictDynamicMappingException(parentMapper.fullPath(), lastFieldName);
}
}
private static Mapper.Builder<?> newLongBuilder(String name, Settings settings) {
return new NumberFieldMapper.Builder(name, NumberFieldMapper.NumberType.LONG, settings);
}
private static Mapper.Builder<?> newFloatBuilder(String name, Settings settings) {
return new NumberFieldMapper.Builder(name, NumberFieldMapper.NumberType.FLOAT, settings);
}
    /**
     * Chooses a field-mapper builder for a dynamically mapped leaf value based on
     * the current token's type: strings (with optional numeric/date detection),
     * numbers (long vs float), booleans, and embedded binary objects. Dynamic
     * templates take precedence over the built-in defaults at every step.
     */
    private static Mapper.Builder<?> createBuilderFromDynamicValue(final ParseContext context,
                                                                   XContentParser.Token token,
                                                                   String currentFieldName) throws IOException {
        if (token == XContentParser.Token.VALUE_STRING) {
            String text = context.parser().text();

            // probe whether the string also parses as a number, for numeric detection
            boolean parseableAsLong = false;
            try {
                Long.parseLong(text);
                parseableAsLong = true;
            } catch (NumberFormatException e) {
                // not a long number
            }

            boolean parseableAsDouble = false;
            try {
                Double.parseDouble(text);
                parseableAsDouble = true;
            } catch (NumberFormatException e) {
                // not a double number
            }

            if (parseableAsLong && context.root().numericDetection()) {
                Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, XContentFieldType.LONG);
                if (builder == null) {
                    builder = newLongBuilder(currentFieldName, context.indexSettings().getSettings());
                }
                return builder;
            } else if (parseableAsDouble && context.root().numericDetection()) {
                Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, XContentFieldType.DOUBLE);
                if (builder == null) {
                    builder = newFloatBuilder(currentFieldName, context.indexSettings().getSettings());
                }
                return builder;
            } else if (parseableAsLong == false && parseableAsDouble == false && context.root().dateDetection()) {
                // We refuse to match pure numbers, which are too likely to be
                // false positives with date formats that include eg.
                // `epoch_millis` or `YYYY`
                for (DateFormatter dateTimeFormatter : context.root().dynamicDateTimeFormatters()) {
                    try {
                        dateTimeFormatter.parse(text);
                    } catch (ElasticsearchParseException | DateTimeParseException | IllegalArgumentException e) {
                        // failure to parse this, continue
                        continue;
                    }
                    Mapper.Builder builder
                        = context.root().findTemplateBuilder(context, currentFieldName, dateTimeFormatter);
                    if (builder == null) {
                        boolean ignoreMalformed = IGNORE_MALFORMED_SETTING.get(context.indexSettings().getSettings());
                        builder = new DateFieldMapper.Builder(currentFieldName, DateFieldMapper.Resolution.MILLISECONDS,
                            dateTimeFormatter, ignoreMalformed, Version.indexCreated(context.indexSettings().getSettings()));
                    }
                    return builder;
                }
            }

            // default for strings: text with a .keyword multi-field
            Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, XContentFieldType.STRING);
            if (builder == null) {
                builder = new TextFieldMapper.Builder(currentFieldName)
                        .addMultiField(new KeywordFieldMapper.Builder("keyword").ignoreAbove(256));
            }
            return builder;
        } else if (token == XContentParser.Token.VALUE_NUMBER) {
            XContentParser.NumberType numberType = context.parser().numberType();
            if (numberType == XContentParser.NumberType.INT
                    || numberType == XContentParser.NumberType.LONG
                    || numberType == XContentParser.NumberType.BIG_INTEGER) {
                Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, XContentFieldType.LONG);
                if (builder == null) {
                    builder = newLongBuilder(currentFieldName, context.indexSettings().getSettings());
                }
                return builder;
            } else if (numberType == XContentParser.NumberType.FLOAT
                    || numberType == XContentParser.NumberType.DOUBLE
                    || numberType == XContentParser.NumberType.BIG_DECIMAL) {
                Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, XContentFieldType.DOUBLE);
                if (builder == null) {
                    // no templates are defined, we use float by default instead of double
                    // since this is much more space-efficient and should be enough most of
                    // the time
                    builder = newFloatBuilder(currentFieldName, context.indexSettings().getSettings());
                }
                return builder;
            }
        } else if (token == XContentParser.Token.VALUE_BOOLEAN) {
            Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, XContentFieldType.BOOLEAN);
            if (builder == null) {
                builder = new BooleanFieldMapper.Builder(currentFieldName);
            }
            return builder;
        } else if (token == XContentParser.Token.VALUE_EMBEDDED_OBJECT) {
            Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, XContentFieldType.BINARY);
            if (builder == null) {
                builder = new BinaryFieldMapper.Builder(currentFieldName);
            }
            return builder;
        } else {
            // unknown token type: only a dynamic template can handle it
            Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, XContentFieldType.STRING);
            if (builder != null) {
                return builder;
            }
        }
        // TODO how do we identify dynamically that its a binary value?
        throw new IllegalStateException("Can't handle serializing a dynamic type with content token [" + token + "] and field name ["
            + currentFieldName + "]");
    }
private static void parseDynamicValue(final ParseContext context, ObjectMapper parentMapper,
String currentFieldName, XContentParser.Token token) throws IOException {
ObjectMapper.Dynamic dynamic = dynamicOrDefault(parentMapper, context);
if (dynamic == ObjectMapper.Dynamic.STRICT) {
throw new StrictDynamicMappingException(parentMapper.fullPath(), currentFieldName);
}
if (dynamic == ObjectMapper.Dynamic.FALSE) {
return;
}
final Mapper.BuilderContext builderContext = new Mapper.BuilderContext(context.indexSettings().getSettings(), context.path());
final Mapper.Builder<?> builder = createBuilderFromDynamicValue(context, token, currentFieldName);
Mapper mapper = builder.build(builderContext);
context.addDynamicMapper(mapper);
parseObjectOrField(context, mapper);
}
/** Creates instances of the fields that the current field should be copied to */
private static void parseCopyFields(ParseContext context, List<String> copyToFields) throws IOException {
if (!context.isWithinCopyTo() && copyToFields.isEmpty() == false) {
context = context.createCopyToContext();
for (String field : copyToFields) {
// In case of a hierarchy of nested documents, we need to figure out
// which document the field should go to
ParseContext.Document targetDoc = null;
for (ParseContext.Document doc = context.doc(); doc != null; doc = doc.getParent()) {
if (field.startsWith(doc.getPrefix())) {
targetDoc = doc;
break;
}
}
assert targetDoc != null;
final ParseContext copyToContext;
if (targetDoc == context.doc()) {
copyToContext = context;
} else {
copyToContext = context.switchDoc(targetDoc);
}
parseCopy(field, copyToContext);
}
}
}
    /**
     * Parses the current value again into the copy_to target field named
     * {@code field}. Uses the existing mapper when one exists; otherwise the
     * target field is created dynamically.
     */
    private static void parseCopy(String field, ParseContext context) throws IOException {
        Mapper mapper = context.docMapper().mappers().getMapper(field);
        if (mapper != null) {
            if (mapper instanceof FieldMapper) {
                ((FieldMapper) mapper).parse(context);
            } else if (mapper instanceof FieldAliasMapper) {
                throw new IllegalArgumentException("Cannot copy to a field alias [" + mapper.name() + "].");
            } else {
                throw new IllegalStateException("The provided mapper [" + mapper.name() +
                    "] has an unrecognized type [" + mapper.getClass().getSimpleName() + "].");
            }
        } else {
            // The path of the dest field might be completely different from the current one so we need to reset it
            context = context.overridePath(new ContentPath(0));

            final String[] paths = splitAndValidatePath(field);
            final String fieldName = paths[paths.length-1];
            Tuple<Integer, ObjectMapper> parentMapperTuple = getDynamicParentMapper(context, paths, null);
            ObjectMapper objectMapper = parentMapperTuple.v2();
            parseDynamicValue(context, objectMapper, fieldName, context.parser().currentToken());
            // undo the path elements pushed while resolving dynamic parents
            for (int i = 0; i < parentMapperTuple.v1(); i++) {
                context.path().remove();
            }
        }
    }
    /**
     * Resolves (and, when the dynamic setting allows, creates) the chain of
     * intermediate object mappers for a dotted field path, pushing each element
     * onto the context path.
     *
     * @return a tuple of the number of path elements pushed (so the caller can pop
     *         them afterwards) and the deepest parent object mapper reached
     */
    private static Tuple<Integer, ObjectMapper> getDynamicParentMapper(ParseContext context, final String[] paths,
            ObjectMapper currentParent) {
        ObjectMapper mapper = currentParent == null ? context.root() : currentParent;
        int pathsAdded = 0;
        ObjectMapper parent = mapper;
        for (int i = 0; i < paths.length-1; i++) {
            String currentPath = context.path().pathAsText(paths[i]);
            // an intermediate path element must not already be mapped as a leaf field
            Mapper existingFieldMapper = context.docMapper().mappers().getMapper(currentPath);
            if (existingFieldMapper != null) {
                throw new MapperParsingException(
                        "Could not dynamically add mapping for field [{}]. Existing mapping for [{}] must be of type object but found [{}].",
                        null, String.join(".", paths), currentPath, existingFieldMapper.typeName());
            }
            mapper = context.docMapper().objectMappers().get(currentPath);
            if (mapper == null) {
                // One mapping is missing, check if we are allowed to create a dynamic one.
                ObjectMapper.Dynamic dynamic = dynamicOrDefault(parent, context);

                switch (dynamic) {
                    case STRICT:
                        throw new StrictDynamicMappingException(parent.fullPath(), paths[i]);
                    case TRUE:
                        Mapper.Builder builder = context.root().findTemplateBuilder(context, paths[i], XContentFieldType.OBJECT);
                        if (builder == null) {
                            builder = new ObjectMapper.Builder(paths[i]).enabled(true);
                        }
                        Mapper.BuilderContext builderContext = new Mapper.BuilderContext(context.indexSettings().getSettings(),
                            context.path());
                        mapper = (ObjectMapper) builder.build(builderContext);
                        if (mapper.nested() != ObjectMapper.Nested.NO) {
                            throw new MapperParsingException("It is forbidden to create dynamic nested objects (["
                                + context.path().pathAsText(paths[i]) + "]) through `copy_to` or dots in field names");
                        }
                        context.addDynamicMapper(mapper);
                        break;
                    case FALSE:
                        // Should not dynamically create any more mappers so return the last mapper
                        return new Tuple<>(pathsAdded, parent);

                }
            }
            context.path().add(paths[i]);
            pathsAdded++;
            parent = mapper;
        }
        return new Tuple<>(pathsAdded, mapper);
    }
    /**
     * Finds the effective dynamic setting for the given parent mapper: walks up
     * the object-mapper hierarchy until an explicit setting is found, falling back
     * to the root mapper's setting and finally to {@code TRUE}.
     */
    private static ObjectMapper.Dynamic dynamicOrDefault(ObjectMapper parentMapper, ParseContext context) {
        ObjectMapper.Dynamic dynamic = parentMapper.dynamic();
        while (dynamic == null) {
            int lastDotNdx = parentMapper.name().lastIndexOf('.');
            if (lastDotNdx == -1) {
                // no dot means the parent is the root, so just delegate to the default outside the loop
                break;
            }
            String parentName = parentMapper.name().substring(0, lastDotNdx);
            parentMapper = context.docMapper().objectMappers().get(parentName);
            if (parentMapper == null) {
                // If parentMapper is ever null, it means the parent of the current mapper was dynamically created.
                // But in order to be created dynamically, the dynamic setting of that parent was necessarily true
                return ObjectMapper.Dynamic.TRUE;
            }
            dynamic = parentMapper.dynamic();
        }
        if (dynamic == null) {
            return context.root().dynamic() == null ? ObjectMapper.Dynamic.TRUE : context.root().dynamic();
        }
        return dynamic;
    }
    /**
     * Looks up a child mapper, taking into account field names that expand to
     * objects (dotted names). Metadata mappers match first; otherwise the dotted
     * path is walked through intermediate object mappers. Returns null when no
     * mapping exists for the path; throws when the path crosses a nested object.
     */
    private static Mapper getMapper(final ParseContext context, ObjectMapper objectMapper, String fieldName, String[] subfields) {
        String fieldPath = context.path().pathAsText(fieldName);
        // Check if mapper is a metadata mapper first
        Mapper mapper = context.docMapper().mapping().getMetadataMapper(fieldPath);
        if (mapper != null) {
            return mapper;
        }

        for (int i = 0; i < subfields.length - 1; ++i) {
            mapper = objectMapper.getMapper(subfields[i]);
            if (mapper == null || (mapper instanceof ObjectMapper) == false) {
                return null;
            }
            objectMapper = (ObjectMapper)mapper;
            if (objectMapper.nested().isNested()) {
                throw new MapperParsingException("Cannot add a value for field ["
                    + fieldName + "] since one of the intermediate objects is mapped as a nested object: ["
                    + mapper.name() + "]");
            }
        }
        return objectMapper.getMapper(subfields[subfields.length - 1]);
    }
}
| apache-2.0 |
Subterranean-Security/Crimson | src/main/java/com/subterranean_security/crimson/core/util/ValidationUtil.java | 5508 | /******************************************************************************
* *
* Copyright 2016 Subterranean Security *
* *
* Licensed under the Apache License, Version 2.0 (the "License"); *
* you may not use this file except in compliance with the License. *
* You may obtain a copy of the License at *
* *
* http://www.apache.org/licenses/LICENSE-2.0 *
* *
* Unless required by applicable law or agreed to in writing, software *
* distributed under the License is distributed on an "AS IS" BASIS, *
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. *
* See the License for the specific language governing permissions and *
* limitations under the License. *
* *
*****************************************************************************/
package com.subterranean_security.crimson.core.util;
import java.io.File;
import java.util.regex.Pattern;
import javax.swing.JPasswordField;
public final class ValidationUtil {
private ValidationUtil() {
}
private static final Pattern PATTERN_VALID_USER = Pattern.compile("^[a-zA-Z0-9]*$");
/**
* Validate a user name
*
* @param user
* @return True if user is a valid username
*/
public static boolean username(String user) {
if (user.length() < 4 || user.length() > 60) {
return false;
}
return PATTERN_VALID_USER.matcher(user).matches();
}
private static final Pattern PATTERN_VALID_GROUP = Pattern.compile("^[a-zA-Z0-9 ]*$");
/**
* Validate a group name
*
* @param group
* @return True if group is a valid group name
*/
public static boolean group(String group) {
if (group.length() < 4 || group.length() > 60) {
return false;
}
return PATTERN_VALID_GROUP.matcher(group).matches();
}
/**
* Validate a password
*
* @param field
* @return True if the given JPasswordField contains a valid password.
*/
public static boolean password(JPasswordField field) {
char[] password = field.getPassword();
boolean outcome = true;
if (password.length < 4 || password.length > 64) {
outcome = false;
}
RandomUtil.clearChar(password);
return outcome;
}
private static final Pattern PATTERN_VALID_DNS = Pattern
.compile("^((?!-)[A-Za-z0-9-]{1,63}(?<!-)\\.)+[A-Za-z]{2,6}$");
/**
* Validate a DNS name
*
* @param dns
* @return True if dns is a valid DNS name
*/
public static boolean dns(String dns) {
if (dns == null) {
return false;
}
return PATTERN_VALID_DNS.matcher(dns).find();
}
private static final Pattern PATTERN_VALID_IPV4 = Pattern
.compile("^([01]?\\d\\d?|2[0-4]\\d|25[0-5])\\." + "([01]?\\d\\d?|2[0-4]\\d|25[0-5])\\."
+ "([01]?\\d\\d?|2[0-4]\\d|25[0-5])\\." + "([01]?\\d\\d?|2[0-4]\\d|25[0-5])$");
/**
* Validate an IP
*
* @param ip
* @return True if ip is a valid IP address
*/
public static boolean ipv4(String ip) {
if (ip == null) {
return false;
}
return PATTERN_VALID_IPV4.matcher(ip).matches();
}
private static final Pattern PATTERN_VALID_PRIVATE_IPV4 = Pattern
.compile("(^127\\.)|(^10\\.)|(^172\\.1[6-9]\\.)|(^172\\.2[0-9]\\.)|(^172\\.3[0-1]\\.)|(^192\\.168\\.)");
/**
* Validate a private IP
*
* @param ip
* @return True if ip is a valid private IP address
*/
public static boolean privateIP(String ip) {
return ipv4(ip) && PATTERN_VALID_PRIVATE_IPV4.matcher(ip).find();
}
/**
* Validate a port number
*
* @param port
* @return True if port is an integer and a valid port number
*/
public static boolean port(String port) {
try {
return port(Integer.parseInt(port));
} catch (Throwable t) {
return false;
}
}
/**
* Validate a port number
*
* @param port
* @return True if port is within range
*/
public static boolean port(int port) {
return (port > 0 && port < 65536);
}
/**
* Validate a filesystem path
*
* @param path
* @return True if path is a valid path
*/
public static boolean path(String path) {
try {
new File(path).getCanonicalPath();
} catch (Throwable e) {
return false;
}
return true;
}
public static boolean serial(String key) {
if (key.length() != 16) {
return false;
}
if (!key.matches("^[A-Z0-9]*$")) {
return false;
}
return true;
}
private static final Pattern PATTERN_VALID_EMAIL = Pattern
.compile("^[_A-Za-z0-9-\\+]+(\\.[_A-Za-z0-9-]+)*@[A-Za-z0-9-]+(\\.[A-Za-z0-9]+)*(\\.[A-Za-z]{2,})$");
/**
* Validate an email address
*
* @param email
* @return True if email is a valid email
*/
public static boolean email(String email) {
return PATTERN_VALID_EMAIL.matcher(email).matches();
}
/**
* Validate a keylogger flush value. This is either a number of events or a
* period.
*
* @param value
* @return True if value is a valid keylogger flush value
*/
public static boolean keyloggerFlushNumber(String value) {
try {
return (Integer.parseInt(value) > 0);
} catch (Throwable e) {
return false;
}
}
} | apache-2.0 |
franck-benault/BDD-Examples | 01a-cucumber-junit/src/test/java/net/franckbenault/bdd/calculator/RunCukesTest.java | 306 | package net.franckbenault.bdd.calculator;
import cucumber.api.CucumberOptions;
import cucumber.api.junit.Cucumber;
import org.junit.runner.RunWith;
/**
 * JUnit entry point that runs the Cucumber features for the calculator
 * examples, writing JSON and HTML reports under {@code target/}.
 */
@RunWith(Cucumber.class)
@CucumberOptions(plugin = {"json:target/cucumber-calculator.json","html:target/cucumber-calculator"})
public class RunCukesTest {
}
testify-project/testify | modules/di/di-hk2/src/main/java/org/testifyproject/di/hk2/HK2Properties.java | 949 | /*
* Copyright 2016-2017 Testify Project.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.testifyproject.di.hk2;
/**
* A class that defines a list of common HK2 properties.
*
* @author saden
*/
public class HK2Properties {

    /**
     * The location of HK2 default descriptor file.
     */
    public static final String DEFAULT_DESCRIPTOR = "META-INF/hk2-locator/default";

    private HK2Properties() {
        // constants holder; prevent instantiation
    }
}
}
| apache-2.0 |
basho/riak-java-client | src/main/java/com/basho/riak/client/api/commands/mapreduce/SearchMapReduce.java | 1767 | package com.basho.riak.client.api.commands.mapreduce;
/**
* Command used to perform a map reduce operation with a search query as input.
* @author Dave Rusek <drusek at basho dot com>
* @since 2.0
*/
public class SearchMapReduce extends MapReduce
{
    protected SearchMapReduce(SearchInput input, Builder builder)
    {
        super(input, builder);
    }

    /**
     * Builder for a SearchMapReduce command.
     */
    public static class Builder extends MapReduce.Builder<Builder>
    {
        private String searchIndex;
        private String searchQuery;

        @Override
        protected Builder self()
        {
            return this;
        }

        /**
         * Sets the index the search will run against.
         *
         * @param index the index to run the search on
         * @return a reference to this object.
         */
        public Builder withIndex(String index)
        {
            this.searchIndex = index;
            return this;
        }

        /**
         * Sets the query string to execute.
         *
         * @param query the query to run
         * @return a reference to this object.
         */
        public Builder withQuery(String query)
        {
            this.searchQuery = query;
            return this;
        }

        /**
         * Builds the SearchMapReduce operation. Both an index and a query
         * must have been supplied.
         *
         * @return the new SearchMapReduce operation.
         */
        public SearchMapReduce build()
        {
            if (searchIndex == null)
            {
                throw new IllegalStateException("An index must be specified");
            }
            if (searchQuery == null)
            {
                throw new IllegalStateException("A query must be specified");
            }
            return new SearchMapReduce(new SearchInput(searchIndex, searchQuery), this);
        }
    }
}
| apache-2.0 |
lpj1986/learn-javase | src/main/java/com/hitsoysauce/gui/awt/MyFrame.java | 598 | package com.hitsoysauce.gui.awt;
import java.awt.Color;
import java.awt.Frame;
import java.awt.Panel;
/**
 * Minimal AWT demo: a gray frame containing a green panel.
 */
public class MyFrame extends Frame {

	private static final long serialVersionUID = 8447708163884981021L;

	/**
	 * Creates a frame with the given window title.
	 */
	public MyFrame(String title) {
		super(title);
	}

	/**
	 * Demo entry point: shows a 500x300 gray frame holding a 100x100 green panel.
	 */
	public static void main(String[] args) {
		MyFrame frame = new MyFrame("First GUI APP");
		frame.setSize(500, 300);
		frame.setBackground(Color.GRAY);

		Panel panel = new Panel();
		panel.setSize(100, 100);
		panel.setBackground(Color.GREEN);
		// add the component before showing the frame so it is laid out on first paint
		frame.add(panel);

		// AWT frames do nothing on close by default, so without this listener the
		// JVM keeps running after the window's close button is pressed
		frame.addWindowListener(new java.awt.event.WindowAdapter() {
			@Override
			public void windowClosing(java.awt.event.WindowEvent e) {
				System.exit(0);
			}
		});

		frame.setVisible(true);
	}
}
| apache-2.0 |
ketan/gocd | server/src/main/java/com/thoughtworks/go/server/util/RequestUtils.java | 1075 | /*
* Copyright 2020 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thoughtworks.go.server.util;
import javax.servlet.MultipartConfigElement;
import javax.servlet.http.HttpServletRequest;
import static org.eclipse.jetty.server.Request.MULTIPART_CONFIG_ELEMENT;
public class RequestUtils {
    private RequestUtils() {
        // static helpers only
    }

    /**
     * Attaches a multipart configuration to the request so the container can
     * parse multipart bodies, spooling uploads to the JVM temp directory.
     */
    public static void configureMultipart(HttpServletRequest req) {
        final String tempDir = System.getProperty("java.io.tmpdir", "tmp");
        req.setAttribute(MULTIPART_CONFIG_ELEMENT, new MultipartConfigElement(tempDir));
    }
}
| apache-2.0 |
naver/ngrinder | ngrinder-controller/src/main/java/org/ngrinder/security/PluggablePreAuthFilter.java | 2031 | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ngrinder.security;
import org.ngrinder.extension.OnPreAuthServletFilter;
import org.ngrinder.infra.plugin.PluginManager;
import org.springframework.stereotype.Component;
import org.springframework.web.filter.CompositeFilter;
import javax.servlet.*;
import java.io.IOException;
import java.util.List;
/**
* Proxy filter which run combined preauth plugins.
*
* @since 3.0
*/
@Component
public class PluggablePreAuthFilter implements Filter {

	// Aggregates all plugin-provided pre-auth servlet filters behind a single Filter.
	private final CompositeFilter compositeFilter = new CompositeFilter();

	/**
	 * Loads the servlet filter plugins: collects every enabled
	 * {@link OnPreAuthServletFilter} module and installs the list into the
	 * composite filter.
	 */
	public void loadPlugins(PluginManager pluginManager) {
		List<OnPreAuthServletFilter> enabledModulesByClass = pluginManager.getEnabledModulesByClass(OnPreAuthServletFilter.class);
		this.compositeFilter.setFilters(enabledModulesByClass);
	}

	/**
	 * Delegates filtering to the composite of plugin filters.
	 */
	@Override
	public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain) throws IOException,
			ServletException {
		this.compositeFilter.doFilter(request, response, chain);
	}

	/**
	 * Delegates destruction to the composite filter.
	 */
	@Override
	public void destroy() {
		this.compositeFilter.destroy();
	}

	/**
	 * Delegates initialization to the composite filter.
	 */
	@Override
	public void init(FilterConfig filterConfig) throws ServletException {
		this.compositeFilter.init(filterConfig);
	}
}
| apache-2.0 |
waans11/incubator-asterixdb | asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/ALineSerializerDeserializer.java | 3025 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.asterix.dataflow.data.nontagged.serde;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import org.apache.asterix.dataflow.data.nontagged.Coordinate;
import org.apache.asterix.om.base.ALine;
import org.apache.asterix.om.base.APoint;
import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
import org.apache.hyracks.api.exceptions.HyracksDataException;
/**
 * Serializer/deserializer for {@code ALine} values: a line is stored as its
 * two end points (four doubles).
 */
public class ALineSerializerDeserializer implements ISerializerDeserializer<ALine> {

    private static final long serialVersionUID = 1L;

    public static final ALineSerializerDeserializer INSTANCE = new ALineSerializerDeserializer();

    private ALineSerializerDeserializer() {
        // use the shared INSTANCE
    }

    /**
     * Reads two points from the stream and wraps them in an {@code ALine}.
     */
    @Override
    public ALine deserialize(DataInput in) throws HyracksDataException {
        try {
            final APoint start = APointSerializerDeserializer.INSTANCE.deserialize(in);
            final APoint end = APointSerializerDeserializer.INSTANCE.deserialize(in);
            return new ALine(start, end);
        } catch (IOException e) {
            throw new HyracksDataException(e);
        }
    }

    /**
     * Writes the line as four doubles: start x/y followed by end x/y.
     */
    @Override
    public void serialize(ALine instance, DataOutput out) throws HyracksDataException {
        final APoint start = instance.getP1();
        final APoint end = instance.getP2();
        try {
            out.writeDouble(start.getX());
            out.writeDouble(start.getY());
            out.writeDouble(end.getX());
            out.writeDouble(end.getY());
        } catch (IOException e) {
            throw new HyracksDataException(e);
        }
    }

    // Byte offsets start at 1 -- presumably to skip a leading type tag; confirm
    // against the framework's serialized record layout.
    public final static int getStartPointCoordinateOffset(Coordinate coordinate) throws HyracksDataException {
        if (coordinate == Coordinate.X) {
            return 1;
        }
        if (coordinate == Coordinate.Y) {
            return 9;
        }
        throw new HyracksDataException("Wrong coordinate");
    }

    public final static int getEndPointCoordinateOffset(Coordinate coordinate) throws HyracksDataException {
        if (coordinate == Coordinate.X) {
            return 17;
        }
        if (coordinate == Coordinate.Y) {
            return 25;
        }
        throw new HyracksDataException("Wrong coordinate");
    }
}
| apache-2.0 |
woodcomputing/fxslang | src/main/java/com/woodcomputing/fxslang/collections/ObservableSlangList.java | 2182 | /*
* FXSlang
* Copyright 2016 Jonathan Wood
* Licensed under the Apache License, Version 2.0
*/
package com.woodcomputing.fxslang.collections;
import java.util.function.Predicate;
import javafx.collections.ModifiableObservableListBase;
import javaslang.collection.List;
/**
*
* @author Jonathan Wood
*
*/
public class ObservableSlangList<T> extends ModifiableObservableListBase<T> {

    // Immutable Javaslang backing list; every mutation replaces it with a new version.
    private List<T> delegate;

    private ObservableSlangList() {
        delegate = List.empty();
    }

    private ObservableSlangList(T element) {
        delegate = List.of(element);
    }

    // Safe: elements are only read and forwarded, never stored into the varargs array.
    @SafeVarargs
    private ObservableSlangList(T... elements) {
        delegate = List.of(elements);
    }

    private ObservableSlangList(Iterable<? extends T> elements) {
        delegate = List.ofAll(elements);
    }

    /**
     * Creates an empty observable list.
     */
    public static <T> ObservableSlangList<T> empty() {
        // diamond instead of the previous raw-type construction (removes unchecked warnings)
        return new ObservableSlangList<>();
    }

    /**
     * Creates an observable list holding a single element.
     */
    public static <T> ObservableSlangList<T> of(T element) {
        return new ObservableSlangList<>(element);
    }

    /**
     * Creates an observable list holding the given elements, in order.
     */
    @SafeVarargs
    public static <T> ObservableSlangList<T> of(T... elements) {
        return new ObservableSlangList<>(elements);
    }

    /**
     * Creates an observable list holding all elements of the iterable, in order.
     */
    public static <T> ObservableSlangList<T> ofAll(Iterable<? extends T> elements) {
        return new ObservableSlangList<>(elements);
    }

    /**
     * Returns a new observable list containing only the elements that satisfy
     * the predicate; this list is left unchanged.
     */
    public ObservableSlangList<T> filter(Predicate<? super T> predicate) {
        return ObservableSlangList.ofAll(delegate.filter(predicate));
    }

    @Override
    public T get(int index) {
        return delegate.get(index);
    }

    @Override
    public int size() {
        return delegate.size();
    }

    /** Inserts {@code element} at {@code index} (base class fires the change event). */
    @Override
    protected void doAdd(int index, T element) {
        delegate = delegate.insert(index, element);
    }

    /** Replaces the element at {@code index}, returning the previous value. */
    @Override
    protected T doSet(int index, T element) {
        T oldElement = delegate.get(index);
        delegate = delegate.update(index, element);
        return oldElement;
    }

    /** Removes and returns the element at {@code index}. */
    @Override
    protected T doRemove(int index) {
        T removedElement = delegate.get(index);
        delegate = delegate.removeAt(index);
        return removedElement;
    }
}
| apache-2.0 |
BubbleOctopus/Album | library/src/main/java/com/alexvasilkov/gestures/transition/internal/FromListViewListener.java | 3862 | package com.alexvasilkov.gestures.transition.internal;
import android.graphics.Rect;
import android.support.annotation.NonNull;
import android.view.View;
import android.widget.AbsListView;
import android.widget.ListView;
import com.alexvasilkov.gestures.animation.ViewPositionAnimator;
import com.alexvasilkov.gestures.transition.ViewsCoordinator;
import com.alexvasilkov.gestures.transition.ViewsTracker;
import com.alexvasilkov.gestures.transition.ViewsTransitionAnimator;
/**
 * Connects a {@link ListView} to a {@link ViewsTransitionAnimator}: when the
 * animator requests a "from" view for an item id, this listener locates the
 * corresponding item view (scrolling the list first if needed) and supplies it.
 */
public class FromListViewListener<ID> implements ViewsCoordinator.OnRequestViewListener<ID> {

    // Scratch rectangles reused across calls to avoid allocation.
    // NOTE(review): shared statics assume single-threaded (UI thread) use -- confirm.
    private static final Rect LOCATION_PARENT = new Rect(), LOCATION = new Rect();

    private final ListView mListView;
    private final ViewsTracker<ID> mTracker;
    private final ViewsTransitionAnimator<ID> mAnimator;

    // Id currently being requested; cleared when the animator fully closes.
    private ID mId;
    // Set while the animator is fully open; enables scrolling to partially-visible items.
    private boolean mScrollHalfVisibleItems;

    public FromListViewListener(@NonNull ListView listView,
            @NonNull ViewsTracker<ID> tracker,
            @NonNull ViewsTransitionAnimator<ID> animator) {
        mListView = listView;
        mTracker = tracker;
        mAnimator = animator;

        // Watch scrolling to keep the "from" view current, and animator progress
        // to toggle list visibility and scroll behavior.
        mListView.setOnScrollListener(new ScrollListener());
        mAnimator.addPositionUpdateListener(new UpdateListener());
    }

    @Override
    public void onRequestView(@NonNull ID id) {
        // Trying to find requested view on screen. If it is not currently on screen
        // or it is not fully visible than we should scroll to it at first.
        mId = id;
        int position = mTracker.getPositionForId(id);

        if (position == ViewsTracker.NO_POSITION) {
            return; // Nothing we can do
        }

        View view = mTracker.getViewForPosition(position);
        if (view == null) {
            // Item is off screen: scroll to it; ScrollListener will pick up the view.
            mListView.setSelection(position);
        } else {
            mAnimator.setFromView(id, view);

            if (mScrollHalfVisibleItems) {
                // Compute the list's visible content area (excluding padding) ...
                mListView.getGlobalVisibleRect(LOCATION_PARENT);
                LOCATION_PARENT.left += mListView.getPaddingLeft();
                LOCATION_PARENT.right -= mListView.getPaddingRight();
                LOCATION_PARENT.top += mListView.getPaddingTop();
                LOCATION_PARENT.bottom -= mListView.getPaddingBottom();

                // ... and scroll if the item view is clipped or only partially visible.
                view.getGlobalVisibleRect(LOCATION);
                if (!LOCATION_PARENT.contains(LOCATION)
                        || view.getWidth() > LOCATION.width()
                        || view.getHeight() > LOCATION.height()) {
                    mListView.setSelection(position);
                }
            }
        }
    }

    // Re-binds the "from" view whenever the requested item scrolls into view.
    private class ScrollListener implements AbsListView.OnScrollListener {
        @Override
        public void onScroll(AbsListView view, int firstVisible, int visibleCount, int totalCount) {
            if (mId == null) {
                return; // Nothing to do
            }

            for (int position = firstVisible; position < firstVisible + visibleCount; position++) {
                if (mId.equals(mTracker.getIdForPosition(position))) {
                    View from = mTracker.getViewForPosition(position);
                    if (from != null) {
                        mAnimator.setFromView(mId, from);
                    }
                }
            }
        }

        @Override
        public void onScrollStateChanged(AbsListView view, int scrollState) {
            // No-op
        }
    }

    // Tracks animator progress: hides the list while fully open, clears state on close.
    private class UpdateListener implements ViewPositionAnimator.PositionUpdateListener {
        @Override
        public void onPositionUpdate(float state, boolean isLeaving) {
            if (state == 0f && isLeaving) {
                mId = null;
            }
            mListView.setVisibility(state == 1f && !isLeaving ? View.INVISIBLE : View.VISIBLE);
            mScrollHalfVisibleItems = state == 1f; // Only scroll if we in full mode
        }
    }
}
| apache-2.0 |
boundary/zoocreeper | src/main/java/com/boundary/zoocreeper/ZooKeeperPathOptionHandler.java | 1765 | /**
* Copyright 2013 Boundary, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.boundary.zoocreeper;
import org.apache.zookeeper.common.PathUtils;
import org.kohsuke.args4j.CmdLineException;
import org.kohsuke.args4j.CmdLineParser;
import org.kohsuke.args4j.OptionDef;
import org.kohsuke.args4j.spi.OptionHandler;
import org.kohsuke.args4j.spi.Parameters;
import org.kohsuke.args4j.spi.Setter;
/**
* Option handler for a ZooKeeper path.
*/
public class ZooKeeperPathOptionHandler extends OptionHandler<String> {

    public ZooKeeperPathOptionHandler(CmdLineParser parser, OptionDef option, Setter<? super String> setter) {
        super(parser, option, setter);
    }

    /**
     * Consumes exactly one argument, accepting it only if it is a
     * syntactically valid ZooKeeper path.
     */
    @Override
    public int parseArguments(Parameters params) throws CmdLineException {
        final String candidate = params.getParameter(0);
        try {
            PathUtils.validatePath(candidate);
            setter.addValue(candidate);
            return 1;
        } catch (IllegalArgumentException e) {
            // report the offending value together with the option name that introduced it
            throw new CmdLineException(owner,
                    String.format("\"%s\" is not a valid value for \"%s\"", candidate, params.getParameter(-1)));
        }
    }

    @Override
    public String getDefaultMetaVariable() {
        return "zk_path";
    }
}
| apache-2.0 |
emmartins/wildfly-server-migration | servers/wildfly10.1/src/main/java/org/jboss/migration/wfly10/dist/full/WildFlyFullServer10_1.java | 1554 | /*
* Copyright 2016 Red Hat, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jboss.migration.wfly10.dist.full;
import org.jboss.migration.core.ProductInfo;
import org.jboss.migration.core.env.MigrationEnvironment;
import org.jboss.migration.wfly10.ServiceLoaderWildFlyServerMigrations10;
import org.jboss.migration.wfly10.WildFlyServerMigrations10;
import java.nio.file.Path;
import java.util.ServiceLoader;
/**
* @author emmartins
*/
public class WildFlyFullServer10_1 extends WildFlyFullServer10_0 {
private static final WildFlyServerMigrations10 SERVER_MIGRATIONS = new ServiceLoaderWildFlyServerMigrations10<>(ServiceLoader.load(WildFlyFullServerMigrationProvider10_1.class));
public WildFlyFullServer10_1(String migrationName, ProductInfo productInfo, Path baseDir, MigrationEnvironment migrationEnvironment) {
super(migrationName, productInfo, baseDir, migrationEnvironment);
}
@Override
protected WildFlyServerMigrations10 getMigrations() {
return SERVER_MIGRATIONS;
}
}
| apache-2.0 |
bitrich-info/xchange-stream | xchange-stream-kraken/src/main/java/info/bitrich/xchangestream/kraken/dto/KrakenEvent.java | 529 | package info.bitrich.xchangestream.kraken.dto;
import com.fasterxml.jackson.annotation.JsonProperty;
import info.bitrich.xchangestream.kraken.dto.enums.KrakenEventType;
/** @author pchertalev */
public class KrakenEvent {
@JsonProperty("event")
private final KrakenEventType event;
@JsonProperty("error")
private String error;
public KrakenEvent(KrakenEventType event) {
this.event = event;
}
public KrakenEventType getEvent() {
return event;
}
public String getError() {
return error;
}
}
| apache-2.0 |
QuantumCoding/MultiPlayer-Game | Multi-Player Game/src/com/GameName/Input/ControlRecorder.java | 3658 | package com.GameName.Input;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.lwjgl.input.Controller;
import org.lwjgl.input.Keyboard;
import org.lwjgl.input.Mouse;
import org.lwjgl.opengl.Display;
/**
 * Records user input (controller, mouse, or keyboard) and turns observed
 * input changes into {@code Control} bindings.
 */
public class ControlRecorder {
	public static int AXIS = 0;
	public static int BUTTON = 1;

	/**
	 * Records controls repeatedly while the given exit control reads a
	 * non-zero (active) value, returning everything captured along the way.
	 */
	public static Control[] recordCombo(int type, boolean forward, Control exit, Controller c) {
		List<Control> combo = new ArrayList<Control>();

		while (exit.isActive() != 0D) {
			combo.add(record(type, "UsedInCombo", forward, true, c));
		}

		// toArray(new Control[0]) is required here: the no-arg toArray() returns an
		// Object[], and casting that to Control[] always threw ClassCastException.
		return combo.toArray(new Control[0]);
	}

	/**
	 * Waits for the next input change on the given device type and wraps it in
	 * a Control: button changes use the supplied name/once/forward settings,
	 * axis changes produce an axis Control.
	 */
	public static Control record(int type, String name, boolean forward, boolean onlyOnce, Controller c) {
		double[] data = pullData(type, forward, c);

		if ((int) data[0] == BUTTON) {
			return new Control(type, (int) data[1], name, onlyOnce, forward, c);
		}
		return new Control(type, (int) data[1], name, true, true, 0.0, c);
	}

	/**
	 * Busy-waits until any axis/button/key of the given device type changes and
	 * returns {kind (AXIS/BUTTON), index, delta}. For keyboards the delta's sign
	 * follows the {@code forward} flag. Unknown types yield {0, 0}.
	 */
	public static double[] pullData(int type, boolean forward, Controller c) {
		double[] current = createStateBuffer(type, c);
		if (current == null) {
			// unknown device type: previously this crashed with a NullPointerException
			return new double[]{0, 0};
		}

		sampleState(type, c, current);

		double[] previous;
		do {
			previous = current.clone();
			Display.processMessages();
			sampleState(type, c, current);
		} while (Arrays.equals(current, previous)); // spin until some input changes

		// locate the first input slot that changed and how far it moved
		int changed;
		for (changed = 0; changed < current.length; ++changed) {
			if (current[changed] != previous[changed]) {
				break;
			}
		}
		double value = current[changed] - previous[changed];

		switch (type) {
			case Control.CONTROLLER:
				// axes occupy the first getAxisCount() slots, buttons follow
				return changed >= c.getAxisCount()
						? new double[]{BUTTON, changed - c.getAxisCount(), value}
						: new double[]{AXIS, changed, value};
			case Control.MOUSE:
				// slots 0/1 are the pointer x/y axes, buttons follow
				return changed >= 2
						? new double[]{BUTTON, changed - 2, value}
						: new double[]{AXIS, changed, value};
			case Control.KEYBOARD:
				return new double[]{BUTTON, changed, forward ? value : -value};
			default:
				return new double[]{0, 0};
		}
	}

	// Allocates a buffer sized to the device's inputs, or null for unknown types.
	private static double[] createStateBuffer(int type, Controller c) {
		switch (type) {
			case Control.CONTROLLER: return new double[c.getAxisCount() + c.getButtonCount()];
			case Control.MOUSE: return new double[Mouse.getButtonCount() + 2];
			case Control.KEYBOARD: return new double[Keyboard.getKeyCount()];
			default: return null;
		}
	}

	// Fills dest with the device's current state (axes first, then buttons as 0/1).
	private static void sampleState(int type, Controller c, double[] dest) {
		switch (type) {
			case Control.CONTROLLER:
				for (int i = 0; i < c.getAxisCount(); i++) dest[i] = c.getAxisValue(i);
				for (int i = 0; i < c.getButtonCount(); i++) dest[i + c.getAxisCount()] = c.isButtonPressed(i) ? 1 : 0;
				break;
			case Control.MOUSE:
				dest[0] = Mouse.getX();
				dest[1] = Mouse.getY();
				for (int i = 0; i < Mouse.getButtonCount(); i++) dest[i + 2] = Mouse.isButtonDown(i) ? 1 : 0;
				break;
			case Control.KEYBOARD:
				for (int i = 0; i < Keyboard.getKeyCount(); i++) dest[i] = Keyboard.isKeyDown(i) ? 1 : 0;
				break;
			default:
				break;
		}
	}
}
| apache-2.0 |
KoehlerSB747/sd-tools | src/main/java/org/sd/util/tree/align/NodePair.java | 8908 | /*
Copyright 2009 Semantic Discovery, Inc. (www.semanticdiscovery.com)
This file is part of the Semantic Discovery Toolkit.
The Semantic Discovery Toolkit is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
The Semantic Discovery Toolkit is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public License
along with The Semantic Discovery Toolkit. If not, see <http://www.gnu.org/licenses/>.
*/
package org.sd.util.tree.align;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import org.sd.util.tree.Tree;
/**
* Data structure for recursively pairing nodes from two trees used by
* StructureMatcher.
* <p>
* @author Spence Koehler
*/
class NodePair<T> {
  // Node from the first tree in this pairing.
  private Tree<T> node1;

  /** Returns the first tree's node. */
  public Tree<T> getNode1() {
    return node1;
  }

  // Node from the second tree in this pairing.
  private Tree<T> node2;

  /** Returns the second tree's node. */
  public Tree<T> getNode2() {
    return node2;
  }

  // Strategy used to decide whether two nodes' data match.
  private NodeComparer<T> nodeComparer;

  // Lazily computed basic-match result; basicMatch is only valid once computedBasicMatch is true.
  private boolean computedBasicMatch;
  private boolean basicMatch;

  // Lazily built child pairings for node1/node2 (null until first requested).
  private List<ChildWrapper<T>> childWrappers1;
  private List<ChildWrapper<T>> childWrappers2;
  /**
   * Pairs the given nodes; match state and child pairings are computed lazily.
   */
  NodePair(Tree<T> node1, Tree<T> node2, NodeComparer<T> nodeComparer) {
    this.node1 = node1;
    this.node2 = node2;
    this.nodeComparer = nodeComparer;
    this.computedBasicMatch = false;
    this.basicMatch = false;
    this.childWrappers1 = null;
    this.childWrappers2 = null;
  }
  /**
   * Build a copy of the matching portions of the trees, or null when this
   * pair's nodes themselves don't match. Only children that found a partner
   * (and matched recursively) are copied into the result.
   */
  Tree<T> getIntersectionTree() {
    Tree<T> result = null;

    if (isBasicMatch()) {
      result = copyNode(node1);  // copy this node's data taken from tree 1
      for (ChildWrapper<T> childWrapper : getChildWrappers1()) {
        if (childWrapper.getMatchingChildren() != null) {  // child has a partner in tree 2
          final Tree<T> intersectingChild = childWrapper.getMatchingChildren().getIntersectionTree();
          if (intersectingChild != null) {
            result.addChild(intersectingChild);
          }
        }
      }
    }

    return result;
  }
/**
* Count matches and mismatches of Node1's subtree.
*/
MatchRatio getMatchRatio1() {
int[] matchingNodes = new int[]{0};
int[] mismatchingNodes = new int[]{0};
getMatchRatio1(matchingNodes, mismatchingNodes);
return new MatchRatio(matchingNodes[0], mismatchingNodes[0]);
}
/**
* Count matches and mismatches of Node2's subtree.
*/
MatchRatio getMatchRatio2() {
int[] matchingNodes = new int[]{0};
int[] mismatchingNodes = new int[]{0};
getMatchRatio2(matchingNodes, mismatchingNodes);
return new MatchRatio(matchingNodes[0], mismatchingNodes[0]);
}
  /**
   * Recursive auxiliary for traversing through Node1 descendants, accumulating
   * counts into the single-element arrays used as mutable out-parameters.
   */
  private void getMatchRatio1(int[] matchingNodes, int[] mismatchingNodes) {
    if (!isBasicMatch()) {
      // whole subtree under node1 counts as mismatched
      countNodes(node1, mismatchingNodes);
    }
    else {
      ++matchingNodes[0];
      for (ChildWrapper<T> childWrapper : getChildWrappers1()) {
        if (childWrapper.getMatchingChildren() == null) {
          // unpaired child: all of its nodes are mismatches
          countNodes(childWrapper.getChildNode(), mismatchingNodes);
        }
        else {
          childWrapper.getMatchingChildren().getMatchRatio1(matchingNodes, mismatchingNodes);
        }
      }
    }
  }
  /**
   * Recursive auxiliary for traversing through Node2 descendants, accumulating
   * counts into the single-element arrays used as mutable out-parameters.
   */
  private void getMatchRatio2(int[] matchingNodes, int[] mismatchingNodes) {
    if (!isBasicMatch()) {
      // whole subtree under node2 counts as mismatched
      countNodes(node2, mismatchingNodes);
    }
    else {
      ++matchingNodes[0];
      for (ChildWrapper<T> childWrapper : getChildWrappers2()) {
        if (childWrapper.getMatchingChildren() == null) {
          // unpaired child: all of its nodes are mismatches
          countNodes(childWrapper.getChildNode(), mismatchingNodes);
        }
        else {
          childWrapper.getMatchingChildren().getMatchRatio2(matchingNodes, mismatchingNodes);
        }
      }
    }
  }
/**
* Determine whether this instance's nodes are a "basic" match, meaning the Trees'
* Data matches, without regard to the surrounding tree structure.
*/
boolean isBasicMatch() {
if (!computedBasicMatch) {
basicMatch = basicMatch(node1, node2);
computedBasicMatch = true;
}
return basicMatch;
}
/**
* Get this instance's first node's children, each paired with their
* second node matches or single. Lazily create on the first request
* and cache for reuse.
*/
List<ChildWrapper<T>> getChildWrappers1() {
if (childWrappers1 == null) {
createChildWrappers();
}
return childWrappers1;
}
/**
* Get this instance's second node's children, each paired with their
* first node matches or single. Lazily create on the first request
* and cache for reuse.
*/
List<ChildWrapper<T>> getChildWrappers2() {
if (childWrappers2 == null) {
createChildWrappers();
}
return childWrappers2;
}
  /**
   * Auxiliary for creating child wrappers, pairing children of node1 with
   * children of node2 where possible. Pairing assumes similar child ordering:
   * for each node1 child in turn, scanForMatch looks ahead in node2's
   * remaining children; node2 children skipped over are emitted as unmatched.
   */
  private void createChildWrappers() {
    this.childWrappers1 = new ArrayList<ChildWrapper<T>>();
    this.childWrappers2 = new ArrayList<ChildWrapper<T>>();

    // queues of children still awaiting a pairing decision
    final LinkedList<Tree<T>> unmatchedChildren1 = new LinkedList<Tree<T>>();
    final LinkedList<Tree<T>> unmatchedChildren2 = new LinkedList<Tree<T>>();
    if (node1.hasChildren()) {
      for (Tree<T> child : node1.getChildren()) unmatchedChildren1.addLast(child);
    }
    if (node2.hasChildren()) {
      for (Tree<T> child : node2.getChildren()) unmatchedChildren2.addLast(child);
    }

    while (unmatchedChildren1.size() > 0 && unmatchedChildren2.size() > 0) {
      int match2Pos = scanForMatch(unmatchedChildren1, unmatchedChildren2);

      if (match2Pos < 0) {
        // no node2 child matches this node1 child: record it as unmatched
        final Tree<T> child1 = unmatchedChildren1.removeFirst();
        childWrappers1.add(new ChildWrapper<T>(child1));
      }
      else {
        final Tree<T> matchedChild1 = unmatchedChildren1.removeFirst();
        // node2 children skipped before the match position are unmatched
        for (int counter = 0; counter < match2Pos; ++counter) {
          final Tree<T> unmatchedChild2 = unmatchedChildren2.removeFirst();
          childWrappers2.add(new ChildWrapper<T>(unmatchedChild2));
        }
        // the matched pair is shared by both wrapper lists
        final Tree<T> matchedChild2 = unmatchedChildren2.removeFirst();
        final ChildWrapper<T> matchedWrapper = new ChildWrapper<T>(new NodePair<T>(matchedChild1, matchedChild2, nodeComparer));
        childWrappers1.add(matchedWrapper);
        childWrappers2.add(matchedWrapper);
      }
    }

    // whatever remains on either side has no partner
    for (Tree<T> unmatchedChild1 : unmatchedChildren1) {
      childWrappers1.add(new ChildWrapper<T>(unmatchedChild1));
    }
    for (Tree<T> unmatchedChild2 : unmatchedChildren2) {
      childWrappers2.add(new ChildWrapper<T>(unmatchedChild2));
    }
  }
/**
 * Auxiliary for finding node matches: returns the index within
 * {@code unmatchedChildren2} of the first basic match for the head of
 * {@code unmatchedChildren1}, or -1 when no match exists.
 *
 * This implementation assumes similar ordering in the tree to avoid full
 * N-squared complexity.
 *
 * @param unmatchedChildren1 queue whose head is the node to match
 * @param unmatchedChildren2 candidates scanned front-to-back
 * @return index of the first matching candidate, or -1
 */
private int scanForMatch(LinkedList<Tree<T>> unmatchedChildren1, LinkedList<Tree<T>> unmatchedChildren2) {
    final Tree<T> child1 = unmatchedChildren1.getFirst();
    // Iterate rather than index: LinkedList.get(i) is O(i), which made the
    // original loop accidentally O(n^2) per scan.
    int childIndex = 0;
    for (Tree<T> child2 : unmatchedChildren2) {
        if (basicMatch(child1, child2)) {
            return childIndex;
        }
        ++childIndex;
    }
    return -1;
}
/**
 * Apply the overridden comparison between two nodes when a comparer was
 * supplied, otherwise fall back to the default data-equality check.
 */
boolean basicMatch(Tree<T> node1, Tree<T> node2) {
    return (nodeComparer != null)
            ? nodeComparer.matches(node1, node2)
            : defaultBasicMatch(node1, node2);
}
/**
 * Default determination of a basic match between two nodes: their data
 * payloads are equal, where two {@code null} payloads count as equal and a
 * {@code null} never matches a non-{@code null}.
 *
 * @param node1 first node, never {@code null} itself
 * @param node2 second node, never {@code null} itself
 * @return {@code true} when the payloads are null-safely equal
 */
private boolean defaultBasicMatch(Tree<T> node1, Tree<T> node2) {
    // Objects.equals replaces the hand-rolled null/null and equals() cascade
    // with the identical null-safe semantics.
    return java.util.Objects.equals(node1.getData(), node2.getData());
}
/**
 * Auxiliary to create a fresh, childless node carrying another node's data.
 */
private Tree<T> copyNode(Tree<T> source) {
    final T payload = source.getData();
    return new Tree<T>(payload);
}
/**
 * Auxiliary to count all nodes in a node's subtree via a depth-first walk.
 *
 * @param node root of the subtree to count
 * @param counter single-element accumulator incremented once per node
 */
private void countNodes(Tree<T> node, int[] counter) {
    final Iterator<Tree<T>> it = node.iterator(Tree.Traversal.DEPTH_FIRST);
    while (it.hasNext()) {
        // BUG FIX: the original for-loop never called it.next(), so
        // hasNext() stayed true forever and the loop never terminated on a
        // non-empty subtree. Advance the iterator; only the count matters.
        it.next();
        ++counter[0];
    }
}
}
| apache-2.0 |