text
stringlengths
7
1.01M
package es.GameSquare.GameSquareApp; import java.util.ArrayList; import java.util.List; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpSession; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.security.crypto.bcrypt.BCryptPasswordEncoder; import org.springframework.security.web.csrf.CsrfToken; import org.springframework.stereotype.Controller; import org.springframework.ui.Model; import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.PostMapping; import org.springframework.web.bind.annotation.RequestParam; @Controller public class SessionController { @Autowired private UsersRepository UsersRpo; private String defaultRole = "ROLE_USER"; @GetMapping("/login") public String login(Model model, HttpServletRequest request) { model.addAttribute("link", "/login"); model.addAttribute("action", "Login"); model.addAttribute("register", false); return "session"; } @GetMapping("/register") public String register(Model model, HttpServletRequest request) { model.addAttribute("action", "Register"); model.addAttribute("link", "/register"); model.addAttribute("register", true); return "session"; } @PostMapping("/register") public String register_sumbit(Model model, HttpServletRequest request, @RequestParam String username, @RequestParam String password, @RequestParam String email) { User existing_user = UsersRpo.findByUserName(username); String message = "User succesfully registered!"; if(existing_user == null) { User u = new User(username, new BCryptPasswordEncoder().encode(password), email); u.addRole(defaultRole); UsersRpo.save(u); } else { message = "The username already exists. 
Try again."; } model.addAttribute("message", message); model.addAttribute("link", "/"); return "template"; } @GetMapping("/logout") public String logout(Model model, HttpServletRequest request) { model.addAttribute("message", "Successfully logged out!"); model.addAttribute("link", "/"); request.getSession().invalidate(); return "template"; } @GetMapping("/login?error") public String loginerror(Model model, HttpServletRequest request) { model.addAttribute("message", "Incorrect username or password."); model.addAttribute("link", "/"); return "template"; } }
package org.josfranmc.factory; public interface IMultimediaFile { void play(); }
/*
 * Copyright 2015 JBoss Inc
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.apiman.gateway.engine.es;

import io.apiman.common.es.util.AbstractEsComponent;
import io.apiman.common.es.util.EsConstants;
import io.apiman.common.es.util.builder.index.EsIndexProperties;
import io.apiman.gateway.engine.async.AsyncResultImpl;
import io.apiman.gateway.engine.async.IAsyncResultHandler;
import io.apiman.gateway.engine.components.ISharedStateComponent;
import io.apiman.gateway.engine.es.beans.PrimitiveBean;
import io.apiman.gateway.engine.storage.util.BackingStoreUtil;

import java.util.HashMap;
import java.util.Map;

import javax.xml.namespace.QName;

import org.apache.commons.codec.binary.Base64;
import org.elasticsearch.action.delete.DeleteRequest;
import org.elasticsearch.action.get.GetRequest;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.common.xcontent.XContentType;

import static io.apiman.common.es.util.builder.index.EsIndexUtils.KEYWORD_PROP;
import static io.apiman.common.es.util.builder.index.EsIndexUtils.TEXT_AND_KEYWORD_PROP_256;
import static io.apiman.gateway.engine.storage.util.BackingStoreUtil.JSON_MAPPER;

/**
 * An elasticsearch implementation of the shared state component. Each
 * (namespace, property) pair is stored as one document in a single shared-state
 * index; the document id is the Base64-encoded {@link QName} of the pair.
 * All results — values and errors alike — are reported through the supplied
 * async handler; these methods never throw to the caller.
 *
 * @author eric.wittmann@redhat.com
 */
public class EsSharedStateComponent extends AbstractEsComponent implements ISharedStateComponent {

    /**
     * Constructor.
     * @param config the configuration
     */
    public EsSharedStateComponent(Map<String, String> config) {
        super(config);
    }

    /**
     * Fetches a stored property, reporting {@code defaultValue} when no document
     * exists. The default also serves as the type witness used to deserialize
     * the stored JSON, which is why it must be non-null.
     *
     * @see io.apiman.gateway.engine.components.ISharedStateComponent#getProperty(java.lang.String, java.lang.String, java.lang.Object, io.apiman.gateway.engine.async.IAsyncResultHandler)
     */
    @Override
    public <T> void getProperty(final String namespace, final String propertyName, final T defaultValue, final IAsyncResultHandler<T> handler) {
        if (defaultValue == null) {
            handler.handle(AsyncResultImpl.<T>create(new Exception("Null defaultValue is not allowed."))); //$NON-NLS-1$
            return;
        }
        String id = getPropertyId(namespace, propertyName);
        try {
            GetResponse response = getClient().get(new GetRequest(getFullIndexName()).id(id), RequestOptions.DEFAULT);
            if (response.isExists()) {
                try {
                    T value;
                    // NOTE(review): getClass().isPrimitive() is always false for an
                    // autoboxed T, so in practice only the String branch selects the
                    // PrimitiveBean path here — confirm this mirrors what
                    // setProperty() actually wrote.
                    if (defaultValue.getClass().isPrimitive() || defaultValue instanceof String) {
                        value = (T) readPrimitive(response);
                    } else {
                        String sourceAsString = response.getSourceAsString();
                        value = (T) JSON_MAPPER.readValue(sourceAsString, defaultValue.getClass());
                    }
                    handler.handle(AsyncResultImpl.create(value));
                } catch (Exception e) {
                    handler.handle(AsyncResultImpl.<T>create(e));
                }
            } else {
                // No stored document: report the caller-supplied default.
                handler.handle(AsyncResultImpl.create(defaultValue));
            }
        } catch (Throwable e) {
            handler.handle(AsyncResultImpl.<T>create(e));
        }
    }

    /**
     * Stores a property value, serialized as JSON. Strings (and, nominally,
     * primitives) are wrapped in a {@link PrimitiveBean} carrying the value and
     * its type name; everything else is serialized directly.
     *
     * @see io.apiman.gateway.engine.components.ISharedStateComponent#setProperty(java.lang.String, java.lang.String, java.lang.Object, io.apiman.gateway.engine.async.IAsyncResultHandler)
     */
    @Override
    public <T> void setProperty(final String namespace, final String propertyName, final T value, final IAsyncResultHandler<Void> handler) {
        if (value == null) {
            handler.handle(AsyncResultImpl.<Void>create(new Exception("Null value is not allowed."))); //$NON-NLS-1$
            return;
        }
        String source;
        try {
            // See the matching note in getProperty(): isPrimitive() cannot be
            // true for an autoboxed T, so the wrapper path effectively applies
            // to Strings only.
            if (value.getClass().isPrimitive() || value instanceof String) {
                PrimitiveBean pb = new PrimitiveBean();
                pb.setValue(String.valueOf(value));
                pb.setType(value.getClass().getName());
                source = JSON_MAPPER.writeValueAsString(pb);
            } else {
                source = JSON_MAPPER.writeValueAsString(value);
            }
        } catch (Exception e) {
            handler.handle(AsyncResultImpl.<Void>create(e));
            return;
        }
        String id = getPropertyId(namespace, propertyName);
        String json = source;
        // Index with an explicit id so a later set for the same
        // (namespace, property) overwrites the same document.
        IndexRequest indexRequest = new IndexRequest(getFullIndexName()).source(json, XContentType.JSON).id(id);
        try {
            getClient().index(indexRequest, RequestOptions.DEFAULT);
            handler.handle(AsyncResultImpl.create((Void) null));
        } catch (Throwable e) {
            handler.handle(AsyncResultImpl.<Void>create(e));
        }
    }

    /**
     * Deletes the document backing the given property, if any.
     *
     * @see io.apiman.gateway.engine.components.ISharedStateComponent#clearProperty(java.lang.String, java.lang.String, io.apiman.gateway.engine.async.IAsyncResultHandler)
     */
    @Override
    public <T> void clearProperty(final String namespace, final String propertyName, final IAsyncResultHandler<Void> handler) {
        String id = getPropertyId(namespace, propertyName);
        DeleteRequest deleteRequest = new DeleteRequest(getFullIndexName(), id);
        try {
            getClient().delete(deleteRequest, RequestOptions.DEFAULT);
            handler.handle(AsyncResultImpl.create((Void) null));
        } catch (Throwable e) {
            handler.handle(AsyncResultImpl.<Void>create(e));
        }
    }

    /**
     * Builds the ES document id for a property: the QName of
     * (namespace, propertyName), Base64-encoded so it is safe as an id.
     *
     * @param namespace the property namespace
     * @param propertyName the property name
     */
    private String getPropertyId(String namespace, String propertyName) {
        String qn = new QName(namespace, propertyName).toString();
        // NOTE(review): getBytes() uses the platform default charset — verify
        // all gateway nodes run with the same charset, or ids could differ.
        return Base64.encodeBase64String(qn.getBytes());
    }

    /**
     * Reads a stored primitive: unwraps the PrimitiveBean written by
     * setProperty() and converts its string value back to the recorded type.
     *
     * @param response the ES get response holding the document
     */
    protected Object readPrimitive(GetResponse response) throws Exception {
        String sourceAsString = response.getSourceAsString();
        PrimitiveBean pb = JSON_MAPPER.readValue(sourceAsString, PrimitiveBean.class);
        String value = pb.getValue();
        Class<?> c = Class.forName(pb.getType());
        return BackingStoreUtil.readPrimitive(c, value);
    }

    /**
     * @see AbstractEsComponent#getDefaultIndexPrefix()
     */
    @Override
    protected String getDefaultIndexPrefix() {
        return EsConstants.GATEWAY_INDEX_NAME;
    }

    /**
     * Declares the index mapping for the shared-state property index.
     */
    @Override
    public Map<String, EsIndexProperties> getEsIndices() {
        EsIndexProperties indexDefinition = EsIndexProperties.builder()
            .addProperty(EsConstants.ES_FIELD_ORGANIZATION_ID, KEYWORD_PROP)
            .addProperty(EsConstants.ES_FIELD_TYPE, KEYWORD_PROP)
            .addProperty(EsConstants.ES_FIELD_VALUE, TEXT_AND_KEYWORD_PROP_256)
            .addProperty(EsConstants.ES_FIELD_VERSION, KEYWORD_PROP)
            .build();
        Map<String, EsIndexProperties> indexMap = new HashMap<>();
        indexMap.put(EsConstants.INDEX_SHARED_STATE_PROPERTY, indexDefinition);
        return indexMap;
    }

    /**
     * get index full name for shared state property
     * (lower-cased because ES index names must be lowercase)
     * @return full index name
     */
    private String getFullIndexName() {
        return (getIndexPrefix() + EsConstants.INDEX_SHARED_STATE_PROPERTY).toLowerCase();
    }
}
/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.example.android.recyclerview;

import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.support.v7.widget.GridLayoutManager;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.RadioButton;

/**
 * Demonstrates the use of {@link RecyclerView} with a {@link LinearLayoutManager} and a
 * {@link GridLayoutManager}. Two radio buttons let the user toggle between the
 * layouts at runtime; the chosen layout is preserved across configuration
 * changes via the saved instance state.
 */
public class RecyclerViewFragment extends Fragment {

    private static final String TAG = "RecyclerViewFragment";
    // Saved-state key for the currently selected layout manager type.
    private static final String KEY_LAYOUT_MANAGER = "layoutManager";
    // Number of columns when the grid layout is active.
    private static final int SPAN_COUNT = 2;
    // Size of the generated demo dataset.
    private static final int DATASET_COUNT = 1000;

    private enum LayoutManagerType {
        GRID_LAYOUT_MANAGER,
        LINEAR_LAYOUT_MANAGER
    }

    protected LayoutManagerType mCurrentLayoutManagerType;

    protected RadioButton mLinearLayoutRadioButton;
    protected RadioButton mGridLayoutRadioButton;

    protected RecyclerView mRecyclerView;
    protected CustomAdapter mAdapter;
    protected RecyclerView.LayoutManager mLayoutManager;
    protected String[] mDataset;

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        // Initialize dataset, this data would usually come from a local content provider or
        // remote server.
        initDataset();
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
            Bundle savedInstanceState) {
        View rootView = inflater.inflate(R.layout.recycler_view_frag, container, false);
        rootView.setTag(TAG);

        // BEGIN_INCLUDE(initializeRecyclerView)
        mRecyclerView = (RecyclerView) rootView.findViewById(R.id.recyclerView);

        // LinearLayoutManager is used here, this will layout the elements in a similar fashion
        // to the way ListView would layout elements. The RecyclerView.LayoutManager defines how
        // elements are laid out.
        mLayoutManager = new LinearLayoutManager(getActivity());

        mCurrentLayoutManagerType = LayoutManagerType.LINEAR_LAYOUT_MANAGER;

        if (savedInstanceState != null) {
            // Restore saved layout manager type (overrides the linear default above).
            mCurrentLayoutManagerType = (LayoutManagerType) savedInstanceState
                    .getSerializable(KEY_LAYOUT_MANAGER);
        }
        setRecyclerViewLayoutManager(mCurrentLayoutManagerType);

        mAdapter = new CustomAdapter(mDataset);
        // Set CustomAdapter as the adapter for RecyclerView.
        mRecyclerView.setAdapter(mAdapter);
        // END_INCLUDE(initializeRecyclerView)

        // Each radio button simply switches the layout manager type.
        mLinearLayoutRadioButton = (RadioButton) rootView.findViewById(R.id.linear_layout_rb);
        mLinearLayoutRadioButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                setRecyclerViewLayoutManager(LayoutManagerType.LINEAR_LAYOUT_MANAGER);
            }
        });

        mGridLayoutRadioButton = (RadioButton) rootView.findViewById(R.id.grid_layout_rb);
        mGridLayoutRadioButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                setRecyclerViewLayoutManager(LayoutManagerType.GRID_LAYOUT_MANAGER);
            }
        });

        return rootView;
    }

    /**
     * Set RecyclerView's LayoutManager to the one given, preserving the current
     * scroll position across the switch.
     *
     * @param layoutManagerType Type of layout manager to switch to.
     */
    public void setRecyclerViewLayoutManager(LayoutManagerType layoutManagerType) {
        int scrollPosition = 0;

        // If a layout manager has already been set, get current scroll position.
        // (Both managers used here extend LinearLayoutManager, so the cast is safe.)
        if (mRecyclerView.getLayoutManager() != null) {
            scrollPosition = ((LinearLayoutManager) mRecyclerView.getLayoutManager())
                    .findFirstCompletelyVisibleItemPosition();
        }

        switch (layoutManagerType) {
            case GRID_LAYOUT_MANAGER:
                mLayoutManager = new GridLayoutManager(getActivity(), SPAN_COUNT);
                mCurrentLayoutManagerType = LayoutManagerType.GRID_LAYOUT_MANAGER;
                break;
            case LINEAR_LAYOUT_MANAGER:
                mLayoutManager = new LinearLayoutManager(getActivity());
                mCurrentLayoutManagerType = LayoutManagerType.LINEAR_LAYOUT_MANAGER;
                break;
            default:
                // Fall back to linear for any unknown value.
                mLayoutManager = new LinearLayoutManager(getActivity());
                mCurrentLayoutManagerType = LayoutManagerType.LINEAR_LAYOUT_MANAGER;
        }

        mRecyclerView.setLayoutManager(mLayoutManager);
        mRecyclerView.scrollToPosition(scrollPosition);
    }

    @Override
    public void onSaveInstanceState(Bundle savedInstanceState) {
        // Save currently selected layout manager so it survives recreation.
        savedInstanceState.putSerializable(KEY_LAYOUT_MANAGER, mCurrentLayoutManagerType);
        super.onSaveInstanceState(savedInstanceState);
    }

    /**
     * Generates Strings for RecyclerView's adapter. This data would usually come
     * from a local content provider or remote server.
     */
    private void initDataset() {
        mDataset = new String[DATASET_COUNT];
        for (int i = 0; i < DATASET_COUNT; i++) {
            mDataset[i] = "This is element #" + i;
        }
    }
}
/*
 * Copyright 2016-2021 DiffPlug
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.diffplug.spotless.maven.java;

import org.junit.jupiter.api.Test;

import com.diffplug.spotless.maven.MavenIntegrationHarness;

/**
 * Integration test for the Eclipse-formatter step of the Maven plugin:
 * running {@code spotless:apply} against an unformatted source file must
 * produce the expected formatted output.
 */
class EclipseFormatStepTest extends MavenIntegrationHarness {

	@Test
	void testEclipse() throws Exception {
		// Declare an <eclipse> step in the POM, backed by a formatter config.
		writePomWithJavaSteps(
				"<eclipse>",
				" <file>${basedir}/formatter.xml</file>",
				"</eclipse>");
		setFile("formatter.xml").toResource("java/eclipse/formatter.xml");

		// Seed an unformatted source file, then run the apply goal on it.
		String sourcePath = "src/main/java/test.java";
		setFile(sourcePath).toResource("java/eclipse/JavaCodeUnformatted.test");
		mavenRunner().withArguments("spotless:apply").runNoError();

		// The file must now match the known formatted reference.
		assertFile(sourcePath).sameAsResource("java/eclipse/JavaCodeFormatted.test");
	}
}
package com.muli.java.shoppingcartbackend.dao;

import java.util.List;

import com.muli.java.shoppingcartbackend.dto.Category;

/**
 * Data-access contract for product categories.
 */
public interface CategoryDAO {

    /**
     * Returns the full list of categories.
     *
     * @return all known categories
     */
    // Interface members are implicitly public; the redundant modifier was removed.
    List<Category> getCategoryList();

    /**
     * Looks up a single category by its identifier.
     *
     * @param id the category id
     * @return the matching category
     */
    Category getCategory(int id);
}
/*
 * Copyright 2012-2021 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.boot.docs.howto.dataaccess;

import java.sql.SQLException;

import javax.sql.DataSource;

import com.zaxxer.hikari.HikariDataSource;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.Import;
import org.springframework.test.context.junit.jupiter.SpringExtension;

import static org.assertj.core.api.Assertions.assertThat;

/**
 * Test for {@link ConfigurableDataSourceConfiguration}: verifies that the
 * {@code app.datasource.*} properties below are bound onto the single
 * Hikari-backed {@link DataSource} the configuration exposes.
 *
 * @author Stephane Nicoll
 */
@ExtendWith(SpringExtension.class)
// In-memory H2 URL plus a distinctive pool size, so we can assert both the
// URL and the pool-size property were applied.
@SpringBootTest(properties = { "app.datasource.url=jdbc:h2:mem:configurable;DB_CLOSE_DELAY=-1",
		"app.datasource.configuration.maximum-pool-size=42" })
@Import(ConfigurableDataSourceConfiguration.class)
class ConfigurableDataSourceConfigurationTests {

	@Autowired
	private ApplicationContext context;

	@Test
	void validateConfiguration() throws SQLException {
		// Exactly one DataSource bean, it is Hikari, and both configured
		// properties (JDBC URL, maximum pool size) were applied.
		assertThat(this.context.getBeansOfType(DataSource.class)).hasSize(1);
		HikariDataSource dataSource = this.context.getBean(HikariDataSource.class);
		assertThat(dataSource.getConnection().getMetaData().getURL()).isEqualTo("jdbc:h2:mem:configurable");
		assertThat(dataSource.getMaximumPoolSize()).isEqualTo(42);
	}
}
package org.areco.ecommerce.deploymentscripts.jalo;

/**
 * Represents a group of servers where the deployment scripts may run.
 * <p>
 * Pure marker subclass: all attribute accessors live in the generated
 * superclass, and no jalo-layer behaviour is needed here.
 */
public class DeploymentEnvironment extends GeneratedDeploymentEnvironment {
    // We don't have any jalo method.
}
/*
 * Copyright (C) 2009 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package android.media;

import android.annotation.UnsupportedAppUsage;
import android.os.Parcel;
import android.util.Log;
import android.util.MathUtils;

import java.util.Calendar;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.Set;
import java.util.TimeZone;

/**
   Class to hold the media's metadata.  Metadata are used
   for human consumption and can be embedded in the media (e.g.
   shoutcast) or available from an external source. The source can be
   local (e.g. thumbnail stored in the DB) or remote.

   Metadata is like a Bundle. It is sparse and each key can occur at
   most once. The key is an integer and the value is the actual metadata.

   The caller is expected to know the type of the metadata and call
   the right get* method to fetch its value.

   @hide
   @deprecated Use {@link MediaMetadata}.
 */
@Deprecated
public class Metadata {
    // The metadata are keyed using integers rather than more heavy
    // weight strings. We considered using Bundle to ship the metadata
    // between the native layer and the java layer but dropped that
    // option since keeping in sync a native implementation of Bundle
    // and the java one would be too burdensome. Besides Bundle uses
    // String for its keys.
    // The key range [0 8192) is reserved for the system.
    //
    // We manually serialize the data in Parcels. For large memory
    // blob (bitmaps, raw pictures) we use MemoryFile which allow the
    // client to make the data purge-able once it is done with it.
    //

    /**
     * {@hide}
     */
    public static final int ANY = 0;  // Never used for metadata returned, only for filtering.
                                      // Keep in sync with kAny in MediaPlayerService.cpp

    // Playback capabilities.
    /**
     * Indicate whether the media can be paused
     */
    @UnsupportedAppUsage
    public static final int PAUSE_AVAILABLE         = 1; // Boolean
    /**
     * Indicate whether the media can be backward seeked
     */
    @UnsupportedAppUsage
    public static final int SEEK_BACKWARD_AVAILABLE = 2; // Boolean
    /**
     * Indicate whether the media can be forward seeked
     */
    @UnsupportedAppUsage
    public static final int SEEK_FORWARD_AVAILABLE  = 3; // Boolean
    /**
     * Indicate whether the media can be seeked
     */
    @UnsupportedAppUsage
    public static final int SEEK_AVAILABLE          = 4; // Boolean

    // TODO: Should we use numbers compatible with the metadata retriever?
    /**
     * {@hide}
     */
    public static final int TITLE                   = 5; // String
    /**
     * {@hide}
     */
    public static final int COMMENT                 = 6; // String
    /**
     * {@hide}
     */
    public static final int COPYRIGHT               = 7; // String
    /**
     * {@hide}
     */
    public static final int ALBUM                   = 8; // String
    /**
     * {@hide}
     */
    public static final int ARTIST                  = 9; // String
    /**
     * {@hide}
     */
    public static final int AUTHOR                  = 10; // String
    /**
     * {@hide}
     */
    public static final int COMPOSER                = 11; // String
    /**
     * {@hide}
     */
    public static final int GENRE                   = 12; // String
    /**
     * {@hide}
     */
    public static final int DATE                    = 13; // Date
    /**
     * {@hide}
     */
    public static final int DURATION                = 14; // Integer(millisec)
    /**
     * {@hide}
     */
    public static final int CD_TRACK_NUM            = 15; // Integer 1-based
    /**
     * {@hide}
     */
    public static final int CD_TRACK_MAX            = 16; // Integer
    /**
     * {@hide}
     */
    public static final int RATING                  = 17; // String
    /**
     * {@hide}
     */
    public static final int ALBUM_ART               = 18; // byte[]
    /**
     * {@hide}
     */
    public static final int VIDEO_FRAME             = 19; // Bitmap
    /**
     * {@hide}
     */
    public static final int BIT_RATE                = 20; // Integer, Aggregate rate of
                                                          // all the streams in bps.
    /**
     * {@hide}
     */
    public static final int AUDIO_BIT_RATE          = 21; // Integer, bps
    /**
     * {@hide}
     */
    public static final int VIDEO_BIT_RATE          = 22; // Integer, bps
    /**
     * {@hide}
     */
    public static final int AUDIO_SAMPLE_RATE       = 23; // Integer, Hz
    /**
     * {@hide}
     */
    public static final int VIDEO_FRAME_RATE        = 24; // Integer, Hz

    // See RFC2046 and RFC4281.
    /**
     * {@hide}
     */
    public static final int MIME_TYPE               = 25; // String
    /**
     * {@hide}
     */
    public static final int AUDIO_CODEC             = 26; // String
    /**
     * {@hide}
     */
    public static final int VIDEO_CODEC             = 27; // String

    /**
     * {@hide}
     */
    public static final int VIDEO_HEIGHT            = 28; // Integer
    /**
     * {@hide}
     */
    public static final int VIDEO_WIDTH             = 29; // Integer
    /**
     * {@hide}
     */
    public static final int NUM_TRACKS              = 30; // Integer
    /**
     * {@hide}
     */
    public static final int DRM_CRIPPLED            = 31; // Boolean

    // Bounds of the key spaces: system ids are (ANY, LAST_SYSTEM];
    // custom ids start at FIRST_CUSTOM.
    private static final int LAST_SYSTEM = 31;
    private static final int FIRST_CUSTOM = 8192;

    // Shorthands to set the MediaPlayer's metadata filter.
    /**
     * {@hide}
     */
    public static final Set<Integer> MATCH_NONE = Collections.EMPTY_SET;
    /**
     * {@hide}
     */
    public static final Set<Integer> MATCH_ALL = Collections.singleton(ANY);

    // Type tags stored in each record's type field.
    /**
     * {@hide}
     */
    public static final int STRING_VAL     = 1;
    /**
     * {@hide}
     */
    public static final int INTEGER_VAL    = 2;
    /**
     * {@hide}
     */
    public static final int BOOLEAN_VAL    = 3;
    /**
     * {@hide}
     */
    public static final int LONG_VAL       = 4;
    /**
     * {@hide}
     */
    public static final int DOUBLE_VAL     = 5;
    /**
     * {@hide}
     */
    public static final int DATE_VAL       = 6;
    /**
     * {@hide}
     */
    public static final int BYTE_ARRAY_VAL = 7;
    // FIXME: misses a type for shared heap is missing (MemoryFile).
    // FIXME: misses a type for bitmaps.
    private static final int LAST_TYPE = 7;

    private static final String TAG = "media.Metadata";
    private static final int kInt32Size = 4;
    private static final int kMetaHeaderSize = 2 * kInt32Size; //  size + marker
    private static final int kRecordHeaderSize = 3 * kInt32Size; // size + id + type
    private static final int kMetaMarker = 0x4d455441;  // 'M' 'E' 'T' 'A'

    // After a successful parsing, set the parcel with the serialized metadata.
    private Parcel mParcel;

    // Map to associate a Metadata key (e.g TITLE) with the offset of
    // the record's payload in the parcel.
    // Used to look up if a key was present too.
    // Key: Metadata ID
    // Value: Offset of the metadata type field in the record.
    private final HashMap<Integer, Integer> mKeyToPosMap =
            new HashMap<Integer, Integer>();

    /**
     * {@hide}
     */
    @UnsupportedAppUsage
    public Metadata() { }

    /**
     * Go over all the records, collecting metadata keys and records'
     * type field offset in the Parcel. These are stored in
     * mKeyToPosMap for later retrieval.
     * Record wire format: [int32 record size][int32 metadata key]
     * [int32 metadata type][payload]; the stored offset points at the
     * type field so accessors can read type + payload lazily.
     * @param parcel With the serialized records.
     * @param bytesLeft How many bytes in the parcel should be processed.
     * @return false if an error occurred during parsing.
     */
    private boolean scanAllRecords(Parcel parcel, int bytesLeft) {
        int recCount = 0;
        boolean error = false;

        mKeyToPosMap.clear();
        while (bytesLeft > kRecordHeaderSize) {
            final int start = parcel.dataPosition();
            // Check the size.
            final int size = parcel.readInt();
            if (size <= kRecordHeaderSize) {  // at least 1 byte should be present.
                Log.e(TAG, "Record is too short");
                error = true;
                break;
            }

            // Check the metadata key.
            final int metadataId = parcel.readInt();
            if (!checkMetadataId(metadataId)) {
                error = true;
                break;
            }

            // Store the record offset which points to the type
            // field so we can later on read/unmarshall the record
            // payload.
            if (mKeyToPosMap.containsKey(metadataId)) {
                Log.e(TAG, "Duplicate metadata ID found");
                error = true;
                break;
            }
            mKeyToPosMap.put(metadataId, parcel.dataPosition());

            // Check the metadata type.
            final int metadataType = parcel.readInt();
            if (metadataType <= 0 || metadataType > LAST_TYPE) {
                Log.e(TAG, "Invalid metadata type " + metadataType);
                error = true;
                break;
            }

            // Skip to the next one. addOrThrow guards against an
            // attacker-controlled size overflowing the position.
            try {
                parcel.setDataPosition(MathUtils.addOrThrow(start, size));
            } catch (IllegalArgumentException e) {
                Log.e(TAG, "Invalid size: " + e.getMessage());
                error = true;
                break;
            }
            bytesLeft -= size;
            ++recCount;
        }

        // The records must consume exactly bytesLeft; leftovers mean a
        // truncated or corrupt parcel.
        if (0 != bytesLeft || error) {
            Log.e(TAG, "Ran out of data or error on record " + recCount);
            mKeyToPosMap.clear();
            return false;
        } else {
            return true;
        }
    }

    /**
     * Check a parcel containing metadata is well formed. The header
     * is checked as well as the individual records format. However, the
     * data inside the record is not checked because we do lazy access
     * (we check/unmarshall only data the user asks for.)
     *
     * Parcel wire format: [int32 total size]['M''E''T''A' marker]
     * [records...].
     *
     * @param parcel With the serialized data. Metadata keeps a
     *               reference on it to access it later on. The caller
     *               should not modify the parcel after this call (and
     *               not call recycle on it.)
     * @return false if an error occurred.
     * {@hide}
     */
    @UnsupportedAppUsage
    public boolean parse(Parcel parcel) {
        if (parcel.dataAvail() < kMetaHeaderSize) {
            Log.e(TAG, "Not enough data " + parcel.dataAvail());
            return false;
        }

        final int pin = parcel.dataPosition();  // to roll back in case of errors.
        final int size = parcel.readInt();

        // The extra kInt32Size below is to account for the int32 'size' just read.
        if (parcel.dataAvail() + kInt32Size < size || size < kMetaHeaderSize) {
            Log.e(TAG, "Bad size " + size + " avail " + parcel.dataAvail() + " position " + pin);
            parcel.setDataPosition(pin);
            return false;
        }

        // Checks if the 'M' 'E' 'T' 'A' marker is present.
        final int kShouldBeMetaMarker = parcel.readInt();
        if (kShouldBeMetaMarker != kMetaMarker) {
            Log.e(TAG, "Marker missing " + Integer.toHexString(kShouldBeMetaMarker));
            parcel.setDataPosition(pin);
            return false;
        }

        // Scan the records to collect metadata ids and offsets.
        if (!scanAllRecords(parcel, size - kMetaHeaderSize)) {
            parcel.setDataPosition(pin);
            return false;
        }
        mParcel = parcel;
        return true;
    }

    /**
     * @return The set of metadata ID found.
     */
    @UnsupportedAppUsage
    public Set<Integer> keySet() {
        return mKeyToPosMap.keySet();
    }

    /**
     * @return true if a value is present for the given key.
     */
    @UnsupportedAppUsage
    public boolean has(final int metadataId) {
        if (!checkMetadataId(metadataId)) {
            throw new IllegalArgumentException("Invalid key: " + metadataId);
        }
        return mKeyToPosMap.containsKey(metadataId);
    }

    // Accessors.
    // Caller must make sure the key is present using the {@code has}
    // method otherwise a RuntimeException will occur.

    /**
     * {@hide}
     */
    @UnsupportedAppUsage
    public String getString(final int key) {
        checkType(key, STRING_VAL);
        return mParcel.readString();
    }

    /**
     * {@hide}
     */
    @UnsupportedAppUsage
    public int getInt(final int key) {
        checkType(key, INTEGER_VAL);
        return mParcel.readInt();
    }

    /**
     * Get the boolean value indicated by key
     */
    @UnsupportedAppUsage
    public boolean getBoolean(final int key) {
        checkType(key, BOOLEAN_VAL);
        // Booleans are marshalled as int 1 (true) / anything else (false).
        return mParcel.readInt() == 1;
    }

    /**
     * {@hide}
     */
    @UnsupportedAppUsage
    public long getLong(final int key) {
        checkType(key, LONG_VAL);
        return mParcel.readLong();
    }

    /**
     * {@hide}
     */
    @UnsupportedAppUsage
    public double getDouble(final int key) {
        checkType(key, DOUBLE_VAL);
        return mParcel.readDouble();
    }

    /**
     * {@hide}
     */
    @UnsupportedAppUsage
    public byte[] getByteArray(final int key) {
        checkType(key, BYTE_ARRAY_VAL);
        return mParcel.createByteArray();
    }

    /**
     * {@hide}
     */
    @UnsupportedAppUsage
    public Date getDate(final int key) {
        checkType(key, DATE_VAL);
        // Dates are marshalled as a millis-since-epoch long followed by an
        // optional time-zone id string; an empty id means "no zone".
        final long timeSinceEpoch = mParcel.readLong();
        final String timeZone = mParcel.readString();

        if (timeZone.length() == 0) {
            return new Date(timeSinceEpoch);
        } else {
            TimeZone tz = TimeZone.getTimeZone(timeZone);
            Calendar cal = Calendar.getInstance(tz);

            cal.setTimeInMillis(timeSinceEpoch);
            return cal.getTime();
        }
    }

    /**
     * @return the last available system metadata id. Ids are
     * 1-indexed.
     * {@hide}
     */
    public static int lastSytemId() { return LAST_SYSTEM; }

    /**
     * @return the first available custom metadata id.
     * {@hide}
     */
    public static int firstCustomId() { return FIRST_CUSTOM; }

    /**
     * @return the last value of known type. Types are 1-indexed.
     * {@hide}
     */
    public static int lastType() { return LAST_TYPE; }

    /**
     * Check val is either a system id or a custom one.
     * @param val Metadata key to test.
     * @return true if it is in a valid range.
     **/
    private boolean checkMetadataId(final int val) {
        // Valid ids are (ANY, LAST_SYSTEM] or [FIRST_CUSTOM, ...);
        // the gap between the two ranges is reserved.
        if (val <= ANY || (LAST_SYSTEM < val && val < FIRST_CUSTOM)) {
            Log.e(TAG, "Invalid metadata ID " + val);
            return false;
        }
        return true;
    }

    /**
     * Check the type of the data match what is expected. Also positions the
     * parcel at the record's payload as a side effect, so the caller's read
     * immediately follows. NPEs if the key is absent (see {@code has}).
     */
    private void checkType(final int key, final int expectedType) {
        final int pos = mKeyToPosMap.get(key);

        mParcel.setDataPosition(pos);

        final int type = mParcel.readInt();
        if (type != expectedType) {
            throw new IllegalStateException("Wrong type " + expectedType + " but got " + type);
        }
    }
}
package com.yeungeek.monkeyandroid.util;

/**
 * Query-string fragments appended to image URLs to request a specific size.
 * <p>
 * Created by yeungeek on 2016/4/3.
 * <p>
 * NOTE(review): this is the "constant interface" anti-pattern — constants are
 * better hosted in a final class with a private constructor. Left as an
 * interface here because changing it would break any type that implements it.
 */
public interface ImageSize {
    // Suffix requesting a 120px rendition (presumably a GitHub-style
    // "&s=<size>" avatar parameter — TODO confirm against callers).
    String AVATAR_120 = "&s=120";
}
package com.alipay.api.domain;

import com.alipay.api.AlipayObject;
import com.alipay.api.internal.mapping.ApiField;

/**
 * Query model: merchants available to an enterprise (expense scene).
 * Plain request bean; fields are mapped to API parameters via {@link ApiField}.
 *
 * @author auto create
 * @since 1.0, 2021-09-24 16:47:19
 */
public class AlipayEbppInvoiceExpensesceneMerchantQueryModel extends AlipayObject {

    private static final long serialVersionUID = 1215745611286821638L;

    /**
     * Enterprise contract "co-funding" (企业合花) ID — the shared/joint account ID.
     */
    @ApiField("account_id")
    private String accountId;

    /**
     * Authorized signing agreement number.
     */
    @ApiField("agreement_no")
    private String agreementNo;

    /**
     * Expense type. MEAL - work meal.
     */
    @ApiField("expense_type")
    private String expenseType;

    /**
     * Page number.
     */
    @ApiField("page_num")
    private Long pageNum;

    /**
     * Number of entries per page (maximum 100).
     */
    @ApiField("page_size")
    private Long pageSize;

    /**
     * Merchant ID.
     */
    @ApiField("role_id")
    private String roleId;

    /**
     * Store (shop) ID.
     */
    @ApiField("shop_id")
    private String shopId;

    public String getAccountId() {
        return this.accountId;
    }
    public void setAccountId(String accountId) {
        this.accountId = accountId;
    }

    public String getAgreementNo() {
        return this.agreementNo;
    }
    public void setAgreementNo(String agreementNo) {
        this.agreementNo = agreementNo;
    }

    public String getExpenseType() {
        return this.expenseType;
    }
    public void setExpenseType(String expenseType) {
        this.expenseType = expenseType;
    }

    public Long getPageNum() {
        return this.pageNum;
    }
    public void setPageNum(Long pageNum) {
        this.pageNum = pageNum;
    }

    public Long getPageSize() {
        return this.pageSize;
    }
    public void setPageSize(Long pageSize) {
        this.pageSize = pageSize;
    }

    public String getRoleId() {
        return this.roleId;
    }
    public void setRoleId(String roleId) {
        this.roleId = roleId;
    }

    public String getShopId() {
        return this.shopId;
    }
    public void setShopId(String shopId) {
        this.shopId = shopId;
    }
}
package com.getcapacitor.plugin.notification;

import android.content.ContentResolver;
import android.content.Context;
import com.getcapacitor.JSArray;
import com.getcapacitor.JSObject;
import com.getcapacitor.Logger;
import com.getcapacitor.PluginCall;
import com.getcapacitor.plugin.util.AssetUtil;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.List;
import org.json.JSONException;
import org.json.JSONObject;

/**
 * Local notification object mapped from json plugin
 */
public class LocalNotification {

    private String title;
    private String body;
    private Integer id;
    private String sound;
    private String smallIcon;
    private String iconColor;
    private String actionTypeId;
    private String group;
    private boolean groupSummary;
    private boolean ongoing;
    private boolean autoCancel;
    private JSObject extra;
    private List<LocalNotificationAttachment> attachments;
    private LocalNotificationSchedule schedule;
    private String channelId;
    // Raw JSON string this notification was built from; kept for round-tripping.
    private String source;

    public String getTitle() {
        return title;
    }

    public void setTitle(String title) {
        this.title = title;
    }

    public String getBody() {
        return body;
    }

    public void setBody(String body) {
        this.body = body;
    }

    public LocalNotificationSchedule getSchedule() {
        return schedule;
    }

    public void setSchedule(LocalNotificationSchedule schedule) {
        this.schedule = schedule;
    }

    /**
     * Resolves the configured sound to an android.resource:// URI.
     * Falls back to {@code defaultSound} when the configured sound does not resolve
     * to a raw resource; returns null when neither resolves.
     */
    public String getSound(Context context, int defaultSound) {
        String soundPath = null;
        int resId = AssetUtil.RESOURCE_ID_ZERO_VALUE;
        String name = AssetUtil.getResourceBaseName(sound);
        if (name != null) {
            resId = AssetUtil.getResourceID(context, name, "raw");
        }
        if (resId == AssetUtil.RESOURCE_ID_ZERO_VALUE) {
            resId = defaultSound;
        }
        if (resId != AssetUtil.RESOURCE_ID_ZERO_VALUE) {
            soundPath = ContentResolver.SCHEME_ANDROID_RESOURCE + "://" + context.getPackageName() + "/" + resId;
        }
        return soundPath;
    }

    public void setSound(String sound) {
        this.sound = sound;
    }

    // Stores only the resource base name (extension/path stripped), not the raw value.
    public void setSmallIcon(String smallIcon) {
        this.smallIcon = AssetUtil.getResourceBaseName(smallIcon);
    }

    public String getIconColor(String globalColor) {
        // use the one defined local before trying for a globally defined color
        if (iconColor != null) {
            return iconColor;
        }
        return globalColor;
    }

    public void setIconColor(String iconColor) {
        this.iconColor = iconColor;
    }

    public List<LocalNotificationAttachment> getAttachments() {
        return attachments;
    }

    public void setAttachments(List<LocalNotificationAttachment> attachments) {
        this.attachments = attachments;
    }

    public String getActionTypeId() {
        return actionTypeId;
    }

    public void setActionTypeId(String actionTypeId) {
        this.actionTypeId = actionTypeId;
    }

    public String getGroup() {
        return group;
    }

    public void setGroup(String group) {
        this.group = group;
    }

    public JSObject getExtra() {
        return extra;
    }

    public void setExtra(JSObject extra) {
        this.extra = extra;
    }

    public Integer getId() {
        return id;
    }

    public void setId(Integer id) {
        this.id = id;
    }

    public boolean isGroupSummary() {
        return groupSummary;
    }

    public void setGroupSummary(boolean groupSummary) {
        this.groupSummary = groupSummary;
    }

    public boolean isOngoing() {
        return ongoing;
    }

    public void setOngoing(boolean ongoing) {
        this.ongoing = ongoing;
    }

    public boolean isAutoCancel() {
        return autoCancel;
    }

    public void setAutoCancel(boolean autoCancel) {
        this.autoCancel = autoCancel;
    }

    public String getChannelId() {
        return channelId;
    }

    public void setChannelId(String channelId) {
        this.channelId = channelId;
    }

    /**
     * Build list of the notifications from remote plugin call.
     * Reports an error on the call and returns null for any malformed input.
     */
    public static List<LocalNotification> buildNotificationList(PluginCall call) {
        JSArray notificationArray = call.getArray("notifications");
        if (notificationArray == null) {
            call.error("Must provide notifications array as notifications option");
            return null;
        }
        List<LocalNotification> resultLocalNotifications = new ArrayList<>(notificationArray.length());
        List<JSONObject> notificationsJson;
        try {
            notificationsJson = notificationArray.toList();
        } catch (JSONException e) {
            call.error("Provided notification format is invalid");
            return null;
        }
        for (JSONObject jsonNotification : notificationsJson) {
            JSObject notification = null;
            try {
                notification = JSObject.fromJSONObject(jsonNotification);
            } catch (JSONException e) {
                call.error("Invalid JSON object sent to NotificationPlugin", e);
                return null;
            }
            try {
                LocalNotification activeLocalNotification = buildNotificationFromJSObject(notification);
                resultLocalNotifications.add(activeLocalNotification);
            } catch (ParseException e) {
                // Thrown by LocalNotificationSchedule when a schedule date fails to parse.
                call.error("Invalid date format sent to Notification plugin", e);
                return null;
            }
        }
        return resultLocalNotifications;
    }

    /**
     * Maps a single JSON notification to a LocalNotification instance.
     *
     * @throws ParseException if the embedded schedule contains an unparseable date
     */
    public static LocalNotification buildNotificationFromJSObject(JSObject jsonObject) throws ParseException {
        LocalNotification localNotification = new LocalNotification();
        localNotification.setSource(jsonObject.toString());
        localNotification.setId(jsonObject.getInteger("id"));
        localNotification.setBody(jsonObject.getString("body"));
        localNotification.setActionTypeId(jsonObject.getString("actionTypeId"));
        localNotification.setGroup(jsonObject.getString("group"));
        localNotification.setSound(jsonObject.getString("sound"));
        localNotification.setTitle(jsonObject.getString("title"));
        localNotification.setSmallIcon(jsonObject.getString("smallIcon"));
        localNotification.setIconColor(jsonObject.getString("iconColor"));
        localNotification.setAttachments(LocalNotificationAttachment.getAttachments(jsonObject));
        localNotification.setGroupSummary(jsonObject.getBoolean("groupSummary", false));
        localNotification.setChannelId(jsonObject.getString("channelId"));
        localNotification.setSchedule(new LocalNotificationSchedule(jsonObject));
        localNotification.setExtra(jsonObject.getJSObject("extra"));
        localNotification.setOngoing(jsonObject.getBoolean("ongoing", false));
        localNotification.setAutoCancel(jsonObject.getBoolean("autoCancel", true));
        return localNotification;
    }

    /**
     * Extracts the list of notification ids from the call's "notifications" option.
     *
     * FIX: the previous implementation called call.getArray("notifications").toList()
     * unguarded — when the option was missing, getArray returned null and the NPE
     * escaped (it is not a JSONException), instead of reporting the intended error.
     */
    public static List<Integer> getLocalNotificationPendingList(PluginCall call) {
        List<JSONObject> notifications = null;
        JSArray notificationArray = call.getArray("notifications");
        if (notificationArray != null) {
            try {
                notifications = notificationArray.toList();
            } catch (JSONException e) {
                // fall through to the error report below
            }
        }
        if (notifications == null || notifications.size() == 0) {
            call.error("Must provide notifications array as notifications option");
            return null;
        }
        List<Integer> notificationsList = new ArrayList<>(notifications.size());
        for (JSONObject notificationToCancel : notifications) {
            try {
                notificationsList.add(notificationToCancel.getInt("id"));
            } catch (JSONException e) {
                // Best-effort: entries without a numeric "id" are skipped.
            }
        }
        return notificationsList;
    }

    /** Wraps a list of id strings as {"notifications": [{"id": ...}, ...]}. */
    public static JSObject buildLocalNotificationPendingList(List<String> ids) {
        JSObject result = new JSObject();
        JSArray jsArray = new JSArray();
        for (String id : ids) {
            JSObject notification = new JSObject();
            notification.put("id", id);
            jsArray.put(notification);
        }
        result.put("notifications", jsArray);
        return result;
    }

    /** Resolves the small icon drawable resource id, falling back to {@code defaultIcon}. */
    public int getSmallIcon(Context context, int defaultIcon) {
        int resId = AssetUtil.RESOURCE_ID_ZERO_VALUE;
        if (smallIcon != null) {
            resId = AssetUtil.getResourceID(context, smallIcon, "drawable");
        }
        if (resId == AssetUtil.RESOURCE_ID_ZERO_VALUE) {
            resId = defaultIcon;
        }
        return resId;
    }

    /** True when a schedule exists and specifies at least one trigger (on/at/every). */
    public boolean isScheduled() {
        return (
            this.schedule != null &&
            (this.schedule.getOn() != null || this.schedule.getAt() != null || this.schedule.getEvery() != null)
        );
    }

    @Override
    public String toString() {
        return (
            "LocalNotification{" +
            "title='" + title + '\'' +
            ", body='" + body + '\'' +
            ", id=" + id +
            ", sound='" + sound + '\'' +
            ", smallIcon='" + smallIcon + '\'' +
            ", iconColor='" + iconColor + '\'' +
            ", actionTypeId='" + actionTypeId + '\'' +
            ", group='" + group + '\'' +
            ", extra=" + extra +
            ", attachments=" + attachments +
            ", schedule=" + schedule +
            ", groupSummary=" + groupSummary +
            ", ongoing=" + ongoing +
            ", autoCancel=" + autoCancel +
            '}'
        );
    }

    // NOTE(review): equals/hashCode deliberately(?) omit channelId and source —
    // confirm whether channelId should participate in equality.
    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        LocalNotification that = (LocalNotification) o;
        if (title != null ? !title.equals(that.title) : that.title != null) return false;
        if (body != null ? !body.equals(that.body) : that.body != null) return false;
        if (id != null ? !id.equals(that.id) : that.id != null) return false;
        if (sound != null ? !sound.equals(that.sound) : that.sound != null) return false;
        if (smallIcon != null ? !smallIcon.equals(that.smallIcon) : that.smallIcon != null) return false;
        if (iconColor != null ? !iconColor.equals(that.iconColor) : that.iconColor != null) return false;
        if (actionTypeId != null ? !actionTypeId.equals(that.actionTypeId) : that.actionTypeId != null) return false;
        if (group != null ? !group.equals(that.group) : that.group != null) return false;
        if (extra != null ? !extra.equals(that.extra) : that.extra != null) return false;
        if (attachments != null ? !attachments.equals(that.attachments) : that.attachments != null) return false;
        if (groupSummary != that.groupSummary) return false;
        if (ongoing != that.ongoing) return false;
        if (autoCancel != that.autoCancel) return false;
        return schedule != null ? schedule.equals(that.schedule) : that.schedule == null;
    }

    @Override
    public int hashCode() {
        int result = title != null ? title.hashCode() : 0;
        result = 31 * result + (body != null ? body.hashCode() : 0);
        result = 31 * result + (id != null ? id.hashCode() : 0);
        result = 31 * result + (sound != null ? sound.hashCode() : 0);
        result = 31 * result + (smallIcon != null ? smallIcon.hashCode() : 0);
        result = 31 * result + (iconColor != null ? iconColor.hashCode() : 0);
        result = 31 * result + (actionTypeId != null ? actionTypeId.hashCode() : 0);
        result = 31 * result + (group != null ? group.hashCode() : 0);
        result = 31 * result + Boolean.hashCode(groupSummary);
        result = 31 * result + Boolean.hashCode(ongoing);
        result = 31 * result + Boolean.hashCode(autoCancel);
        result = 31 * result + (extra != null ? extra.hashCode() : 0);
        result = 31 * result + (attachments != null ? attachments.hashCode() : 0);
        result = 31 * result + (schedule != null ? schedule.hashCode() : 0);
        return result;
    }

    /** Rebuilds {@link #extra} from a previously serialized JSON string; logs on failure. */
    public void setExtraFromString(String extraFromString) {
        try {
            JSONObject jsonObject = new JSONObject(extraFromString);
            this.extra = JSObject.fromJSONObject(jsonObject);
        } catch (JSONException e) {
            Logger.error(Logger.tags("LN"), "Cannot rebuild extra data", e);
        }
    }

    public String getSource() {
        return source;
    }

    public void setSource(String source) {
        this.source = source;
    }
}
package com.rodiconmc.rodisongsaver;

import org.bukkit.ChatColor;
import org.bukkit.command.Command;
import org.bukkit.command.CommandSender;
import org.bukkit.command.TabExecutor;
import org.bukkit.entity.Player;
import org.bukkit.plugin.Plugin;

import java.util.ArrayList;
import java.util.List;

/**
 * /record command: starts a {@link Recording} for a player.
 * Arguments may be quoted ("song name" "file name") so names can contain spaces.
 */
public class RecordCommand implements TabExecutor {

    Plugin plugin;

    RecordCommand(Plugin plugin) {
        this.plugin = plugin;
    }

    @Override
    public boolean onCommand(CommandSender sender, Command command, String label, String[] rawArgs) {
        if (!(sender instanceof Player)) {
            sender.sendMessage(ChatColor.RED + "Only players can do this command");
            return true;
        }
        Player player = (Player) sender;
        String stringArgs = String.join(" ", rawArgs);
        String[] args = getArgs(stringArgs);
        if (args.length != 2) {
            player.sendMessage("Correct usage: /record <song name> <file name>");
            return true;
        }
        // Recording registers itself as a side effect of construction.
        new Recording(args[0], args[1], player, plugin);
        return true;
    }

    @Override
    public List<String> onTabComplete(CommandSender sender, Command command, String alias, String[] args) {
        return new ArrayList<>();
    }

    /**
     * Splits an argument string on spaces, treating double-quoted runs as single arguments.
     *
     * Fixes two bugs in the previous implementation:
     * 1. The quoted-argument loop used {@code i + 1 < string.length()}, which dropped the
     *    final character of an unterminated quoted argument (e.g. {@code "ab} yielded "a").
     * 2. The space branch incremented {@code i} in addition to the for-loop's own
     *    increment, skipping the character after a space — consecutive separators or a
     *    leading space swallowed the first character of the next argument
     *    (e.g. {@code "a"  b} lost "b" entirely).
     * Also hoists the repeated {@code string.toCharArray()} allocation out of the loop.
     */
    private String[] getArgs(String string) {
        List<String> args = new ArrayList<>();
        char[] chars = string.toCharArray();
        for (int i = 0; i < chars.length; i++) {
            char testChar = chars[i];
            if (testChar == '"') {
                // Quoted argument: consume up to the closing quote (or end of input).
                i++;
                StringBuilder builder = new StringBuilder();
                while (i < chars.length) {
                    char quotedChar = chars[i];
                    if (quotedChar == '"') {
                        i++;
                        break;
                    }
                    builder.append(quotedChar);
                    i++;
                }
                args.add(builder.toString());
            } else if (testChar == ' ') {
                // Separator: the for-loop's own increment advances past it.
            } else {
                // Unquoted argument: consume up to the next space.
                StringBuilder builder = new StringBuilder();
                while (i < chars.length) {
                    char plainChar = chars[i];
                    if (plainChar == ' ') {
                        break;
                    }
                    builder.append(plainChar);
                    i++;
                }
                args.add(builder.toString());
            }
        }
        return args.toArray(String[]::new);
    }
}
package aademo.superawesome.tv.awesomeadsdemo.activities.creatives.bitmap;

import android.graphics.Bitmap;

/**
 * Callback for asynchronous bitmap loading: exactly one of the two methods
 * is invoked, depending on whether a bitmap could be obtained.
 */
public interface BitmapListener {

    /** Delivered when the bitmap was loaded successfully. */
    void gotBitmap(Bitmap bitmap);

    /** Delivered when no bitmap could be obtained. */
    void noBitmap();
}
/*
 * Copyright 2015-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.glue.model;

import java.io.Serializable;
import javax.annotation.Generated;

/**
 * Empty result shape for the DeleteUserDefinedFunction operation.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/glue-2017-03-31/DeleteUserDefinedFunction" target="_top">AWS API
 *      Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class DeleteUserDefinedFunctionResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable {

    /**
     * Returns a string representation of this object. This result carries no fields,
     * so the representation is always the empty object form.
     *
     * @return A string representation of this object.
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        return "{}";
    }

    // All instances are equal: the shape has no state beyond its type.
    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        return obj instanceof DeleteUserDefinedFunctionResult;
    }

    @Override
    public int hashCode() {
        // Constant hash, consistent with the stateless equals above.
        return 1;
    }

    @Override
    public DeleteUserDefinedFunctionResult clone() {
        try {
            return (DeleteUserDefinedFunctionResult) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }
}
/*
 * HE_Mesh Frederik Vanhoutte - www.wblut.com
 *
 * https://github.com/wblut/HE_Mesh
 * A Processing/Java library for for creating and manipulating polygonal meshes.
 *
 * Public Domain: http://creativecommons.org/publicdomain/zero/1.0/
 */
package wblut.hemesh;

import java.util.Collection;
import java.util.List;

import org.eclipse.collections.impl.list.mutable.FastList;

import wblut.core.WB_ProgressReporter.WB_ProgressCounter;
import wblut.geom.WB_AABB;
import wblut.geom.WB_Coord;
import wblut.geom.WB_Point;
import wblut.geom.WB_Voronoi;
import wblut.geom.WB_VoronoiCell3D;
import wblut.math.WB_ConstantScalarParameter;
import wblut.math.WB_ScalarParameter;

/**
 * Creates the Voronoi cells of a collection of points, constrained by a box.
 *
 * @author Frederik Vanhoutte (W:Blut)
 *
 */
public class HEMC_VoronoiBox extends HEMC_MultiCreator {
	/** Points. Cell centers; set via one of the setPoints overloads. */
	private List<WB_Coord> points;
	/** Number of points. Cached from points.size() in create(). */
	private int numberOfPoints;
	/** Container. Axis-aligned bounding box limiting every cell. */
	private WB_AABB aabb;
	// When true, create() uses the brute-force Voronoi computation instead of
	// the default algorithm.
	private boolean bruteForce;
	/** Offset. Per-cell inward offset, evaluated as a scalar parameter. */
	private WB_ScalarParameter offset;

	/**
	 * Creates a new creator with a zero cell offset; points and container must
	 * be supplied before create() produces any cells.
	 */
	public HEMC_VoronoiBox() {
		super();
		offset = WB_ScalarParameter.ZERO;
	}

	/**
	 * Set points that define cell centers.
	 *
	 * @param points
	 *            array of vertex positions
	 * @return self
	 */
	public HEMC_VoronoiBox setPoints(final WB_Coord[] points) {
		this.points = new FastList<WB_Coord>();
		for (WB_Coord p : points) {
			this.points.add(p);
		}
		return this;
	}

	/**
	 * Set points that define cell centers.
	 *
	 * @param points
	 *            collection of vertex positions
	 * @return self
	 */
	public HEMC_VoronoiBox setPoints(final Collection<? extends WB_Coord> points) {
		this.points = new FastList<WB_Coord>();
		this.points.addAll(points);
		return this;
	}

	/**
	 * Set points that define cell centers.
	 *
	 * @param points
	 *            2D array of double of vertex positions; each row must hold at
	 *            least 3 values (x, y, z)
	 * @return self
	 */
	public HEMC_VoronoiBox setPoints(final double[][] points) {
		final int n = points.length;
		this.points = new FastList<WB_Coord>();
		for (int i = 0; i < n; i++) {
			this.points.add(new WB_Point(points[i][0], points[i][1], points[i][2]));
		}
		return this;
	}

	/**
	 * Set points that define cell centers.
	 *
	 * @param points
	 *            2D array of float of vertex positions; each row must hold at
	 *            least 3 values (x, y, z)
	 * @return self
	 */
	public HEMC_VoronoiBox setPoints(final float[][] points) {
		final int n = points.length;
		this.points = new FastList<WB_Coord>();
		for (int i = 0; i < n; i++) {
			this.points.add(new WB_Point(points[i][0], points[i][1], points[i][2]));
		}
		return this;
	}

	/**
	 * Set voronoi cell offset.
	 *
	 * @param o
	 *            offset (constant value)
	 * @return self
	 */
	public HEMC_VoronoiBox setOffset(final double o) {
		offset = new WB_ConstantScalarParameter(o);
		return this;
	}

	/**
	 * Set voronoi cell offset.
	 *
	 * @param o
	 *            offset (scalar parameter, may vary per evaluation point)
	 * @return self
	 */
	public HEMC_VoronoiBox setOffset(final WB_ScalarParameter o) {
		offset = o;
		return this;
	}

	/**
	 * Set enclosing box limiting cells.
	 *
	 * @param container
	 *            enclosing WB_AABB
	 * @return self
	 */
	public HEMC_VoronoiBox setContainer(final WB_AABB container) {
		this.aabb = container;
		return this;
	}

	/**
	 * Toggle brute-force Voronoi computation.
	 *
	 * @param b
	 *            true to use the brute-force algorithm
	 * @return self
	 */
	public HEMC_VoronoiBox setBruteForce(final boolean b) {
		bruteForce = b;
		return this;
	}

	// Builds one mesh per Voronoi cell into `result`.
	// Degenerate inputs: a null container produces nothing; a container with no
	// points produces a single box mesh of the container itself.
	@Override
	void create(final HE_MeshCollection result) {
		tracker.setStartStatus(this, "Starting HEMC_VoronoiBox");
		if (aabb == null) {
			// NOTE(review): this early exit never calls setStopStatus — confirm
			// whether the tracker expects a matching stop for every start.
			_numberOfMeshes = 0;
			return;
		}
		if (points == null) {
			result.add(new HE_Mesh(new HEC_Box().setFromAABB(aabb)));
			_numberOfMeshes = 1;
			return;
		}
		numberOfPoints = points.size();
		tracker.setDuringStatus(this, "Calculating Voronoi cells.");
		List<WB_VoronoiCell3D> voronoi = bruteForce ? WB_Voronoi.getVoronoi3DBruteForce(points, numberOfPoints, aabb, offset)
				: WB_Voronoi.getVoronoi3D(points, numberOfPoints, aabb, offset);
		WB_ProgressCounter counter = new WB_ProgressCounter(voronoi.size(), 10);
		tracker.setCounterStatus(this, "Creating cell mesh.", counter);
		for (WB_VoronoiCell3D vor : voronoi) {
			HE_Mesh m = new HE_Mesh(vor.getMesh());
			// Both labels carry the cell's generating-point index.
			m.setInternalLabel(vor.getIndex());
			m.setUserLabel(vor.getIndex());
			result.add(m);
			counter.increment();
		}
		_numberOfMeshes = result.size();
		tracker.setStopStatus(this, "Exiting HEMC_VoronoiBox.");
	}
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.alibaba.nacos.samples.spring.domain;

import java.util.Date;

import com.alibaba.nacos.api.config.annotation.NacosConfigurationProperties;
import com.alibaba.nacos.api.config.annotation.NacosIgnore;
import com.alibaba.nacos.api.config.annotation.NacosProperty;

import static com.alibaba.nacos.samples.spring.domain.Pojo.DATA_ID;

/**
 * Sample POJO bound to a Nacos configuration: fields are populated from the
 * config identified by {@link #DATA_ID} and refreshed automatically.
 *
 * @author <a href="mailto:mercyblitz@gmail.com">Mercy</a>
 * @since 0.1.0
 */
@NacosConfigurationProperties(dataId = DATA_ID, autoRefreshed = true)
public class Pojo {

    public static final String DATA_ID = "pojo-data-id";

    private Long id;

    private String name;

    private Date created;

    // Bound to the "desc" config key rather than the field name.
    @NacosProperty("desc")
    private String description;

    // Excluded from Nacos binding.
    @NacosIgnore
    private boolean ignored;

    public Long getId() {
        return id;
    }

    public void setId(Long id) {
        this.id = id;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public Date getCreated() {
        return created;
    }

    public void setCreated(Date created) {
        this.created = created;
    }

    public String getDescription() {
        return description;
    }

    public void setDescription(String description) {
        this.description = description;
    }

    public boolean isIgnored() {
        return ignored;
    }

    public void setIgnored(boolean ignored) {
        this.ignored = ignored;
    }

    @Override
    public String toString() {
        return new StringBuilder("Pojo{")
                .append("id=").append(id)
                .append(", name='").append(name).append('\'')
                .append(", created=").append(created)
                .append(", description='").append(description).append('\'')
                .append(", ignored=").append(ignored)
                .append('}')
                .toString();
    }
}
package net.predictblty.machinelearning.mlcommon;

import com.hazelcast.config.Config;
import com.hazelcast.config.NetworkConfig;
import com.hazelcast.core.Hazelcast;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.util.Base64;

import java.io.Serializable;
import java.nio.ByteBuffer;
import java.util.*;

/**
 * Assorted static helpers for cluster setup and feature encoding.
 *
 * Created by berkgokden on 12/21/14.
 */
public class HelpfulMethods {

    private HelpfulMethods() {
    }

    /**
     * Starts {@code memberCount} Hazelcast members on localhost (TCP/IP join,
     * multicast disabled) and returns the first one.
     * Build-cluster approach taken from hazelcast-mapreduce-presentation.
     */
    public static HazelcastInstance buildCluster(int memberCount) {
        Config config = new Config();
        NetworkConfig networkConfig = config.getNetworkConfig();
        networkConfig.getJoin().getMulticastConfig().setEnabled(false);
        networkConfig.getJoin().getTcpIpConfig().setEnabled(true);
        networkConfig.getJoin().getTcpIpConfig().setMembers(Arrays.asList("127.0.0.1"));
        HazelcastInstance[] members = new HazelcastInstance[memberCount];
        for (int i = 0; i < members.length; i++) {
            members[i] = Hazelcast.newHazelcastInstance(config);
        }
        return members[0];
    }

    /**
     * Starts {@code memberCount} Hazelcast members with default network config
     * and returns all of them.
     * Build-cluster approach taken from hazelcast-mapreduce-presentation.
     */
    public static HazelcastInstance[] buildClusterReturnCluster(int memberCount) {
        Config config = new Config();
        HazelcastInstance[] members = new HazelcastInstance[memberCount];
        for (int i = 0; i < members.length; i++) {
            members[i] = Hazelcast.newHazelcastInstance(config);
        }
        return members;
    }

    /**
     * Returns the confidence of the first classification whose label equals
     * {@code classification}, or 0 when no entry matches.
     */
    public static double compareClassificationsWithClass(Collection<Classification> classifications, String classification) {
        for (Classification candidate : classifications) {
            if (candidate.getClassification().equals(classification)) {
                return candidate.getConfidence();
            }
        }
        return 0;
    }

    /**
     * Encodes the map's values (as strings, sorted) into one byte array, each
     * value followed by a single zero byte.
     * NOTE(review): getBytes() uses the platform default charset — presumably
     * fine for ASCII feature values, but confirm before relying on it
     * cross-platform.
     */
    public static ByteHolder generateSortedFetureByteArray(Map<String, Serializable> featureMap) {
        List<String> values = new ArrayList<String>(featureMap.size());
        int capacity = 0;
        for (Serializable value : featureMap.values()) {
            String text = value.toString();
            values.add(text);
            capacity += text.getBytes().length + 1;
        }
        Collections.sort(values);
        ByteBuffer buffer = ByteBuffer.allocate(capacity);
        for (String value : values) {
            buffer.put(value.getBytes());
            buffer.put((byte) 0);
        }
        return new ByteHolder(buffer.array());
    }
}
/*
 * Copyright (c) 2011-2017 Pivotal Software Inc, All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *       http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package reactor.core.publisher;

import java.util.Objects;
import java.util.function.BiFunction;
import java.util.function.Function;
import javax.annotation.Nullable;

import org.reactivestreams.Publisher;
import reactor.core.CoreSubscriber;
import reactor.core.Scannable;

/**
 * A decorating {@link Mono} {@link Publisher} that exposes {@link Mono} API over an
 * arbitrary {@link Publisher} Useful to create operators which return a {@link Mono}.
 *
 * @param <I> delegate {@link Publisher} type
 * @param <O> produced type
 */
public abstract class MonoOperator<I, O> extends Mono<O> implements Scannable {

	/** Upstream source this operator decorates; never null. */
	protected final Mono<? extends I> source;

	/**
	 * Build a {@link MonoOperator} wrapper around the passed parent {@link Publisher}
	 *
	 * @param source the {@link Publisher} to decorate
	 */
	protected MonoOperator(Mono<? extends I> source) {
		this.source = Objects.requireNonNull(source);
	}

	@Override
	@Nullable
	public Object scanUnsafe(Attr key) {
		// PARENT exposes the decorated upstream; PREFETCH is unbounded for a Mono.
		if (key == Attr.PARENT) {
			return source;
		}
		return key == Attr.PREFETCH ? Integer.MAX_VALUE : null;
	}
}
/*
 * Copyright (c) 2021, salesforce.com, inc.
 * All rights reserved.
 * SPDX-License-Identifier: BSD-3-Clause
 * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause
 */
package com.salesforce.apollo.crypto;

import static java.util.Objects.requireNonNull;

import java.util.Arrays;
import java.util.Objects;
import java.util.Optional;
import java.util.stream.IntStream;
import java.util.stream.Stream;

import org.apache.commons.math3.fraction.Fraction;

import com.salesforce.apollo.crypto.SigningThreshold.Weighted.Weight;
import com.salesforce.apollo.crypto.SigningThreshold.Weighted.Weight.WeightImpl;
import com.salesforce.apollo.crypto.SigningThreshold.Weighted.WeightedImpl;

/**
 * A multi-signature acceptance threshold: either a simple signature count
 * ({@link Unweighted}) or clauses of fractional weights ({@link Weighted}),
 * where every clause must accumulate to at least 1.
 *
 * @author hal.hildebrand
 *
 */
public interface SigningThreshold {

    /** Threshold met when at least {@link #getThreshold()} signatures are present. */
    interface Unweighted extends SigningThreshold {
        int getThreshold();
    }

    /**
     * Threshold expressed as groups (clauses) of weights. Each weight is an
     * integer or a fraction; see {@link SigningThreshold#thresholdMet(Weighted, int[])}.
     */
    interface Weighted extends SigningThreshold {

        interface Weight {

            /** Value-type implementation of a weight: numerator with optional denominator. */
            class WeightImpl implements Weight {
                // null denominator means the weight is the whole number `numerator`.
                private final Integer denominator;
                private final Integer numerator;

                public WeightImpl(Integer numerator, Integer denominator) {
                    this.numerator = numerator;
                    this.denominator = denominator;
                }

                @Override
                public Optional<Integer> denominator() {
                    return Optional.ofNullable(denominator);
                }

                @Override
                public int numerator() {
                    return numerator;
                }

                @Override
                public int hashCode() {
                    return Objects.hash(denominator, numerator);
                }

                @Override
                public boolean equals(Object obj) {
                    if (this == obj) {
                        return true;
                    }
                    if (!(obj instanceof WeightImpl)) {
                        return false;
                    }
                    WeightImpl other = (WeightImpl) obj;
                    return Objects.equals(denominator, other.denominator) && Objects.equals(numerator, other.numerator);
                }
            }

            /** Empty when the weight is a whole number. */
            Optional<Integer> denominator();

            int numerator();
        }

        /** Value-type implementation wrapping the clause-of-weights matrix. */
        class WeightedImpl implements Weighted {
            private final Weight[][] weights;

            public WeightedImpl(Weight[][] weightGroups) {
                this.weights = weightGroups;
            }

            @Override
            public boolean equals(Object obj) {
                if (this == obj) {
                    return true;
                }
                if (!(obj instanceof WeightedImpl)) {
                    return false;
                }
                WeightedImpl other = (WeightedImpl) obj;
                return Arrays.deepEquals(weights, other.weights);
            }

            @Override
            public int hashCode() {
                final int prime = 31;
                int result = 1;
                result = prime * result + Arrays.deepHashCode(weights);
                return result;
            }

            @Override
            public Weight[][] getWeights() {
                return weights;
            }
        }

        Weight[][] getWeights();
    }

    /** Total number of weights across all clauses. */
    public static int countWeights(Weight[][] weights) {
        return Arrays.stream(weights).mapToInt(w -> w.length).sum();
    }

    /** Parses each string (e.g. "1/2" or "1") into a weight; see {@link #weight(String)}. */
    public static Weight[] group(String... weights) {
        return Stream.of(weights).map(SigningThreshold::weight).toArray(Weight[]::new);
    }

    public static Weight[] group(Weight... weights) {
        return weights;
    }

    /**
     * Dispatches to the unweighted or weighted evaluation based on the runtime
     * type of {@code threshold}.
     *
     * @param indexes positions of the signatures that are present
     * @throws IllegalArgumentException for unknown threshold types
     */
    public static boolean thresholdMet(SigningThreshold threshold, int[] indexes) {
        if (threshold instanceof SigningThreshold.Unweighted) {
            return thresholdMet((SigningThreshold.Unweighted) threshold, indexes);
        } else if (threshold instanceof SigningThreshold.Weighted) {
            return thresholdMet((SigningThreshold.Weighted) threshold, indexes);
        } else {
            throw new IllegalArgumentException("Unknown threshold type: " + threshold.getClass().getCanonicalName());
        }
    }

    /** Unweighted: met when the number of present signatures reaches the threshold. */
    public static boolean thresholdMet(SigningThreshold.Unweighted threshold, int[] indexes) {
        requireNonNull(indexes, "indexes");
        return indexes.length >= threshold.getThreshold();
    }

    /**
     * Weighted: marks each present index as satisfied, then walks the clauses
     * in order, summing the weights at satisfied positions (positions are
     * numbered consecutively across clauses). Every clause must sum to >= 1.
     */
    public static boolean thresholdMet(SigningThreshold.Weighted threshold, int[] indexes) {
        requireNonNull(indexes);
        if (indexes.length == 0) {
            return false;
        }
        var maxIndex = IntStream.of(indexes).max().getAsInt();
        var countWeights = countWeights(threshold.getWeights());
        // Sized to cover both the highest supplied index and every weight position.
        var sats = prefillSats(Integer.max(maxIndex + 1, countWeights));
        for (var i : indexes) {
            sats[i] = true;
        }
        var index = 0;
        for (var clause : threshold.getWeights()) {
            var accumulator = Fraction.ZERO;
            for (var weight : clause) {
                if (sats[index]) {
                    accumulator = accumulator.add(fraction(weight));
                }
                index++;
            }
            if (accumulator.compareTo(Fraction.ONE) < 0) {
                return false;
            }
        }
        return true;
    }

    /**
     * Creates a simple count-based threshold.
     *
     * @throws IllegalArgumentException if {@code threshold} is negative
     */
    public static SigningThreshold.Unweighted unweighted(int threshold) {
        if (threshold < 0) {
            throw new IllegalArgumentException("threshold must be >= 0");
        }
        return new Unweighted() {
            @Override
            public int getThreshold() {
                return threshold;
            }
        };
    }

    /** Whole-number weight (no denominator). */
    public static Weight weight(int value) {
        return weight(value, null);
    }

    /**
     * Fractional weight numerator/denominator; a null denominator means a
     * whole number.
     *
     * @throws IllegalArgumentException if numerator <= 0 or denominator <= 0
     */
    public static Weight weight(int numerator, Integer denominator) {
        if (denominator != null && denominator <= 0) {
            throw new IllegalArgumentException("denominator must be > 0");
        }
        if (numerator <= 0) {
            throw new IllegalArgumentException("numerator must be > 0");
        }
        return new WeightImpl(numerator, denominator);
    }

    /**
     * Parses "n" or "n/d" into a weight.
     *
     * @throws IllegalArgumentException for any other shape
     */
    public static Weight weight(String value) {
        var parts = value.split("/");
        if (parts.length == 1) {
            return weight(Integer.parseInt(parts[0]));
        } else if (parts.length == 2) {
            return weight(Integer.parseInt(parts[0]), Integer.parseInt(parts[1]));
        } else {
            throw new IllegalArgumentException("invalid weight: " + value);
        }
    }

    /** Single-clause weighted threshold from string weights. */
    public static SigningThreshold.Weighted weighted(String... weightsAsStrings) {
        var weights = Stream.of(weightsAsStrings).map(SigningThreshold::weight).toArray(Weight[]::new);
        return weighted(weights);
    }

    /** Single-clause weighted threshold. */
    public static SigningThreshold.Weighted weighted(Weight... weights) {
        return weighted(new Weight[][] { weights });
    }

    /**
     * Multi-clause weighted threshold.
     *
     * @throws IllegalArgumentException if any clause's weights sum to less than 1
     *                                  (such a clause could never be satisfied)
     */
    public static SigningThreshold.Weighted weighted(Weight[]... weightGroups) {
        for (var group : weightGroups) {
            if (!sumGreaterThanOrEqualToOne(group)) {
                throw new IllegalArgumentException("group sum is less than 1: " + Arrays.deepToString(group));
            }
        }
        return new WeightedImpl(weightGroups);
    }

    /** Converts a weight into a commons-math Fraction for exact arithmetic. */
    private static Fraction fraction(Weight weight) {
        if (weight.denominator().isEmpty()) {
            return new Fraction(weight.numerator());
        }
        return new Fraction(weight.numerator(), weight.denominator().get());
    }

    /** Allocates the satisfied-positions flags, all false. */
    private static boolean[] prefillSats(int count) {
        var sats = new boolean[count];
        Arrays.fill(sats, false);
        return sats;
    }

    /** True when the clause's full weight sum can reach 1 (validity check). */
    private static boolean sumGreaterThanOrEqualToOne(Weight[] weights) {
        var sum = Fraction.ZERO;
        for (var w : weights) {
            // noinspection ObjectAllocationInLoop
            sum = sum.add(fraction(w));
        }
        return sum.compareTo(Fraction.ONE) >= 0;
    }
}
package sets; /** * <p>Title: </p> * * <p>Description: </p> * * <p>Copyright: Copyright (c) 2005</p> * * <p>Company: </p> * * @author Prof. Weining Zhang * @version 1.0 */ public class EmptySetException extends RuntimeException{ public EmptySetException(){ super ("Set is Empty"); } public EmptySetException(String msg){ super("Set is Empty:" +msg); } }
/*
 * Copyright 2010 Red Hat, Inc. and/or its affiliates.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.jbpm.process.core.context.exception;

import java.util.HashMap;
import java.util.Map;

import org.jbpm.process.core.Context;
import org.jbpm.process.core.context.AbstractContext;

/**
 * A process context that maps exception (fault) names to their handlers.
 * A handler registered under the {@code null} key acts as the catch-all.
 */
public class ExceptionScope extends AbstractContext {

    private static final long serialVersionUID = 510L;

    public static final String EXCEPTION_SCOPE = "ExceptionScope";

    // Registry keyed by exception/fault name; null key = catch-all handler.
    protected Map<String, ExceptionHandler> exceptionHandlers = new HashMap<String, ExceptionHandler>();

    public String getType() {
        return EXCEPTION_SCOPE;
    }

    /** Registers (or replaces) the handler for the given exception name. */
    public void setExceptionHandler(String exception, ExceptionHandler exceptionHandler) {
        exceptionHandlers.put(exception, exceptionHandler);
    }

    /**
     * Looks up the handler for the given exception name, falling back to the
     * catch-all handler registered under the {@code null} key when no exact
     * match exists. Returns {@code null} when neither is registered.
     */
    public ExceptionHandler getExceptionHandler(String exception) {
        ExceptionHandler handler = exceptionHandlers.get(exception);
        return handler != null ? handler : exceptionHandlers.get(null);
    }

    /** Removes the handler registered for the given exception name, if any. */
    public void removeExceptionHandler(String exception) {
        exceptionHandlers.remove(exception);
    }

    public Map<String, ExceptionHandler> getExceptionHandlers() {
        return exceptionHandlers;
    }

    public void setExceptionHandlers(Map<String, ExceptionHandler> exceptionHandlers) {
        if (exceptionHandlers == null) {
            throw new IllegalArgumentException("Exception handlers are null");
        }
        this.exceptionHandlers = exceptionHandlers;
    }

    /**
     * Resolves this scope for a given exception name: returns {@code this}
     * when a handler (exact or catch-all) exists, {@code null} otherwise.
     * Only {@link String} parameters are supported.
     */
    public Context resolveContext(Object param) {
        if (!(param instanceof String)) {
            throw new IllegalArgumentException(
                    "ExceptionScopes can only resolve exception names: " + param);
        }
        return getExceptionHandler((String) param) == null ? null : this;
    }
}
/* * Copyright (C) 2014 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.android.exoplayer.upstream; import com.google.android.exoplayer.util.Assertions; import java.nio.ByteBuffer; /** * An implementation of {@link NonBlockingInputStream} for reading data from a byte array. */ public final class ByteArrayNonBlockingInputStream implements NonBlockingInputStream { private final byte[] data; private int position; public ByteArrayNonBlockingInputStream(byte[] data) { this.data = Assertions.checkNotNull(data); } @Override public int skip(int length) { int skipLength = getReadLength(length); position += skipLength; return skipLength; } @Override public int read(byte[] buffer, int offset, int length) { if (isEndOfStream()) { return -1; } int readLength = getReadLength(length); System.arraycopy(data, position, buffer, offset, readLength); position += readLength; return readLength; } @Override public int read(ByteBuffer buffer, int length) { if (isEndOfStream()) { return -1; } int readLength = getReadLength(length); buffer.put(data, position, readLength); position += readLength; return readLength; } @Override public long getAvailableByteCount() { return data.length - position; } @Override public boolean isEndOfStream() { return position == data.length; } @Override public void close() { // Do nothing. } private int getReadLength(int requestedLength) { return Math.min(requestedLength, data.length - position); } }
package com.kelin.mvvmlight.zhihu.news;

import java.util.List;

import retrofit2.http.GET;
import rx.Observable;

/**
 * Retrofit service definition for the Zhihu Daily "latest news" endpoint,
 * together with the JSON response model it deserializes into.
 *
 * Created by kelin on 16-4-26.
 */
public interface TopNewsService {

    /**
     * Fetches the latest news feed as a reactive stream emitting one {@link News}.
     * NOTE(review): the bean field names below appear to mirror the JSON keys
     * (snake_case like {@code top_stories}) — confirm before renaming any of them.
     */
    @GET("/api/4/news/latest")
    public Observable<News> getTopNewsList();

    /** Response body: publication date plus normal and top (carousel) stories. */
    public class News {

        private String date;
        private List<StoriesBean> stories;
        private List<TopStoriesBean> top_stories;

        public String getDate() {
            return date;
        }

        public void setDate(String date) {
            this.date = date;
        }

        public List<StoriesBean> getStories() {
            return stories;
        }

        public void setStories(List<StoriesBean> stories) {
            this.stories = stories;
        }

        public List<TopStoriesBean> getTop_stories() {
            return top_stories;
        }

        public void setTop_stories(List<TopStoriesBean> top_stories) {
            this.top_stories = top_stories;
        }

        /** One regular story entry in the feed. */
        public static class StoriesBean {

            // Client-side bookkeeping attached after deserialization; presumably
            // not part of the server JSON — verify against the adapter code.
            private ExtraField extraField;
            private String title;
            private String ga_prefix;
            private boolean multipic;
            private int type;
            private long id;
            private List<String> images;

            public String getTitle() {
                return title;
            }

            public void setTitle(String title) {
                this.title = title;
            }

            public String getGa_prefix() {
                return ga_prefix;
            }

            public void setGa_prefix(String ga_prefix) {
                this.ga_prefix = ga_prefix;
            }

            public boolean isMultipic() {
                return multipic;
            }

            public void setMultipic(boolean multipic) {
                this.multipic = multipic;
            }

            public int getType() {
                return type;
            }

            public void setType(int type) {
                this.type = type;
            }

            public long getId() {
                return id;
            }

            public void setId(long id) {
                this.id = id;
            }

            public List<String> getImages() {
                return images;
            }

            public void setImages(List<String> images) {
                this.images = images;
            }

            public ExtraField getExtraField() {
                return extraField;
            }

            public void setExtraField(ExtraField extraField) {
                this.extraField = extraField;
            }

            /** UI bookkeeping for a story (e.g. whether it renders as a date header). */
            public static class ExtraField {

                private boolean isHeader;
                private String date;

                public ExtraField(boolean isHeader, String date) {
                    this.isHeader = isHeader;
                    this.date = date;
                }

                public boolean isHeader() {
                    return isHeader;
                }

                public void setHeader(boolean header) {
                    isHeader = header;
                }

                public String getDate() {
                    return date;
                }

                public void setDate(String date) {
                    this.date = date;
                }
            }

            public StoriesBean(ExtraField extraField) {
                this.extraField = extraField;
            }
        }

        /** One carousel ("top") story entry. */
        public static class TopStoriesBean {

            private String image;
            private int type;
            private long id;
            private String ga_prefix;
            private String title;

            public String getImage() {
                return image;
            }

            public void setImage(String image) {
                this.image = image;
            }

            public int getType() {
                return type;
            }

            public void setType(int type) {
                this.type = type;
            }

            public long getId() {
                return id;
            }

            public void setId(long id) {
                this.id = id;
            }

            public String getGa_prefix() {
                return ga_prefix;
            }

            public void setGa_prefix(String ga_prefix) {
                this.ga_prefix = ga_prefix;
            }

            public String getTitle() {
                return title;
            }

            public void setTitle(String title) {
                this.title = title;
            }
        }
    }
}
package org.genyris.java; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import org.genyris.core.Exp; import org.genyris.core.Internable; import org.genyris.core.Symbol; import org.genyris.exception.GenyrisException; import org.genyris.interp.Closure; import org.genyris.interp.Environment; import org.genyris.interp.Interpreter; public class JavaMethod extends AbstractJavaMethod { protected Method method; protected Class[] params; public JavaMethod(Interpreter interp, String name, Method method, Class[] params) throws GenyrisException { super(interp, name); this.method = method; this.params = params; } public Symbol getBuiltinClassSymbol(Internable table) { return table.JAVAMETHOD(); } public Exp bindAndExecute(Closure proc, Exp[] arguments, Environment envForBindOperations) throws GenyrisException { Object object = getSelfJava(envForBindOperations).getValue(); Object[] javaArgsArray = JavaUtils.toJavaArray(params, arguments, envForBindOperations); try { method.setAccessible(true); Object rawResult = method.invoke(object, javaArgsArray); return JavaUtils.javaToGenyris(envForBindOperations, rawResult); } catch (IllegalArgumentException e) { e.printStackTrace(); throw new GenyrisException("Java " + this + " " + e.getClass().getName() + " " + e.getMessage()); } catch (IllegalAccessException e) { e.printStackTrace(); throw new GenyrisException("Java " + this + " " + e.getClass().getName() + " " + e.getMessage()); } catch (InvocationTargetException e) { e.getCause().printStackTrace(); throw new GenyrisException("Java " + this + " " + e.getCause().getClass().getName() + " " + e.getCause().getMessage()); } } public String toString() { return "JavaMethod " + getName(); } }
package seedu.address.ui; import javafx.fxml.FXML; import javafx.scene.control.Label; import javafx.scene.layout.HBox; import javafx.scene.layout.Region; import seedu.address.model.student.Student; /** * A UI component that displays information of a {@code Student}. */ public class StudentCard extends UiPart<Region> { private static final String FXML = "StudentListCard.fxml"; /** * Note: Certain keywords such as "location" and "resources" are reserved keywords in JavaFX. * As a consequence, UI elements' variable names cannot be set to such keywords * or an exception will be thrown by JavaFX during runtime. * * @see <a href="https://github.com/se-edu/addressbook-level4/issues/336">The issue on AddressBook level 4</a> */ public final Student student; @FXML private HBox cardPane; @FXML private Label name; @FXML private Label id; @FXML private Label telegramHandle; @FXML private Label email; @FXML private Label groupName; /** * Creates a {@code StudentCard} with the given {@code Student} and index to display. */ public StudentCard(Student student, int displayedIndex) { super(FXML); this.student = student; id.setText(displayedIndex + ". "); name.setText(student.getName().fullName); groupName.setText(student.getGroup().getGroupName().toString()); telegramHandle.setText(student.getTelegramHandle().value); email.setText(student.getEmail().value); } @Override public boolean equals(Object other) { // short circuit if same object if (other == this) { return true; } // instanceof handles nulls if (!(other instanceof StudentCard)) { return false; } // state check StudentCard card = (StudentCard) other; return id.getText().equals(card.id.getText()) && student.equals(card.student); } }
package cn.fivestars.pagehelperdemo;

import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.transaction.annotation.EnableTransactionManagement;

/**
 * Spring Boot entry point for the PageHelper demo application.
 * {@code @EnableTransactionManagement} switches on annotation-driven
 * transaction handling ({@code @Transactional}) for this context.
 */
@SpringBootApplication
@EnableTransactionManagement
public class PageHelperDemoApplication {

    /** Boots the Spring application context (and embedded container, if any). */
    public static void main(String[] args) {
        SpringApplication.run(PageHelperDemoApplication.class, args);
    }
}
/*
 * Copyright 2012-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.simplesystemsmanagement.model.transform;

import java.util.Map;
import javax.annotation.Generated;

import com.amazonaws.SdkClientException;
import com.amazonaws.services.simplesystemsmanagement.model.*;
import com.amazonaws.protocol.*;
import com.amazonaws.annotation.SdkInternalApi;

/**
 * FailureDetailsMarshaller
 *
 * <p>Code-generated marshaller that writes the members of a {@code FailureDetails}
 * object into a protocol request payload. Do not edit by hand; changes belong
 * in the code generator.</p>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
@SdkInternalApi
public class FailureDetailsMarshaller {

    // Binds the FailureStage member to the payload key "FailureStage".
    private static final MarshallingInfo<String> FAILURESTAGE_BINDING = MarshallingInfo.builder(MarshallingType.STRING)
            .marshallLocation(MarshallLocation.PAYLOAD).marshallLocationName("FailureStage").build();
    // Binds the FailureType member to the payload key "FailureType".
    private static final MarshallingInfo<String> FAILURETYPE_BINDING = MarshallingInfo.builder(MarshallingType.STRING)
            .marshallLocation(MarshallLocation.PAYLOAD).marshallLocationName("FailureType").build();
    // Binds the Details map member to the payload key "Details".
    private static final MarshallingInfo<Map> DETAILS_BINDING = MarshallingInfo.builder(MarshallingType.MAP).marshallLocation(MarshallLocation.PAYLOAD)
            .marshallLocationName("Details").build();

    // Stateless, so a single shared instance suffices.
    private static final FailureDetailsMarshaller instance = new FailureDetailsMarshaller();

    public static FailureDetailsMarshaller getInstance() {
        return instance;
    }

    /**
     * Marshall the given parameter object.
     *
     * @throws SdkClientException if {@code failureDetails} is null or any member fails to marshall
     */
    public void marshall(FailureDetails failureDetails, ProtocolMarshaller protocolMarshaller) {
        if (failureDetails == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(failureDetails.getFailureStage(), FAILURESTAGE_BINDING);
            protocolMarshaller.marshall(failureDetails.getFailureType(), FAILURETYPE_BINDING);
            protocolMarshaller.marshall(failureDetails.getDetails(), DETAILS_BINDING);
        } catch (Exception e) {
            // Wrap every failure in the SDK's client exception, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
/* * Copyright 2004-2019 H2 Group. Multiple-Licensed under the MPL 2.0, * and the EPL 1.0 (http://h2database.com/html/license.html). * Initial Developer: H2 Group */ package org.gridgain.internal.h2.tools; import java.io.BufferedInputStream; import java.io.BufferedReader; import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.OutputStream; import java.io.PrintWriter; import java.io.Reader; import java.io.SequenceInputStream; import java.nio.charset.StandardCharsets; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.util.ArrayList; import java.util.BitSet; import java.util.Collections; import java.util.Enumeration; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.Map; import java.util.Map.Entry; import java.util.zip.CRC32; import org.gridgain.internal.h2.compress.CompressLZF; import org.gridgain.internal.h2.engine.Constants; import org.gridgain.internal.h2.engine.DbObject; import org.gridgain.internal.h2.engine.MetaRecord; import org.gridgain.internal.h2.message.DbException; import org.gridgain.internal.h2.mvstore.MVMap; import org.gridgain.internal.h2.mvstore.MVStore; import org.gridgain.internal.h2.mvstore.MVStoreTool; import org.gridgain.internal.h2.mvstore.StreamStore; import org.gridgain.internal.h2.mvstore.db.ValueDataType; import org.gridgain.internal.h2.mvstore.tx.TransactionMap; import org.gridgain.internal.h2.mvstore.tx.TransactionStore; import org.gridgain.internal.h2.result.Row; import org.gridgain.internal.h2.result.RowFactory; import org.gridgain.internal.h2.result.SimpleRow; import org.gridgain.internal.h2.security.SHA256; import org.gridgain.internal.h2.store.fs.FileUtils; import org.gridgain.internal.h2.value.CompareMode; import org.gridgain.internal.h2.value.Value; import org.gridgain.internal.h2.value.ValueArray; import 
org.gridgain.internal.h2.value.ValueLob; import org.gridgain.internal.h2.value.ValueLobDb; import org.gridgain.internal.h2.value.ValueLong; import org.gridgain.internal.h2.api.ErrorCode; import org.gridgain.internal.h2.api.JavaObjectSerializer; import org.gridgain.internal.h2.jdbc.JdbcConnection; import org.gridgain.internal.h2.store.Data; import org.gridgain.internal.h2.store.DataHandler; import org.gridgain.internal.h2.store.DataReader; import org.gridgain.internal.h2.store.FileLister; import org.gridgain.internal.h2.store.FileStore; import org.gridgain.internal.h2.store.FileStoreInputStream; import org.gridgain.internal.h2.store.LobStorageBackend; import org.gridgain.internal.h2.store.LobStorageFrontend; import org.gridgain.internal.h2.store.LobStorageMap; import org.gridgain.internal.h2.store.Page; import org.gridgain.internal.h2.store.PageFreeList; import org.gridgain.internal.h2.store.PageLog; import org.gridgain.internal.h2.store.PageStore; import org.gridgain.internal.h2.util.IOUtils; import org.gridgain.internal.h2.util.IntArray; import org.gridgain.internal.h2.util.MathUtils; import org.gridgain.internal.h2.util.SmallLRUCache; import org.gridgain.internal.h2.util.StringUtils; import org.gridgain.internal.h2.util.TempFileDeleter; import org.gridgain.internal.h2.util.Tool; import org.gridgain.internal.h2.util.Utils; /** * Helps recovering a corrupted database. 
* @h2.resource */ public class Recover extends Tool implements DataHandler { private String databaseName; private int storageId; private String storageName; private int recordLength; private int valueId; private boolean trace; private boolean transactionLog; private ArrayList<MetaRecord> schema; private HashSet<Integer> objectIdSet; private HashMap<Integer, String> tableMap; private HashMap<String, String> columnTypeMap; private boolean remove; private int pageSize; private FileStore store; private int[] parents; private Stats stat; private boolean lobMaps; /** * Statistic data */ static class Stats { /** * The empty space in bytes in a data leaf pages. */ long pageDataEmpty; /** * The number of bytes used for data. */ long pageDataRows; /** * The number of bytes used for the page headers. */ long pageDataHead; /** * The count per page type. */ final int[] pageTypeCount = new int[Page.TYPE_STREAM_DATA + 2]; /** * The number of free pages. */ int free; } /** * Options are case sensitive. Supported options are: * <table> * <tr><td>[-help] or [-?]</td> * <td>Print the list of options</td></tr> * <tr><td>[-dir &lt;dir&gt;]</td> * <td>The directory (default: .)</td></tr> * <tr><td>[-db &lt;database&gt;]</td> * <td>The database name (all databases if not set)</td></tr> * <tr><td>[-trace]</td> * <td>Print additional trace information</td></tr> * <tr><td>[-transactionLog]</td> * <td>Print the transaction log</td></tr> * </table> * Encrypted databases need to be decrypted first. * @h2.resource * * @param args the command line arguments */ public static void main(String... args) throws SQLException { new Recover().runTool(args); } /** * Dumps the contents of a database file to a human readable text file. This * text file can be used to recover most of the data. This tool does not * open the database and can be used even if the database files are * corrupted. 
A database can get corrupted if there is a bug in the database * engine or file system software, or if an application writes into the * database file that doesn't understand the file format, or if there is * a hardware problem. * * @param args the command line arguments */ @Override public void runTool(String... args) throws SQLException { String dir = "."; String db = null; for (int i = 0; args != null && i < args.length; i++) { String arg = args[i]; if ("-dir".equals(arg)) { dir = args[++i]; } else if ("-db".equals(arg)) { db = args[++i]; } else if ("-removePassword".equals(arg)) { remove = true; } else if ("-trace".equals(arg)) { trace = true; } else if ("-transactionLog".equals(arg)) { transactionLog = true; } else if (arg.equals("-help") || arg.equals("-?")) { showUsage(); return; } else { showUsageAndThrowUnsupportedOption(arg); } } process(dir, db); } /** * INTERNAL */ public static Reader readClob(String fileName) throws IOException { return new BufferedReader(new InputStreamReader(readBlob(fileName), StandardCharsets.UTF_8)); } /** * INTERNAL */ public static InputStream readBlob(String fileName) throws IOException { return new BufferedInputStream(FileUtils.newInputStream(fileName)); } /** * INTERNAL */ public static ValueLobDb readBlobDb(Connection conn, long lobId, long precision) { DataHandler h = ((JdbcConnection) conn).getSession().getDataHandler(); verifyPageStore(h); ValueLobDb lob = ValueLobDb.create(Value.BLOB, h, LobStorageFrontend.TABLE_TEMP, lobId, null, precision); lob.setRecoveryReference(true); return lob; } private static void verifyPageStore(DataHandler h) { if (h.getLobStorage() instanceof LobStorageMap) { throw DbException.get(ErrorCode.FEATURE_NOT_SUPPORTED_1, "Restore page store recovery SQL script " + "can only be restored to a PageStore file"); } } /** * INTERNAL */ public static ValueLobDb readClobDb(Connection conn, long lobId, long precision) { DataHandler h = ((JdbcConnection) conn).getSession().getDataHandler(); 
verifyPageStore(h); ValueLobDb lob = ValueLobDb.create(Value.CLOB, h, LobStorageFrontend.TABLE_TEMP, lobId, null, precision); lob.setRecoveryReference(true); return lob; } /** * INTERNAL */ public static InputStream readBlobMap(Connection conn, long lobId, long precision) throws SQLException { final PreparedStatement prep = conn.prepareStatement( "SELECT DATA FROM INFORMATION_SCHEMA.LOB_BLOCKS " + "WHERE LOB_ID = ? AND SEQ = ? AND ? > 0"); prep.setLong(1, lobId); // precision is currently not really used, // it is just to improve readability of the script prep.setLong(3, precision); return new SequenceInputStream( new Enumeration<InputStream>() { private int seq; private byte[] data = fetch(); private byte[] fetch() { try { prep.setInt(2, seq++); ResultSet rs = prep.executeQuery(); if (rs.next()) { return rs.getBytes(1); } return null; } catch (SQLException e) { throw DbException.convert(e); } } @Override public boolean hasMoreElements() { return data != null; } @Override public InputStream nextElement() { ByteArrayInputStream in = new ByteArrayInputStream(data); data = fetch(); return in; } } ); } /** * INTERNAL */ public static Reader readClobMap(Connection conn, long lobId, long precision) throws Exception { InputStream in = readBlobMap(conn, lobId, precision); return new BufferedReader(new InputStreamReader(in, StandardCharsets.UTF_8)); } private void trace(String message) { if (trace) { out.println(message); } } private void traceError(String message, Throwable t) { out.println(message + ": " + t.toString()); if (trace) { t.printStackTrace(out); } } /** * Dumps the contents of a database to a SQL script file. 
* * @param dir the directory * @param db the database name (null for all databases) */ public static void execute(String dir, String db) throws SQLException { try { new Recover().process(dir, db); } catch (DbException e) { throw DbException.toSQLException(e); } } private void process(String dir, String db) { ArrayList<String> list = FileLister.getDatabaseFiles(dir, db, true); if (list.isEmpty()) { printNoDatabaseFilesFound(dir, db); } for (String fileName : list) { if (fileName.endsWith(Constants.SUFFIX_PAGE_FILE)) { dumpPageStore(fileName); } else if (fileName.endsWith(Constants.SUFFIX_LOB_FILE)) { dumpLob(fileName, false); } else if (fileName.endsWith(Constants.SUFFIX_MV_FILE)) { String f = fileName.substring(0, fileName.length() - Constants.SUFFIX_PAGE_FILE.length()); try (PrintWriter writer = getWriter(fileName, ".txt")) { MVStoreTool.dump(fileName, writer, true); MVStoreTool.info(fileName, writer); } try (PrintWriter writer = getWriter(f + ".h2.db", ".sql")) { dumpMVStoreFile(writer, fileName); } } } } private PrintWriter getWriter(String fileName, String suffix) { fileName = fileName.substring(0, fileName.length() - 3); String outputFile = fileName + suffix; trace("Created file: " + outputFile); try { return new PrintWriter(IOUtils.getBufferedWriter( FileUtils.newOutputStream(outputFile, false))); } catch (IOException e) { throw DbException.convertIOException(e, null); } } private void writeDataError(PrintWriter writer, String error, byte[] data) { writer.println("-- ERROR: " + error + " storageId: " + storageId + " recordLength: " + recordLength + " valueId: " + valueId); StringBuilder sb = new StringBuilder(); for (byte aData1 : data) { int x = aData1 & 0xff; if (x >= ' ' && x < 128) { sb.append((char) x); } else { sb.append('?'); } } writer.println("-- dump: " + sb.toString()); sb = new StringBuilder(); for (byte aData : data) { int x = aData & 0xff; sb.append(' '); if (x < 16) { sb.append('0'); } sb.append(Integer.toHexString(x)); } writer.println("-- 
dump: " + sb.toString()); } private void dumpLob(String fileName, boolean lobCompression) { OutputStream fileOut = null; FileStore fileStore = null; long size = 0; String n = fileName + (lobCompression ? ".comp" : "") + ".txt"; InputStream in = null; try { fileOut = FileUtils.newOutputStream(n, false); fileStore = FileStore.open(null, fileName, "r"); fileStore.init(); in = new FileStoreInputStream(fileStore, this, lobCompression, false); size = IOUtils.copy(in, fileOut); } catch (Throwable e) { // this is usually not a problem, because we try both compressed and // uncompressed } finally { IOUtils.closeSilently(fileOut); IOUtils.closeSilently(in); closeSilently(fileStore); } if (size == 0) { try { FileUtils.delete(n); } catch (Exception e) { traceError(n, e); } } } private void getSQL(StringBuilder builder, String column, Value v) { if (v instanceof ValueLob) { ValueLob lob = (ValueLob) v; byte[] small = lob.getSmall(); if (small == null) { String file = lob.getFileName(); String type = lob.getValueType() == Value.BLOB ? 
"BLOB" : "CLOB"; if (lob.isCompressed()) { dumpLob(file, true); file += ".comp"; } builder.append("READ_").append(type).append("('").append(file).append(".txt')"); return; } } else if (v instanceof ValueLobDb) { ValueLobDb lob = (ValueLobDb) v; byte[] small = lob.getSmall(); if (small == null) { int type = lob.getValueType(); long id = lob.getLobId(); long precision = lob.getType().getPrecision(); String columnType; if (type == Value.BLOB) { columnType = "BLOB"; builder.append("READ_BLOB"); } else { columnType = "CLOB"; builder.append("READ_CLOB"); } if (lobMaps) { builder.append("_MAP"); } else { builder.append("_DB"); } columnTypeMap.put(column, columnType); builder.append('(').append(id).append(", ").append(precision).append(')'); return; } } v.getSQL(builder); } private void setDatabaseName(String name) { databaseName = name; } private void dumpPageStore(String fileName) { setDatabaseName(fileName.substring(0, fileName.length() - Constants.SUFFIX_PAGE_FILE.length())); PrintWriter writer = null; stat = new Stats(); try { writer = getWriter(fileName, ".sql"); writer.println("CREATE ALIAS IF NOT EXISTS READ_BLOB FOR \"" + this.getClass().getName() + ".readBlob\";"); writer.println("CREATE ALIAS IF NOT EXISTS READ_CLOB FOR \"" + this.getClass().getName() + ".readClob\";"); writer.println("CREATE ALIAS IF NOT EXISTS READ_BLOB_DB FOR \"" + this.getClass().getName() + ".readBlobDb\";"); writer.println("CREATE ALIAS IF NOT EXISTS READ_CLOB_DB FOR \"" + this.getClass().getName() + ".readClobDb\";"); resetSchema(); store = FileStore.open(null, fileName, remove ? 
"rw" : "r"); long length = store.length(); try { store.init(); } catch (Exception e) { writeError(writer, e); } Data s = Data.create(this, 128, false); seek(0); store.readFully(s.getBytes(), 0, 128); s.setPos(48); pageSize = s.readInt(); int writeVersion = s.readByte(); int readVersion = s.readByte(); writer.println("-- pageSize: " + pageSize + " writeVersion: " + writeVersion + " readVersion: " + readVersion); if (pageSize < PageStore.PAGE_SIZE_MIN || pageSize > PageStore.PAGE_SIZE_MAX) { pageSize = Constants.DEFAULT_PAGE_SIZE; writer.println("-- ERROR: page size; using " + pageSize); } long pageCount = length / pageSize; parents = new int[(int) pageCount]; s = Data.create(this, pageSize, false); for (long i = 3; i < pageCount; i++) { s.reset(); seek(i); store.readFully(s.getBytes(), 0, 32); s.readByte(); s.readShortInt(); parents[(int) i] = s.readInt(); } int logKey = 0, logFirstTrunkPage = 0, logFirstDataPage = 0; s = Data.create(this, pageSize, false); for (long i = 1;; i++) { if (i == 3) { break; } s.reset(); seek(i); store.readFully(s.getBytes(), 0, pageSize); CRC32 crc = new CRC32(); crc.update(s.getBytes(), 4, pageSize - 4); int expected = (int) crc.getValue(); int got = s.readInt(); long writeCounter = s.readLong(); int key = s.readInt(); int firstTrunkPage = s.readInt(); int firstDataPage = s.readInt(); if (expected == got) { logKey = key; logFirstTrunkPage = firstTrunkPage; logFirstDataPage = firstDataPage; } writer.println("-- head " + i + ": writeCounter: " + writeCounter + " log " + key + ":" + firstTrunkPage + "/" + firstDataPage + " crc " + got + " (" + (expected == got ? 
"ok" : ("expected: " + expected)) + ")"); } writer.println("-- log " + logKey + ":" + logFirstTrunkPage + "/" + logFirstDataPage); PrintWriter devNull = new PrintWriter(new OutputStream() { @Override public void write(int b) { // ignore } }); dumpPageStore(devNull, pageCount); stat = new Stats(); schema.clear(); objectIdSet = new HashSet<>(); dumpPageStore(writer, pageCount); writeSchemaSET(writer); writeSchema(writer); try { dumpPageLogStream(writer, logKey, logFirstTrunkPage, logFirstDataPage, pageCount); } catch (IOException e) { // ignore } writer.println("---- Statistics ----"); writer.println("-- page count: " + pageCount + ", free: " + stat.free); long total = Math.max(1, stat.pageDataRows + stat.pageDataEmpty + stat.pageDataHead); writer.println("-- page data bytes: head " + stat.pageDataHead + ", empty " + stat.pageDataEmpty + ", rows " + stat.pageDataRows + " (" + (100 - 100L * stat.pageDataEmpty / total) + "% full)"); for (int i = 0; i < stat.pageTypeCount.length; i++) { int count = stat.pageTypeCount[i]; if (count > 0) { writer.println("-- " + getPageType(i) + " " + (100 * count / pageCount) + "%, " + count + " page(s)"); } } writer.close(); } catch (Throwable e) { writeError(writer, e); } finally { IOUtils.closeSilently(writer); closeSilently(store); } } private void dumpMVStoreFile(PrintWriter writer, String fileName) { writer.println("-- MVStore"); writer.println("CREATE ALIAS IF NOT EXISTS READ_BLOB FOR \"" + this.getClass().getName() + ".readBlob\";"); writer.println("CREATE ALIAS IF NOT EXISTS READ_CLOB FOR \"" + this.getClass().getName() + ".readClob\";"); writer.println("CREATE ALIAS IF NOT EXISTS READ_BLOB_DB FOR \"" + this.getClass().getName() + ".readBlobDb\";"); writer.println("CREATE ALIAS IF NOT EXISTS READ_CLOB_DB FOR \"" + this.getClass().getName() + ".readClobDb\";"); writer.println("CREATE ALIAS IF NOT EXISTS READ_BLOB_MAP FOR \"" + this.getClass().getName() + ".readBlobMap\";"); writer.println("CREATE ALIAS IF NOT EXISTS READ_CLOB_MAP 
FOR \"" + this.getClass().getName() + ".readClobMap\";"); resetSchema(); setDatabaseName(fileName.substring(0, fileName.length() - Constants.SUFFIX_MV_FILE.length())); MVStore mv = new MVStore.Builder(). fileName(fileName).readOnly().open(); dumpLobMaps(writer, mv); writer.println("-- Meta"); dumpMeta(writer, mv); writer.println("-- Tables"); TransactionStore store = new TransactionStore(mv); try { store.init(); } catch (Throwable e) { writeError(writer, e); } try { // extract the metadata so we can dump the settings ValueDataType type = new ValueDataType(); for (String mapName : mv.getMapNames()) { if (!mapName.startsWith("table.")) { continue; } String tableId = mapName.substring("table.".length()); if (Integer.parseInt(tableId) == 0) { TransactionMap<Value, Value> dataMap = store.begin().openMap(mapName, type, type); Iterator<Value> dataIt = dataMap.keyIterator(null); while (dataIt.hasNext()) { Value rowId = dataIt.next(); Value[] values = ((ValueArray) dataMap.get(rowId)) .getList(); try { SimpleRow r = new SimpleRow(values); MetaRecord meta = new MetaRecord(r); schema.add(meta); if (meta.getObjectType() == DbObject.TABLE_OR_VIEW) { String sql = values[3].getString(); String name = extractTableOrViewName(sql); tableMap.put(meta.getId(), name); } } catch (Throwable t) { writeError(writer, t); } } } } // Have to do these before the tables because settings like COLLATION may affect // some of them, and we can't change settings after we have created user tables writeSchemaSET(writer); writer.println("---- Table Data ----"); for (String mapName : mv.getMapNames()) { if (!mapName.startsWith("table.")) { continue; } String tableId = mapName.substring("table.".length()); if (Integer.parseInt(tableId) == 0) { continue; } TransactionMap<Value, Value> dataMap = store.begin().openMap(mapName, type, type); Iterator<Value> dataIt = dataMap.keyIterator(null); boolean init = false; while (dataIt.hasNext()) { Value rowId = dataIt.next(); Value[] values = ((ValueArray) 
dataMap.get(rowId)).getList(); recordLength = values.length; if (!init) { setStorage(Integer.parseInt(tableId)); // init the column types StringBuilder builder = new StringBuilder(); for (valueId = 0; valueId < recordLength; valueId++) { String columnName = storageName + "." + valueId; builder.setLength(0); getSQL(builder, columnName, values[valueId]); } createTemporaryTable(writer); init = true; } StringBuilder buff = new StringBuilder(); buff.append("INSERT INTO O_").append(tableId) .append(" VALUES("); for (valueId = 0; valueId < recordLength; valueId++) { if (valueId > 0) { buff.append(", "); } String columnName = storageName + "." + valueId; getSQL(buff, columnName, values[valueId]); } buff.append(");"); writer.println(buff.toString()); } } writeSchema(writer); writer.println("DROP ALIAS READ_BLOB_MAP;"); writer.println("DROP ALIAS READ_CLOB_MAP;"); writer.println("DROP TABLE IF EXISTS INFORMATION_SCHEMA.LOB_BLOCKS;"); } catch (Throwable e) { writeError(writer, e); } finally { mv.close(); } } private static void dumpMeta(PrintWriter writer, MVStore mv) { MVMap<String, String> meta = mv.getMetaMap(); for (Entry<String, String> e : meta.entrySet()) { writer.println("-- " + e.getKey() + " = " + e.getValue()); } } private void dumpLobMaps(PrintWriter writer, MVStore mv) { lobMaps = mv.hasMap("lobData"); if (!lobMaps) { return; } MVMap<Long, byte[]> lobData = mv.openMap("lobData"); StreamStore streamStore = new StreamStore(lobData); MVMap<Long, Object[]> lobMap = mv.openMap("lobMap"); writer.println("-- LOB"); writer.println("CREATE TABLE IF NOT EXISTS " + "INFORMATION_SCHEMA.LOB_BLOCKS(" + "LOB_ID BIGINT, SEQ INT, DATA BINARY, " + "PRIMARY KEY(LOB_ID, SEQ));"); boolean hasErrors = false; for (Entry<Long, Object[]> e : lobMap.entrySet()) { long lobId = e.getKey(); Object[] value = e.getValue(); byte[] streamStoreId = (byte[]) value[0]; InputStream in = streamStore.get(streamStoreId); int len = 8 * 1024; byte[] block = new byte[len]; try { for (int seq = 0;; seq++) 
{ int l = IOUtils.readFully(in, block, block.length); if (l > 0) { writer.print("INSERT INTO INFORMATION_SCHEMA.LOB_BLOCKS " + "VALUES(" + lobId + ", " + seq + ", '"); writer.print(StringUtils.convertBytesToHex(block, l)); writer.println("');"); } if (l != len) { break; } } } catch (IOException ex) { writeError(writer, ex); hasErrors = true; } } writer.println("-- lobMap.size: " + lobMap.sizeAsLong()); writer.println("-- lobData.size: " + lobData.sizeAsLong()); if (hasErrors) { writer.println("-- lobMap"); for (Long k : lobMap.keyList()) { Object[] value = lobMap.get(k); byte[] streamStoreId = (byte[]) value[0]; writer.println("-- " + k + " " + StreamStore.toString(streamStoreId)); } writer.println("-- lobData"); for (Long k : lobData.keyList()) { writer.println("-- " + k + " len " + lobData.get(k).length); } } } private static String getPageType(int type) { switch (type) { case 0: return "free"; case Page.TYPE_DATA_LEAF: return "data leaf"; case Page.TYPE_DATA_NODE: return "data node"; case Page.TYPE_DATA_OVERFLOW: return "data overflow"; case Page.TYPE_BTREE_LEAF: return "btree leaf"; case Page.TYPE_BTREE_NODE: return "btree node"; case Page.TYPE_FREE_LIST: return "free list"; case Page.TYPE_STREAM_TRUNK: return "stream trunk"; case Page.TYPE_STREAM_DATA: return "stream data"; } return "[" + type + "]"; } private void dumpPageStore(PrintWriter writer, long pageCount) { Data s = Data.create(this, pageSize, false); for (long page = 3; page < pageCount; page++) { s = Data.create(this, pageSize, false); seek(page); store.readFully(s.getBytes(), 0, pageSize); dumpPage(writer, s, page, pageCount); } } private void dumpPage(PrintWriter writer, Data s, long page, long pageCount) { try { int type = s.readByte(); switch (type) { case Page.TYPE_EMPTY: stat.pageTypeCount[type]++; return; } boolean last = (type & Page.FLAG_LAST) != 0; type &= ~Page.FLAG_LAST; if (!PageStore.checksumTest(s.getBytes(), (int) page, pageSize)) { writeDataError(writer, "checksum mismatch type: " + 
type, s.getBytes()); } s.readShortInt(); switch (type) { // type 1 case Page.TYPE_DATA_LEAF: { stat.pageTypeCount[type]++; int parentPageId = s.readInt(); setStorage(s.readVarInt()); int columnCount = s.readVarInt(); int entries = s.readShortInt(); writer.println("-- page " + page + ": data leaf " + (last ? "(last) " : "") + "parent: " + parentPageId + " table: " + storageId + " entries: " + entries + " columns: " + columnCount); dumpPageDataLeaf(writer, s, last, page, columnCount, entries); break; } // type 2 case Page.TYPE_DATA_NODE: { stat.pageTypeCount[type]++; int parentPageId = s.readInt(); setStorage(s.readVarInt()); int rowCount = s.readInt(); int entries = s.readShortInt(); writer.println("-- page " + page + ": data node " + (last ? "(last) " : "") + "parent: " + parentPageId + " table: " + storageId + " entries: " + entries + " rowCount: " + rowCount); dumpPageDataNode(writer, s, page, entries); break; } // type 3 case Page.TYPE_DATA_OVERFLOW: stat.pageTypeCount[type]++; writer.println("-- page " + page + ": data overflow " + (last ? "(last) " : "")); break; // type 4 case Page.TYPE_BTREE_LEAF: { stat.pageTypeCount[type]++; int parentPageId = s.readInt(); setStorage(s.readVarInt()); int entries = s.readShortInt(); writer.println("-- page " + page + ": b-tree leaf " + (last ? "(last) " : "") + "parent: " + parentPageId + " index: " + storageId + " entries: " + entries); if (trace) { dumpPageBtreeLeaf(writer, s, entries, !last); } break; } // type 5 case Page.TYPE_BTREE_NODE: stat.pageTypeCount[type]++; int parentPageId = s.readInt(); setStorage(s.readVarInt()); writer.println("-- page " + page + ": b-tree node " + (last ? "(last) " : "") + "parent: " + parentPageId + " index: " + storageId); dumpPageBtreeNode(writer, s, page, !last); break; // type 6 case Page.TYPE_FREE_LIST: stat.pageTypeCount[type]++; writer.println("-- page " + page + ": free list " + (last ? 
"(last)" : "")); stat.free += dumpPageFreeList(writer, s, page, pageCount); break; // type 7 case Page.TYPE_STREAM_TRUNK: stat.pageTypeCount[type]++; writer.println("-- page " + page + ": log trunk"); break; // type 8 case Page.TYPE_STREAM_DATA: stat.pageTypeCount[type]++; writer.println("-- page " + page + ": log data"); break; default: writer.println("-- ERROR page " + page + " unknown type " + type); break; } } catch (Exception e) { writeError(writer, e); } } private void dumpPageLogStream(PrintWriter writer, int logKey, int logFirstTrunkPage, int logFirstDataPage, long pageCount) throws IOException { Data s = Data.create(this, pageSize, false); DataReader in = new DataReader( new PageInputStream(writer, this, store, logKey, logFirstTrunkPage, logFirstDataPage, pageSize) ); writer.println("---- Transaction log ----"); CompressLZF compress = new CompressLZF(); while (true) { int x = in.readByte(); if (x < 0) { break; } if (x == PageLog.NOOP) { // ignore } else if (x == PageLog.UNDO) { int pageId = in.readVarInt(); int size = in.readVarInt(); byte[] data = new byte[pageSize]; if (size == 0) { in.readFully(data, pageSize); } else if (size == 1) { // empty } else { byte[] compressBuffer = new byte[size]; in.readFully(compressBuffer, size); try { compress.expand(compressBuffer, 0, size, data, 0, pageSize); } catch (ArrayIndexOutOfBoundsException e) { throw DbException.convertToIOException(e); } } String typeName = ""; int type = data[0]; boolean last = (type & Page.FLAG_LAST) != 0; type &= ~Page.FLAG_LAST; switch (type) { case Page.TYPE_EMPTY: typeName = "empty"; break; case Page.TYPE_DATA_LEAF: typeName = "data leaf " + (last ? "(last)" : ""); break; case Page.TYPE_DATA_NODE: typeName = "data node " + (last ? "(last)" : ""); break; case Page.TYPE_DATA_OVERFLOW: typeName = "data overflow " + (last ? "(last)" : ""); break; case Page.TYPE_BTREE_LEAF: typeName = "b-tree leaf " + (last ? 
"(last)" : ""); break; case Page.TYPE_BTREE_NODE: typeName = "b-tree node " + (last ? "(last)" : ""); break; case Page.TYPE_FREE_LIST: typeName = "free list " + (last ? "(last)" : ""); break; case Page.TYPE_STREAM_TRUNK: typeName = "log trunk"; break; case Page.TYPE_STREAM_DATA: typeName = "log data"; break; default: typeName = "ERROR: unknown type " + type; break; } writer.println("-- undo page " + pageId + " " + typeName); if (trace) { Data d = Data.create(null, data, false); dumpPage(writer, d, pageId, pageCount); } } else if (x == PageLog.ADD) { int sessionId = in.readVarInt(); setStorage(in.readVarInt()); Row row = PageLog.readRow(RowFactory.DEFAULT, in, s); writer.println("-- session " + sessionId + " table " + storageId + " + " + row.toString()); if (transactionLog) { if (storageId == 0 && row.getColumnCount() >= 4) { int tableId = (int) row.getKey(); String sql = row.getValue(3).getString(); String name = extractTableOrViewName(sql); if (row.getValue(2).getInt() == DbObject.TABLE_OR_VIEW) { tableMap.put(tableId, name); } writer.println(sql + ";"); } else { String tableName = tableMap.get(storageId); if (tableName != null) { StringBuilder builder = new StringBuilder(); builder.append("INSERT INTO ").append(tableName). 
append(" VALUES("); for (int i = 0; i < row.getColumnCount(); i++) { if (i > 0) { builder.append(", "); } row.getValue(i).getSQL(builder); } builder.append(");"); writer.println(builder.toString()); } } } } else if (x == PageLog.REMOVE) { int sessionId = in.readVarInt(); setStorage(in.readVarInt()); long key = in.readVarLong(); writer.println("-- session " + sessionId + " table " + storageId + " - " + key); if (transactionLog) { if (storageId == 0) { int tableId = (int) key; String tableName = tableMap.get(tableId); if (tableName != null) { writer.println("DROP TABLE IF EXISTS " + tableName + ";"); } } else { String tableName = tableMap.get(storageId); if (tableName != null) { String sql = "DELETE FROM " + tableName + " WHERE _ROWID_ = " + key + ";"; writer.println(sql); } } } } else if (x == PageLog.TRUNCATE) { int sessionId = in.readVarInt(); setStorage(in.readVarInt()); writer.println("-- session " + sessionId + " table " + storageId + " truncate"); if (transactionLog) { writer.println("TRUNCATE TABLE " + storageId); } } else if (x == PageLog.COMMIT) { int sessionId = in.readVarInt(); writer.println("-- commit " + sessionId); } else if (x == PageLog.ROLLBACK) { int sessionId = in.readVarInt(); writer.println("-- rollback " + sessionId); } else if (x == PageLog.PREPARE_COMMIT) { int sessionId = in.readVarInt(); String transaction = in.readString(); writer.println("-- prepare commit " + sessionId + " " + transaction); } else if (x == PageLog.NOOP) { // nothing to do } else if (x == PageLog.CHECKPOINT) { writer.println("-- checkpoint"); } else if (x == PageLog.FREE_LOG) { int size = in.readVarInt(); StringBuilder buff = new StringBuilder("-- free"); for (int i = 0; i < size; i++) { buff.append(' ').append(in.readVarInt()); } writer.println(buff); } else { writer.println("-- ERROR: unknown operation " + x); break; } } } private String setStorage(int storageId) { this.storageId = storageId; this.storageName = "O_" + Integer.toString(storageId).replace('-', 'M'); 
return storageName; } /** * An input stream that reads the data from a page store. */ static class PageInputStream extends InputStream { private final PrintWriter writer; private final FileStore store; private final Data page; private final int pageSize; private long trunkPage; private long nextTrunkPage; private long dataPage; private final IntArray dataPages = new IntArray(); private boolean endOfFile; private int remaining; private int logKey; public PageInputStream(PrintWriter writer, DataHandler handler, FileStore store, int logKey, long firstTrunkPage, long firstDataPage, int pageSize) { this.writer = writer; this.store = store; this.pageSize = pageSize; this.logKey = logKey - 1; this.nextTrunkPage = firstTrunkPage; this.dataPage = firstDataPage; page = Data.create(handler, pageSize, false); } @Override public int read() { byte[] b = { 0 }; int len = read(b); return len < 0 ? -1 : (b[0] & 255); } @Override public int read(byte[] b) { return read(b, 0, b.length); } @Override public int read(byte[] b, int off, int len) { if (len == 0) { return 0; } int read = 0; while (len > 0) { int r = readBlock(b, off, len); if (r < 0) { break; } read += r; off += r; len -= r; } return read == 0 ? 
-1 : read; } private int readBlock(byte[] buff, int off, int len) { fillBuffer(); if (endOfFile) { return -1; } int l = Math.min(remaining, len); page.read(buff, off, l); remaining -= l; return l; } private void fillBuffer() { if (remaining > 0 || endOfFile) { return; } while (dataPages.size() == 0) { if (nextTrunkPage == 0) { endOfFile = true; return; } trunkPage = nextTrunkPage; store.seek(trunkPage * pageSize); store.readFully(page.getBytes(), 0, pageSize); page.reset(); if (!PageStore.checksumTest(page.getBytes(), (int) trunkPage, pageSize)) { writer.println("-- ERROR: checksum mismatch page: " +trunkPage); endOfFile = true; return; } int t = page.readByte(); page.readShortInt(); if (t != Page.TYPE_STREAM_TRUNK) { writer.println("-- log eof " + trunkPage + " type: " + t + " expected type: " + Page.TYPE_STREAM_TRUNK); endOfFile = true; return; } page.readInt(); int key = page.readInt(); logKey++; if (key != logKey) { writer.println("-- log eof " + trunkPage + " type: " + t + " expected key: " + logKey + " got: " + key); } nextTrunkPage = page.readInt(); writer.println("-- log " + key + ":" + trunkPage + " next: " + nextTrunkPage); int pageCount = page.readShortInt(); for (int i = 0; i < pageCount; i++) { int d = page.readInt(); if (dataPage != 0) { if (d == dataPage) { dataPage = 0; } else { // ignore the pages before the starting page continue; } } dataPages.add(d); } } if (dataPages.size() > 0) { page.reset(); long nextPage = dataPages.get(0); dataPages.remove(0); store.seek(nextPage * pageSize); store.readFully(page.getBytes(), 0, pageSize); page.reset(); int t = page.readByte(); if (t != 0 && !PageStore.checksumTest(page.getBytes(), (int) nextPage, pageSize)) { writer.println("-- ERROR: checksum mismatch page: " +nextPage); endOfFile = true; return; } page.readShortInt(); int p = page.readInt(); int k = page.readInt(); writer.println("-- log " + k + ":" + trunkPage + "/" + nextPage); if (t != Page.TYPE_STREAM_DATA) { writer.println("-- log eof " +nextPage+ " 
type: " + t + " parent: " + p + " expected type: " + Page.TYPE_STREAM_DATA); endOfFile = true; return; } else if (k != logKey) { writer.println("-- log eof " +nextPage+ " type: " + t + " parent: " + p + " expected key: " + logKey + " got: " + k); endOfFile = true; return; } remaining = pageSize - page.length(); } } } private void dumpPageBtreeNode(PrintWriter writer, Data s, long pageId, boolean positionOnly) { int rowCount = s.readInt(); int entryCount = s.readShortInt(); int[] children = new int[entryCount + 1]; int[] offsets = new int[entryCount]; children[entryCount] = s.readInt(); checkParent(writer, pageId, children, entryCount); int empty = Integer.MAX_VALUE; for (int i = 0; i < entryCount; i++) { children[i] = s.readInt(); checkParent(writer, pageId, children, i); int off = s.readShortInt(); empty = Math.min(off, empty); offsets[i] = off; } empty = empty - s.length(); if (!trace) { return; } writer.println("-- empty: " + empty); for (int i = 0; i < entryCount; i++) { int off = offsets[i]; s.setPos(off); long key = s.readVarLong(); Value data; if (positionOnly) { data = ValueLong.get(key); } else { try { data = (Value)s.readValue(); } catch (Throwable e) { writeDataError(writer, "exception " + e, s.getBytes()); continue; } } writer.println("-- [" + i + "] child: " + children[i] + " key: " + key + " data: " + data); } writer.println("-- [" + entryCount + "] child: " + children[entryCount] + " rowCount: " + rowCount); } private int dumpPageFreeList(PrintWriter writer, Data s, long pageId, long pageCount) { int pagesAddressed = PageFreeList.getPagesAddressed(pageSize); int len = pagesAddressed >> 3; byte[] b = new byte[len]; s.read(b, 0, len); BitSet used = BitSet.valueOf(b); int free = 0; for (long i = 0, j = pageId; i < pagesAddressed && j < pageCount; i++, j++) { if (i == 0 || j % 100 == 0) { if (i > 0) { writer.println(); } writer.print("-- " + j + " "); } else if (j % 20 == 0) { writer.print(" - "); } else if (j % 10 == 0) { writer.print(' '); } 
writer.print(used.get((int) i) ? '1' : '0'); if (!used.get((int) i)) { free++; } } writer.println(); return free; } private void dumpPageBtreeLeaf(PrintWriter writer, Data s, int entryCount, boolean positionOnly) { int[] offsets = new int[entryCount]; int empty = Integer.MAX_VALUE; for (int i = 0; i < entryCount; i++) { int off = s.readShortInt(); empty = Math.min(off, empty); offsets[i] = off; } empty = empty - s.length(); writer.println("-- empty: " + empty); for (int i = 0; i < entryCount; i++) { int off = offsets[i]; s.setPos(off); long key = s.readVarLong(); Value data; if (positionOnly) { data = ValueLong.get(key); } else { try { data = (Value)s.readValue(); } catch (Throwable e) { writeDataError(writer, "exception " + e, s.getBytes()); continue; } } writer.println("-- [" + i + "] key: " + key + " data: " + data); } } private void checkParent(PrintWriter writer, long pageId, int[] children, int index) { int child = children[index]; if (child < 0 || child >= parents.length) { writer.println("-- ERROR [" + pageId + "] child[" + index + "]: " + child + " >= page count: " + parents.length); } else if (parents[child] != pageId) { writer.println("-- ERROR [" + pageId + "] child[" + index + "]: " + child + " parent: " + parents[child]); } } private void dumpPageDataNode(PrintWriter writer, Data s, long pageId, int entryCount) { int[] children = new int[entryCount + 1]; long[] keys = new long[entryCount]; children[entryCount] = s.readInt(); checkParent(writer, pageId, children, entryCount); for (int i = 0; i < entryCount; i++) { children[i] = s.readInt(); checkParent(writer, pageId, children, i); keys[i] = s.readVarLong(); } if (!trace) { return; } for (int i = 0; i < entryCount; i++) { writer.println("-- [" + i + "] child: " + children[i] + " key: " + keys[i]); } writer.println("-- [" + entryCount + "] child: " + children[entryCount]); } private void dumpPageDataLeaf(PrintWriter writer, Data s, boolean last, long pageId, int columnCount, int entryCount) { long[] 
keys = new long[entryCount]; int[] offsets = new int[entryCount]; long next = 0; if (!last) { next = s.readInt(); writer.println("-- next: " + next); } int empty = pageSize; for (int i = 0; i < entryCount; i++) { keys[i] = s.readVarLong(); int off = s.readShortInt(); empty = Math.min(off, empty); offsets[i] = off; } stat.pageDataRows += pageSize - empty; empty = empty - s.length(); stat.pageDataHead += s.length(); stat.pageDataEmpty += empty; if (trace) { writer.println("-- empty: " + empty); } if (!last) { Data s2 = Data.create(this, pageSize, false); s.setPos(pageSize); long parent = pageId; while (true) { checkParent(writer, parent, new int[]{(int) next}, 0); parent = next; seek(next); store.readFully(s2.getBytes(), 0, pageSize); s2.reset(); int type = s2.readByte(); s2.readShortInt(); s2.readInt(); if (type == (Page.TYPE_DATA_OVERFLOW | Page.FLAG_LAST)) { int size = s2.readShortInt(); writer.println("-- chain: " + next + " type: " + type + " size: " + size); s.checkCapacity(size); s.write(s2.getBytes(), s2.length(), size); break; } else if (type == Page.TYPE_DATA_OVERFLOW) { next = s2.readInt(); if (next == 0) { writeDataError(writer, "next:0", s2.getBytes()); break; } int size = pageSize - s2.length(); writer.println("-- chain: " + next + " type: " + type + " size: " + size + " next: " + next); s.checkCapacity(size); s.write(s2.getBytes(), s2.length(), size); } else { writeDataError(writer, "type: " + type, s2.getBytes()); break; } } } for (int i = 0; i < entryCount; i++) { long key = keys[i]; int off = offsets[i]; if (trace) { writer.println("-- [" + i + "] storage: " + storageId + " key: " + key + " off: " + off); } s.setPos(off); Value[] data = createRecord(writer, s, columnCount); if (data != null) { createTemporaryTable(writer); writeRow(writer, s, data); if (remove && storageId == 0) { String sql = data[3].getString(); if (sql.startsWith("CREATE USER ")) { int saltIndex = Utils.indexOf(s.getBytes(), "SALT ".getBytes(), off); if (saltIndex >= 0) { String 
userName = sql.substring("CREATE USER ".length(), sql.indexOf("SALT ") - 1); if (userName.startsWith("IF NOT EXISTS ")) { userName = userName.substring("IF NOT EXISTS ".length()); } if (userName.startsWith("\"")) { // TODO doesn't work for all cases ("" inside // user name) userName = userName.substring(1, userName.length() - 1); } byte[] userPasswordHash = SHA256.getKeyPasswordHash( userName, "".toCharArray()); byte[] salt = MathUtils.secureRandomBytes(Constants.SALT_LEN); byte[] passwordHash = SHA256.getHashWithSalt( userPasswordHash, salt); StringBuilder buff = new StringBuilder() .append("SALT '"); StringUtils.convertBytesToHex(buff, salt) .append("' HASH '"); StringUtils.convertBytesToHex(buff, passwordHash) .append('\''); byte[] replacement = buff.toString().getBytes(); System.arraycopy(replacement, 0, s.getBytes(), saltIndex, replacement.length); seek(pageId); store.write(s.getBytes(), 0, pageSize); if (trace) { out.println("User: " + userName); } remove = false; } } } } } } private void seek(long page) { // page is long to avoid integer overflow store.seek(page * pageSize); } private Value[] createRecord(PrintWriter writer, Data s, int columnCount) { recordLength = columnCount; if (columnCount <= 0) { writeDataError(writer, "columnCount<0", s.getBytes()); return null; } Value[] data; try { data = new Value[columnCount]; } catch (OutOfMemoryError e) { writeDataError(writer, "out of memory", s.getBytes()); return null; } return data; } private void writeRow(PrintWriter writer, Data s, Value[] data) { StringBuilder sb = new StringBuilder(); sb.append("INSERT INTO ").append(storageName).append(" VALUES("); for (valueId = 0; valueId < recordLength; valueId++) { try { Value v = (Value)s.readValue(); data[valueId] = v; if (valueId > 0) { sb.append(", "); } String columnName = storageName + "." 
+ valueId; getSQL(sb, columnName, v); } catch (Exception e) { writeDataError(writer, "exception " + e, s.getBytes()); } catch (OutOfMemoryError e) { writeDataError(writer, "out of memory", s.getBytes()); } } sb.append(");"); writer.println(sb.toString()); if (storageId == 0) { try { SimpleRow r = new SimpleRow(data); MetaRecord meta = new MetaRecord(r); schema.add(meta); if (meta.getObjectType() == DbObject.TABLE_OR_VIEW) { String sql = data[3].getString(); String name = extractTableOrViewName(sql); tableMap.put(meta.getId(), name); } } catch (Throwable t) { writeError(writer, t); } } } private void resetSchema() { schema = new ArrayList<>(); objectIdSet = new HashSet<>(); tableMap = new HashMap<>(); columnTypeMap = new HashMap<>(); } private void writeSchemaSET(PrintWriter writer) { writer.println("---- Schema SET ----"); for (MetaRecord m : schema) { if (m.getObjectType() == DbObject.SETTING) { String sql = m.getSQL(); writer.println(sql + ";"); } } } private void writeSchema(PrintWriter writer) { writer.println("---- Schema ----"); Collections.sort(schema); for (MetaRecord m : schema) { if (m.getObjectType() != DbObject.SETTING && !isSchemaObjectTypeDelayed(m)) { // create, but not referential integrity constraints and so on // because they could fail on duplicate keys String sql = m.getSQL(); writer.println(sql + ";"); } } // first, copy the lob storage (if there is any) // must occur before copying data, // otherwise the lob storage may be overwritten boolean deleteLobs = false; for (Map.Entry<Integer, String> entry : tableMap.entrySet()) { Integer objectId = entry.getKey(); String name = entry.getValue(); if (objectIdSet.contains(objectId)) { if (isLobTable(name)) { setStorage(objectId); writer.println("DELETE FROM " + name + ";"); writer.println("INSERT INTO " + name + " SELECT * FROM " + storageName + ";"); if (name.equals("INFORMATION_SCHEMA.LOBS") || name.equalsIgnoreCase("\"INFORMATION_SCHEMA\".\"LOBS\"")) { writer.println("UPDATE " + name + " SET 
`TABLE` = " + LobStorageFrontend.TABLE_TEMP + ";"); deleteLobs = true; } } } } for (Map.Entry<Integer, String> entry : tableMap.entrySet()) { Integer objectId = entry.getKey(); String name = entry.getValue(); if (objectIdSet.contains(objectId)) { setStorage(objectId); if (isLobTable(name)) { continue; } writer.println("INSERT INTO " + name + " SELECT * FROM " + storageName + ";"); } } for (Integer objectId : objectIdSet) { setStorage(objectId); writer.println("DROP TABLE " + storageName + ";"); } writer.println("DROP ALIAS READ_BLOB;"); writer.println("DROP ALIAS READ_CLOB;"); writer.println("DROP ALIAS READ_BLOB_DB;"); writer.println("DROP ALIAS READ_CLOB_DB;"); if (deleteLobs) { writer.println("DELETE FROM INFORMATION_SCHEMA.LOBS WHERE `TABLE` = " + LobStorageFrontend.TABLE_TEMP + ";"); } for (MetaRecord m : schema) { if (isSchemaObjectTypeDelayed(m)) { String sql = m.getSQL(); writer.println(sql + ";"); } } } private static boolean isLobTable(String name) { return name.startsWith("INFORMATION_SCHEMA.LOB") || name.startsWith("\"INFORMATION_SCHEMA\".\"LOB") || name.startsWith("\"information_schema\".\"lob"); } private static boolean isSchemaObjectTypeDelayed(MetaRecord m) { switch (m.getObjectType()) { case DbObject.INDEX: case DbObject.CONSTRAINT: case DbObject.TRIGGER: return true; } return false; } private void createTemporaryTable(PrintWriter writer) { if (!objectIdSet.contains(storageId)) { objectIdSet.add(storageId); writer.write("CREATE TABLE "); writer.write(storageName); writer.write('('); for (int i = 0; i < recordLength; i++) { if (i > 0) { writer.print(", "); } writer.write('C'); writer.print(i); writer.write(' '); String columnType = columnTypeMap.get(storageName + "." + i); writer.write(columnType == null ? 
"VARCHAR" : columnType); } writer.println(");"); writer.flush(); } } private static String extractTableOrViewName(String sql) { int indexTable = sql.indexOf(" TABLE "); int indexView = sql.indexOf(" VIEW "); if (indexTable > 0 && indexView > 0) { if (indexTable < indexView) { indexView = -1; } else { indexTable = -1; } } if (indexView > 0) { sql = sql.substring(indexView + " VIEW ".length()); } else if (indexTable > 0) { sql = sql.substring(indexTable + " TABLE ".length()); } else { return "UNKNOWN"; } if (sql.startsWith("IF NOT EXISTS ")) { sql = sql.substring("IF NOT EXISTS ".length()); } boolean ignore = false; // sql is modified in the loop for (int i = 0; i < sql.length(); i++) { char ch = sql.charAt(i); if (ch == '\"') { ignore = !ignore; } else if (!ignore && (ch <= ' ' || ch == '(')) { sql = sql.substring(0, i); return sql; } } return "UNKNOWN"; } private static void closeSilently(FileStore fileStore) { if (fileStore != null) { fileStore.closeSilently(); } } private void writeError(PrintWriter writer, Throwable e) { if (writer != null) { writer.println("// error: " + e); } traceError("Error", e); } /** * INTERNAL */ @Override public String getDatabasePath() { return databaseName; } /** * INTERNAL */ @Override public FileStore openFile(String name, String mode, boolean mustExist) { return FileStore.open(this, name, "rw"); } /** * INTERNAL */ @Override public void checkPowerOff() { // nothing to do } /** * INTERNAL */ @Override public void checkWritingAllowed() { // nothing to do } /** * INTERNAL */ @Override public int getMaxLengthInplaceLob() { throw DbException.throwInternalError(); } /** * INTERNAL */ @Override public String getLobCompressionAlgorithm(int type) { return null; } /** * INTERNAL */ @Override public Object getLobSyncObject() { return this; } /** * INTERNAL */ @Override public SmallLRUCache<String, String[]> getLobFileListCache() { return null; } /** * INTERNAL */ @Override public TempFileDeleter getTempFileDeleter() { return 
TempFileDeleter.getInstance(); } /** * INTERNAL */ @Override public LobStorageBackend getLobStorage() { return null; } /** * INTERNAL */ @Override public int readLob(long lobId, byte[] hmac, long offset, byte[] buff, int off, int length) { throw DbException.throwInternalError(); } @Override public JavaObjectSerializer getJavaObjectSerializer() { return null; } @Override public CompareMode getCompareMode() { return CompareMode.getInstance(null, 0); } }
package eu.minemania.watson.scheduler; import eu.minemania.watson.Reference; import eu.minemania.watson.chat.ChatMessage; import eu.minemania.watson.config.Configs; import eu.minemania.watson.data.DataManager; import fi.dy.masa.malilib.interfaces.IClientTickHandler; import net.minecraft.client.Minecraft; public class ClientTickHandler implements IClientTickHandler { @Override public void onClientTick(Minecraft mc) { if (mc.world != null && mc.player != null) { SyncTaskQueue.getInstance().runTasks(); ChatMessage.getInstance().processServerChatQueue(); if(DataManager.getClientTickStartTime() != 0 && System.currentTimeMillis() - DataManager.getClientTickStartTime() > 1000) { ChatMessage.localOutputT("watson.message.join.watson", Reference.MOD_VERSION, Configs.Generic.WATSON_PREFIX.getStringValue(), true); ChatMessage.localOutputT("watson.message.join.plugin"); DataManager.setClientTick(0); } } } }
/*
 * To change this template, choose Tools | Templates
 * and open the template in the editor.
 */
package com.greatwqs.mubian.servlet;

import java.io.IOException;

import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import com.greatwqs.mubian.Constants;
import com.greatwqs.mubian.bean.AboutUsBean;
import com.greatwqs.mubian.dao.AboutUsDao;

/**
 * Serves the "contact us" page: loads the content bean from the DAO and
 * forwards to {@link #RETURN_PAGE}. On any failure the error is logged via
 * the servlet container and the request is forwarded to {@link #ERROR_PAGE}
 * (previously the exception was swallowed with printStackTrace and the
 * ERROR_PAGE constant was never used, leaving the client an empty response).
 *
 * @author greatwqs
 * @create 2013-04-17
 */
public class AboutContactUsServlet extends HttpServlet {

    public static final long serialVersionUID = 1L;
    /** JSP rendered on success. */
    public static final String RETURN_PAGE = "AboutUs.jsp";
    /** JSP rendered when loading the content fails. */
    public static final String ERROR_PAGE = "error.jsp";

    /**
     * Shared GET/POST handler.
     *
     * @param request incoming request; the loaded bean is stored under "AboutUSBean"
     * @param resp    response to forward
     * @throws ServletException if the forward itself fails
     * @throws IOException      if the forward itself fails
     */
    private void doService(HttpServletRequest request, HttpServletResponse resp)
            throws ServletException, IOException {
        try {
            AboutUsBean bean = AboutUsDao.getAboutUSBean(Constants.ABOUTUS_CONTACT_US);
            request.setAttribute("AboutUSBean", bean);
            request.getRequestDispatcher(RETURN_PAGE).forward(request, resp);
        } catch (Exception e) {
            // Log through the container and show the dedicated error page
            // instead of silently returning nothing to the client.
            log("AboutContactUsServlet failed to load contact-us content", e);
            request.getRequestDispatcher(ERROR_PAGE).forward(request, resp);
        }
    }

    @Override
    protected void doGet(HttpServletRequest req, HttpServletResponse resp)
            throws ServletException, IOException {
        doService(req, resp);
    }

    @Override
    protected void doPost(HttpServletRequest req, HttpServletResponse resp)
            throws ServletException, IOException {
        doService(req, resp);
    }

    @Override
    public void destroy() {
        super.destroy();
    }

    @Override
    public void init() throws ServletException {
        super.init();
    }
}
package com.covid.backend.controller.doctor; import com.covid.backend.service.doctor.CreateDoctorService; import com.covid.backend.utils.Result; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.RestController; @RestController public class CreateDoctor { @Autowired public CreateDoctorService createDoctorService; @GetMapping("/doctor/createDoctor") public Result createDoctor(@RequestParam(value = "name", required = false) String name, @RequestParam(value = "gender", required = false) String gender, @RequestParam(value = "birthday", required = false) String birthday, @RequestParam(value = "department", required = false) String department, @RequestParam(value = "hospital_id") String hospital_id) { //TODO login check int affectedLine = createDoctorService.createDoctor(name, gender, birthday, department, hospital_id); return Result.ok(affectedLine); } }
package com.googlecode.totallylazy.transducers;

import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.function.Function;

import static com.googlecode.totallylazy.transducers.State.Continue;
import static com.googlecode.totallylazy.transducers.State.Stop;

/**
 * A Subject that fans each event out to every registered receiver.
 * A receiver that answers {@code Stop} is removed; once no receivers
 * remain the subject itself reports {@code Stop}. The backing
 * CopyOnWriteArrayList makes removal during the for-each loop safe
 * (the iterator sees a snapshot) and registration thread-safe.
 *
 * The previous version duplicated the prune-and-report loop in
 * start/next/error; it is now factored into {@link #broadcast}.
 */
public class ListSubject<T> implements Subject<T> {
    protected final List<Receiver<T>> receivers = new CopyOnWriteArrayList<>();

    @Override
    public AutoCloseable send(Receiver<T> receiver) {
        receivers.add(receiver);
        return EMPTY_CLOSEABLE;
    }

    @Override
    public State start() {
        return broadcast(Receiver::start);
    }

    @Override
    public State next(T item) {
        return broadcast(receiver -> receiver.next(item));
    }

    @Override
    public State error(Throwable throwable) {
        return broadcast(receiver -> receiver.error(throwable));
    }

    @Override
    public void finish() {
        for (Receiver<T> receiver : receivers) {
            receiver.finish();
        }
    }

    /**
     * Delivers one event to every receiver, pruning any that ask to Stop.
     *
     * @param event the per-receiver delivery (start/next/error)
     * @return Stop when no receivers remain, Continue otherwise
     */
    private State broadcast(Function<Receiver<T>, State> event) {
        for (Receiver<T> receiver : receivers) {
            if (event.apply(receiver).equals(Stop)) {
                receivers.remove(receiver);
            }
        }
        return receivers.isEmpty() ? Stop : Continue;
    }
}
package me.Wundero.Ray.config;

/*
 The MIT License (MIT)

 Copyright (c) 2016 Wundero

 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal
 in the Software without restriction, including without limitation the rights
 to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 copies of the Software, and to permit persons to whom the Software is
 furnished to do so, subject to the following conditions:

 The above copyright notice and this permission notice shall be included in all
 copies or substantial portions of the Software.

 THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 SOFTWARE.
 */

import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.function.Consumer;

import org.spongepowered.api.text.Text;
import org.spongepowered.api.text.TextTemplate;
import org.spongepowered.api.text.TextTemplate.Arg;

import com.google.common.reflect.TypeToken;

import me.Wundero.Ray.framework.format.context.FormatContext;
import me.Wundero.Ray.framework.format.location.FormatLocation;
import me.Wundero.Ray.utils.Utils;
import ninja.leaping.configurate.ConfigurationNode;
import ninja.leaping.configurate.objectmapping.ObjectMappingException;

/**
 * Configuration template. Applies to the config as it is built.
 */
public class Template {

	private ConfigurationNode node = null;

	private Template(ConfigurationNode node) {
		this.node = node;
	}

	/**
	 * Start building a new template.
	 */
	public static Template.Builder builder(ConfigurationNode node) {
		return new Builder(node);
	}

	// builder automatically applies to config instead of creating
	// groups/formats without important info
	public static class Builder {
		private Template template;
		ConfigurationNode groupNode = null;

		Builder(ConfigurationNode node) {
			template = new Template(node);
			// All groups live under worlds > all > groups in the config tree.
			groupNode = template.node.getNode("worlds", "all", "groups");
		}

		/**
		 * Build the template.
		 */
		public Template build() {
			return template;
		}

		/**
		 * Add a new group
		 */
		public GroupBuilder withGroup(String name) {
			return new GroupBuilder(groupNode.getNode(name), name, this);
		}

		public static class GroupBuilder {
			private ConfigurationNode node;
			@SuppressWarnings("unused")
			private String name;
			private int priority = 0;
			private String permission = "";
			private List<String> parents = Utils.al();
			private Builder parent;

			GroupBuilder(ConfigurationNode node, String name, Builder parent) {
				this.parent = parent;
				this.node = node;
				this.name = name;
			}

			/**
			 * Build the group. Empty permission/parents are omitted from the
			 * config rather than written as empty values.
			 */
			public Builder build() {
				node.getNode("priority").setValue(priority);
				if (!permission.isEmpty()) {
					node.getNode("permission").setValue(permission);
				}
				if (!parents.isEmpty()) {
					node.getNode("parents").setValue(parents);
				}
				return parent;
			}

			/**
			 * Set the priority of the group
			 */
			public GroupBuilder withPriority(int priority) {
				this.priority = priority;
				return this;
			}

			/**
			 * Set the permission of the group
			 */
			public GroupBuilder withPermission(String permission) {
				if (permission == null) {
					return this;
				}
				this.permission = permission;
				return this;
			}

			/**
			 * Add a parent group
			 */
			public GroupBuilder withParent(String... parent) {
				for (String s : parent) {
					parents.add(s);
				}
				return this;
			}

			/**
			 * Add a default format
			 */
			public GroupBuilder withFormat(DefaultFormat f) {
				return f.applyTo(this);
			}

			/**
			 * Create a new format
			 */
			public FormatBuilder withFormat(String name) {
				return new FormatBuilder(node.getNode("formats", name), name, this);
			}

			public static class FormatBuilder {
				private ConfigurationNode node;
				@SuppressWarnings("unused")
				private String name;
				private Optional<FormatContext> type = Optional.empty();
				private Optional<FormatLocation> loc = Optional.empty();
				private Optional<Consumer<ConfigurationNode>> locdataset = Optional.empty();
				private GroupBuilder parent;
				private TextTemplate template;
				private Map<Arg, String> clicks = Utils.hm();
				private Map<Arg, String> hovers = Utils.hm();

				FormatBuilder(ConfigurationNode node, String name, GroupBuilder parent) {
					this.node = node;
					this.name = name;
					this.parent = parent;
					template = TextTemplate.of();
				}

				/**
				 * Build the format: serializes the accumulated template, the
				 * per-arg click/hover actions, and the optional context and
				 * location (plus location data) into the node.
				 */
				public GroupBuilder build() {
					try {
						node.getNode("format").setValue(TypeToken.of(TextTemplate.class), template);
						ConfigurationNode args = node.getNode("args");
						for (Arg a : clicks.keySet()) {
							args.getNode(a.getName(), "click").setValue(clicks.get(a));
						}
						for (Arg a : hovers.keySet()) {
							args.getNode(a.getName(), "hover").setValue(hovers.get(a));
						}
						if (type.isPresent()) {
							node.getNode("context").setValue(type.get().getName());
						}
						if (loc.isPresent()) {
							// NOTE(review): removed a leftover debug
							// System.out.println of the location name here.
							node.getNode("location").setValue(loc.get().getName());
							if (locdataset.isPresent()) {
								locdataset.get().accept(node.getNode("location-data"));
							}
						}
					} catch (ObjectMappingException e) {
						Utils.printError(e);
					}
					return parent;
				}

				/**
				 * Add a context
				 */
				public FormatBuilder withType(FormatContext type) {
					this.type = Optional.ofNullable(type);
					return this;
				}

				/**
				 * Add an argument
				 */
				public FormatBuilder withArg(String key) {
					return withArg(key, null, null);
				}

				/**
				 * Add an argument
				 */
				public FormatBuilder withArg(String key, boolean optional) {
					// Fixed: previously delegated to withArg(key, null, null),
					// silently discarding the optional flag.
					return withArg(key, null, null, optional);
				}

				/**
				 * Add an argument
				 */
				public FormatBuilder withArg(String key, String click, String hover) {
					return withArg(key, click, hover, false);
				}

				/**
				 * Add an argument
				 */
				public FormatBuilder withArg(String key, String click, String hover, boolean optional) {
					return withArg(TextTemplate.arg(key), click, hover, optional);
				}

				/**
				 * Add an argument
				 */
				public FormatBuilder withArg(DefaultArg arg) {
					return withArg(arg.getBuilder(), arg.getClick(), arg.getHover(), arg.isOptional());
				}

				/**
				 * Add an argument
				 */
				public FormatBuilder withArg(Arg.Builder builder) {
					return withArg(builder, null, null);
				}

				/**
				 * Add an argument
				 */
				public FormatBuilder withArg(Arg.Builder builder, boolean optional) {
					return withArg(builder, null, null, optional);
				}

				/**
				 * Add an argument
				 */
				public FormatBuilder withArg(Arg.Builder builder, String click, String hover) {
					return withArg(builder, click, hover, false);
				}

				/**
				 * Add an argument with optional click/hover actions. All other
				 * withArg overloads funnel into this one.
				 */
				public FormatBuilder withArg(Arg.Builder builder, String click, String hover, boolean optional) {
					builder.optional(optional);
					Arg built = builder.build();
					if (click != null) {
						clicks.put(built, click);
					}
					if (hover != null) {
						hovers.put(built, hover);
					}
					// Reuse the already-built Arg instead of calling
					// builder.build() a second time (the click/hover maps are
					// keyed on the first instance).
					template = template.concat(TextTemplate.of(built));
					return this;
				}

				/**
				 * Add some text
				 */
				public FormatBuilder withText(String... texts) {
					for (String s : texts) {
						template = template.concat(TextTemplate.of(s));
					}
					return this;
				}

				/**
				 * Add some text
				 */
				public FormatBuilder withText(Text... texts) {
					for (Text t : texts) {
						template = template.concat(TextTemplate.of(t));
					}
					return this;
				}

				/**
				 * Set location data for the location
				 */
				public FormatBuilder withLocData(Consumer<ConfigurationNode> task) {
					this.locdataset = Utils.wrap(task);
					return this;
				}

				/**
				 * Set the location for this format
				 */
				public FormatBuilder withLoc(FormatLocation loc) {
					this.loc = Utils.wrap(loc);
					return this;
				}

				/**
				 * Set the node for this format
				 */
				public void setNode(ConfigurationNode node) {
					this.node = node;
				}

				/**
				 * Get the node for this format
				 */
				public ConfigurationNode getNode() {
					return node;
				}
			}
		}
	}
}
/*
 *
 *  Copyright (c) 2017 Otávio Santana and others
 *   All rights reserved. This program and the accompanying materials
 *   are made available under the terms of the Eclipse Public License v1.0
 *   and Apache License v2.0 which accompanies this distribution.
 *   The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html
 *   and the Apache License v2.0 is available at http://www.opensource.org/licenses/apache2.0.php.
 *
 *   You may elect to redistribute this code under either of these licenses.
 *
 *   Contributors:
 *
 *   Otavio Santana
 *
 */
package org.jnosql.diana.couchdb.document;

import org.jnosql.diana.api.Sort;
import org.jnosql.diana.api.document.DocumentCondition;
import org.jnosql.diana.api.document.DocumentQuery;

import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;

import static org.jnosql.diana.couchdb.document.CouchDBConstant.BOOKMARK;

/**
 * A CouchDB specialization of {@link DocumentQuery} that allows query with bookmark which can do pagination.
 *
 * @see CouchDBDocumentQuery#of(DocumentQuery)
 * @see CouchDBDocumentQuery#of(DocumentQuery, String)
 */
public final class CouchDBDocumentQuery implements DocumentQuery {

    // All standard DocumentQuery behavior is delegated to this wrapped query.
    private final DocumentQuery query;

    // CouchDB paging cursor; absent until set explicitly or captured from a response.
    private String bookmark;

    private CouchDBDocumentQuery(DocumentQuery query) {
        this.query = query;
    }

    /**
     * The A string that enables you to specify which page of results you require. Used for paging
     * through result sets. Every query returns an opaque string under the bookmark key that can
     * then be passed back in a query to get the next page of results. If any part of the selector query
     * changes between requests, the results are undefined. Optional, default: null
     *
     * @return the bookmark
     */
    public Optional<String> getBookmark() {
        return Optional.ofNullable(bookmark);
    }

    // Captures the bookmark from a CouchDB response body, if present; also
    // normalizes the map entry to its string form as a side effect.
    void setBookmark(Map<String, Object> json) {
        json.computeIfPresent(BOOKMARK, (k, v) -> this.bookmark = v.toString());
    }

    @Override
    public long getLimit() {
        return query.getLimit();
    }

    @Override
    public long getSkip() {
        return query.getSkip();
    }

    @Override
    public String getDocumentCollection() {
        return query.getDocumentCollection();
    }

    @Override
    public Optional<DocumentCondition> getCondition() {
        return query.getCondition();
    }

    @Override
    public List<Sort> getSorts() {
        return query.getSorts();
    }

    @Override
    public List<String> getDocuments() {
        return query.getDocuments();
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        // Class is final, so an instanceof check is equivalent to getClass()
        // comparison and also covers null.
        if (!(o instanceof CouchDBDocumentQuery)) {
            return false;
        }
        CouchDBDocumentQuery other = (CouchDBDocumentQuery) o;
        return Objects.equals(query, other.query)
                && Objects.equals(bookmark, other.bookmark);
    }

    @Override
    public int hashCode() {
        return Objects.hash(query, bookmark);
    }

    @Override
    public String toString() {
        return "CouchDBDocumentQuery{" + "query=" + query
                + ", bookmark='" + bookmark + '\'' + '}';
    }

    /**
     * returns a new instance of {@link CouchDBDocumentQuery}
     *
     * @param query the {@link DocumentQuery}
     * @return a new instance
     * @throws NullPointerException when query is null
     */
    public static CouchDBDocumentQuery of(DocumentQuery query) {
        return new CouchDBDocumentQuery(Objects.requireNonNull(query, "query is required "));
    }

    /**
     * returns a new instance of {@link CouchDBDocumentQuery}
     *
     * @param query    the {@link DocumentQuery}
     * @param bookmark {@link CouchDBDocumentQuery#bookmark}
     * @return a new instance
     * @throws NullPointerException when there is null parameter
     */
    public static CouchDBDocumentQuery of(DocumentQuery query, String bookmark) {
        CouchDBDocumentQuery result =
                new CouchDBDocumentQuery(Objects.requireNonNull(query, "query is required "));
        result.bookmark = Objects.requireNonNull(bookmark, "bookmark is required ");
        return result;
    }
}
/**
 * BSD-style license; for more info see http://pmd.sourceforge.net/license.html
 */

package net.sourceforge.pmd;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertTrue;

import java.io.File;

import org.junit.Test;

import junit.framework.JUnit4TestAdapter;

/**
 * Unit tests for {@code RuleContext}: report wiring, source-file accessors,
 * and the attribute map (including sharing between contexts).
 */
public class RuleContextTest {

    /** A report set on the context must be returned by the getter. */
    @Test
    public void testReport() {
        RuleContext ctx = new RuleContext();
        // A fresh context starts with an empty report.
        assertEquals(0, ctx.getReport().size());
        Report r = new Report();
        ctx.setReport(r);
        Report r2 = ctx.getReport();
        assertEquals("report object mismatch", r, r2);
    }

    /** getSourceCodeFilename() reflects just the name portion of the file. */
    @Test
    public void testSourceCodeFilename() {
        RuleContext ctx = new RuleContext();
        assertEquals("filename should be empty", "", ctx.getSourceCodeFilename());
        // Directory components are stripped: only the leaf name is kept.
        ctx.setSourceCodeFile(new File("dir/foo.java"));
        assertEquals("filename mismatch", "foo.java", ctx.getSourceCodeFilename());
    }

    /** The file object itself round-trips through the setter/getter. */
    @Test
    public void testSourceCodeFile() {
        RuleContext ctx = new RuleContext();
        assertNull("file should be null", ctx.getSourceCodeFile());
        ctx.setSourceCodeFile(new File("somefile.java"));
        assertEquals("filename mismatch", new File("somefile.java"), ctx.getSourceCodeFile());
    }

    /**
     * setAttribute() behaves like putIfAbsent: the first set wins and a
     * second set of the same key is rejected; removeAttribute() returns the
     * stored value.
     */
    @Test
    public void testAttributes() {
        RuleContext ctx1 = new RuleContext();
        Object obj1 = new Object();
        Object obj2 = new Object();
        assertNull("attribute should be null", ctx1.getAttribute("attribute"));
        boolean set = ctx1.setAttribute("attribute", obj1);
        assertTrue("attribute should have been set", set);
        assertNotNull("attribute should not be null", ctx1.getAttribute("attribute"));
        assertSame("attribute should be expected instance", ctx1.getAttribute("attribute"), obj1);
        // Setting an existing key must not overwrite the stored value.
        set = ctx1.setAttribute("attribute", obj2);
        assertFalse("attribute should not have been set", set);
        assertSame("attribute should be expected instance", ctx1.getAttribute("attribute"), obj1);
        Object value = ctx1.removeAttribute("attribute");
        assertSame("attribute value should be expected instance", value, obj1);
        assertNull("attribute should be null", ctx1.getAttribute("attribute"));
    }

    /**
     * A context constructed from another context shares the same attribute
     * map: sets, removes, and mutations of stored values are visible through
     * both contexts.
     */
    @Test
    public void testSharedAttributes() {
        RuleContext ctx1 = new RuleContext();
        // ctx2 is built from ctx1, so both see the same attribute storage.
        RuleContext ctx2 = new RuleContext(ctx1);
        StringBuilder obj1 = new StringBuilder();
        StringBuilder obj2 = new StringBuilder();
        ctx1.setAttribute("attribute1", obj1);
        ctx2.setAttribute("attribute2", obj2);
        assertNotNull("attribute should not be null", ctx1.getAttribute("attribute1"));
        assertNotNull("attribute should not be null", ctx1.getAttribute("attribute2"));
        assertNotNull("attribute should not be null", ctx2.getAttribute("attribute1"));
        assertNotNull("attribute should not be null", ctx2.getAttribute("attribute2"));
        assertSame("attribute should be expected instance", ctx1.getAttribute("attribute1"), obj1);
        assertSame("attribute should be expected instance", ctx1.getAttribute("attribute2"), obj2);
        assertSame("attribute should be expected instance", ctx2.getAttribute("attribute1"), obj1);
        assertSame("attribute should be expected instance", ctx2.getAttribute("attribute2"), obj2);

        // Removal through one context is observed through the other.
        ctx1.removeAttribute("attribute1");
        assertNull("attribute should be null", ctx1.getAttribute("attribute1"));
        assertNull("attribute should be null", ctx2.getAttribute("attribute1"));
        assertNotNull("attribute should not be null", ctx1.getAttribute("attribute2"));
        assertNotNull("attribute should not be null", ctx2.getAttribute("attribute2"));

        // Mutating the stored object is visible from both contexts, proving
        // the value is shared by reference, not copied.
        StringBuilder value = (StringBuilder) ctx1.getAttribute("attribute2");
        assertEquals("attribute value should be empty", "", value.toString());
        value.append("x");
        StringBuilder value1 = (StringBuilder) ctx1.getAttribute("attribute2");
        assertEquals("attribute value should be 'x'", "x", value1.toString());
        StringBuilder value2 = (StringBuilder) ctx2.getAttribute("attribute2");
        assertEquals("attribute value should be 'x'", "x", value2.toString());
    }

    // Adapter so JUnit 3 style runners can execute these JUnit 4 tests.
    public static junit.framework.Test suite() {
        return new JUnit4TestAdapter(RuleContextTest.class);
    }
}
/*
 * Copyright 2017-present Open Networking Foundation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.atomix.protocols.raft.partition.impl;

import com.google.common.base.Preconditions;
import io.atomix.cluster.MemberId;
import io.atomix.cluster.messaging.ClusterMessagingService;
import io.atomix.primitive.session.SessionId;
import io.atomix.protocols.raft.protocol.CloseSessionRequest;
import io.atomix.protocols.raft.protocol.CloseSessionResponse;
import io.atomix.protocols.raft.protocol.CommandRequest;
import io.atomix.protocols.raft.protocol.CommandResponse;
import io.atomix.protocols.raft.protocol.HeartbeatRequest;
import io.atomix.protocols.raft.protocol.HeartbeatResponse;
import io.atomix.protocols.raft.protocol.KeepAliveRequest;
import io.atomix.protocols.raft.protocol.KeepAliveResponse;
import io.atomix.protocols.raft.protocol.MetadataRequest;
import io.atomix.protocols.raft.protocol.MetadataResponse;
import io.atomix.protocols.raft.protocol.OpenSessionRequest;
import io.atomix.protocols.raft.protocol.OpenSessionResponse;
import io.atomix.protocols.raft.protocol.PublishRequest;
import io.atomix.protocols.raft.protocol.QueryRequest;
import io.atomix.protocols.raft.protocol.QueryResponse;
import io.atomix.protocols.raft.protocol.RaftClientProtocol;
import io.atomix.protocols.raft.protocol.ResetRequest;
import io.atomix.utils.serializer.Serializer;

import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.Executor;
import java.util.function.Consumer;
import java.util.function.Function;

/**
 * Raft client protocol that uses a cluster communicator.
 */
public class RaftClientCommunicator implements RaftClientProtocol {

  // Maps each protocol message type to a messaging subject (optionally
  // namespaced by the prefix passed to the constructor).
  private final RaftMessageContext context;
  // Encodes outgoing requests and decodes incoming responses.
  private final Serializer serializer;
  // Underlying transport used for all request/response and pub/sub traffic.
  private final ClusterMessagingService clusterCommunicator;

  public RaftClientCommunicator(Serializer serializer, ClusterMessagingService clusterCommunicator) {
    // No prefix: subjects use the default (un-namespaced) names.
    this(null, serializer, clusterCommunicator);
  }

  public RaftClientCommunicator(String prefix, Serializer serializer, ClusterMessagingService clusterCommunicator) {
    this.context = new RaftMessageContext(prefix);
    this.serializer = Preconditions.checkNotNull(serializer, "serializer cannot be null");
    this.clusterCommunicator = Preconditions.checkNotNull(clusterCommunicator, "clusterCommunicator cannot be null");
  }

  /**
   * Serializes {@code request}, sends it to {@code memberId} on the given
   * subject, and deserializes the asynchronous response.
   */
  private <T, U> CompletableFuture<U> sendAndReceive(String subject, T request, MemberId memberId) {
    return clusterCommunicator.send(subject, request, serializer::encode, serializer::decode, memberId);
  }

  /** Opens a session on the given member. */
  @Override
  public CompletableFuture<OpenSessionResponse> openSession(MemberId memberId, OpenSessionRequest request) {
    return sendAndReceive(context.openSessionSubject, request, memberId);
  }

  /** Closes a session on the given member. */
  @Override
  public CompletableFuture<CloseSessionResponse> closeSession(MemberId memberId, CloseSessionRequest request) {
    return sendAndReceive(context.closeSessionSubject, request, memberId);
  }

  /** Sends a session keep-alive to the given member. */
  @Override
  public CompletableFuture<KeepAliveResponse> keepAlive(MemberId memberId, KeepAliveRequest request) {
    return sendAndReceive(context.keepAliveSubject, request, memberId);
  }

  /** Submits a read-only query to the given member. */
  @Override
  public CompletableFuture<QueryResponse> query(MemberId memberId, QueryRequest request) {
    return sendAndReceive(context.querySubject, request, memberId);
  }

  /** Submits a state-changing command to the given member. */
  @Override
  public CompletableFuture<CommandResponse> command(MemberId memberId, CommandRequest request) {
    return sendAndReceive(context.commandSubject, request, memberId);
  }

  /** Requests cluster metadata from the given member. */
  @Override
  public CompletableFuture<MetadataResponse> metadata(MemberId memberId, MetadataRequest request) {
    return sendAndReceive(context.metadataSubject, request, memberId);
  }

  /** Registers the handler invoked for incoming heartbeat requests. */
  @Override
  public void registerHeartbeatHandler(Function<HeartbeatRequest, CompletableFuture<HeartbeatResponse>> handler) {
    clusterCommunicator.subscribe(context.heartbeatSubject, serializer::decode, handler, serializer::encode);
  }

  /** Removes the heartbeat request handler. */
  @Override
  public void unregisterHeartbeatHandler() {
    clusterCommunicator.unsubscribe(context.heartbeatSubject);
  }

  /**
   * Fire-and-forget: multicasts a session reset to the given members on the
   * session-specific reset subject.
   */
  @Override
  public void reset(Set<MemberId> members, ResetRequest request) {
    clusterCommunicator.multicast(context.resetSubject(request.session()), request, serializer::encode, members);
  }

  /** Subscribes to publish events for the given session on the supplied executor. */
  @Override
  public void registerPublishListener(SessionId sessionId, Consumer<PublishRequest> listener, Executor executor) {
    clusterCommunicator.subscribe(context.publishSubject(sessionId.id()), serializer::decode, listener, executor);
  }

  /** Removes the publish-event subscription for the given session. */
  @Override
  public void unregisterPublishListener(SessionId sessionId) {
    clusterCommunicator.unsubscribe(context.publishSubject(sessionId.id()));
  }
}
package com.hasindu.userdetailsmanagamentsystem.service;

import com.hasindu.userdetailsmanagamentsystem.dto.LoginHistoryDTO;
import org.springframework.stereotype.Service;

import java.util.List;

/**
 * Service contract for recording and querying user login history.
 */
@Service
public interface LoginHistoryService {

    /** Returns every recorded login-history entry. */
    List<LoginHistoryDTO> getAllLoginHistories();

    /** Persists a new login-history entry. */
    void addLoginHistory(LoginHistoryDTO loginHistoryDTO);

    /** Returns the login-history entries recorded for the given username. */
    List<LoginHistoryDTO> findUserHistoriesByUserName(String userName);
}
package com.iut.james.dao;

import com.iut.james.beans.Command;
import com.iut.james.beans.User;
import org.springframework.data.jpa.repository.JpaRepository;

import java.util.List;

/**
 * Spring Data JPA repository for {@link Command} entities.
 */
public interface CommandRepository extends JpaRepository<Command, Integer> {

    // NOTE(review): re-declares JpaRepository#findAll() with an identical
    // signature; kept for source compatibility but it adds nothing.
    List<Command> findAll();

    /**
     * Derived query: commands whose {@code idUser} matches the given user,
     * ordered by their {@code date} property.
     */
    List<Command> findAllByIdUserOrderByDate(User user);
}
package cn.leiyy.modules.system.service.impl;

import com.baomidou.mybatisplus.mapper.EntityWrapper;
import com.baomidou.mybatisplus.mapper.Wrapper;
import cn.leiyy.common.exception.BizExceptionEnum;
import cn.leiyy.common.exception.BussinessException;
import cn.leiyy.modules.system.dao.DictDao;
import cn.leiyy.modules.system.service.IDictService;
import cn.leiyy.common.persistence.dao.DictMapper;
import cn.leiyy.common.persistence.model.Dict;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;

import javax.annotation.Resource;
import java.util.List;
import java.util.Map;

import static cn.leiyy.common.constant.factory.MutiStrFactory.*;

/**
 * Dictionary service: dictionaries are stored as a two-level Dict tree where
 * the parent row has pid = 0 and each item row points at the parent via pid.
 * All operations run inside a transaction (class-level @Transactional).
 */
@Service
@Transactional
public class DictServiceImpl implements IDictService {

    @Resource
    DictDao dictDao;

    @Resource
    DictMapper dictMapper;

    /**
     * Creates a dictionary named {@code dictName} with items parsed from the
     * encoded {@code dictValues} string.
     *
     * @throws BussinessException DICT_EXISTED if a top-level dictionary with
     *         this name already exists, or DICT_MUST_BE_NUMBER if an item key
     *         is not numeric
     */
    @Override
    public void addDict(String dictName, String dictValues) {
        // Check whether a top-level dictionary (pid = 0) with this name already exists
        List<Dict> dicts = dictMapper.selectList(new EntityWrapper<Dict>().eq("name", dictName).and().eq("pid", 0));
        if(dicts != null && dicts.size() > 0){
            throw new BussinessException(BizExceptionEnum.DICT_EXISTED);
        }
        // Parse dictValues into key/value item pairs
        List<Map<String, String>> items = parseKeyValue(dictValues);
        // Insert the parent dictionary row
        Dict dict = new Dict();
        dict.setName(dictName);
        dict.setNum(0);
        dict.setPid(0);
        this.dictMapper.insert(dict);
        // Insert each parsed item as a child row of the new dictionary
        for (Map<String, String> item : items) {
            String num = item.get(MUTI_STR_KEY);
            String name = item.get(MUTI_STR_VALUE);
            Dict itemDict = new Dict();
            itemDict.setPid(dict.getId());
            itemDict.setName(name);
            try {
                // Item keys must be numeric ordinals
                itemDict.setNum(Integer.valueOf(num));
            }catch (NumberFormatException e){
                throw new BussinessException(BizExceptionEnum.DICT_MUST_BE_NUMBER);
            }
            this.dictMapper.insert(itemDict);
        }
    }

    /**
     * Replaces a dictionary by deleting it and re-creating it from scratch.
     * NOTE(review): the dictionary's database id changes as a result —
     * confirm no other table references the old id.
     */
    @Override
    public void editDict(Integer dictId, String dictName, String dicts) {
        // Delete the existing dictionary and its items
        this.delteDict(dictId);
        // Re-create it with the new name and values
        this.addDict(dictName,dicts);
    }

    /**
     * Deletes a dictionary and all of its item rows.
     * (Method name typo "delteDict" is preserved: it implements IDictService.)
     */
    @Override
    public void delteDict(Integer dictId) {
        // Delete the child item rows of this dictionary
        Wrapper<Dict> dictEntityWrapper = new EntityWrapper<>();
        dictEntityWrapper = dictEntityWrapper.eq("pid", dictId);
        dictMapper.delete(dictEntityWrapper);
        // Delete the dictionary row itself
        dictMapper.deleteById(dictId);
    }
}
package org.usfirst.frc.team2832.robot.commands; import org.usfirst.frc.team2832.robot.Robot; import edu.wpi.first.wpilibj.command.Command; public class TurnGyro extends Command { double startAngle; double targetAngle; int finishedCounts; final double TARGET_ANGLE = 2.0; final int GOAL_COUNTS = 12; //each count is 20ms public TurnGyro(double targAngle) { // Use requires() here to declare subsystem dependencies requires(Robot.DriveTrain); targetAngle = targAngle; } // Called just before this Command runs the first time @Override protected void initialize() { startAngle = Robot.DriveTrain.getGyroAngle(); finishedCounts = 0; } // Called repeatedly when this Command is scheduled to run @Override protected void execute() { double turn; //start with amount of error turn = (Robot.DriveTrain.getGyroAngle() - startAngle) - targetAngle; //compensate (P = 0.01 here, so 1 degree of error = 0.01% motor command (40* off = 40% motor command)) turn = turn * 0.01; //range check the value to make it between -0.4 to 0.4 turn = Math.min(Math.max(-0.4, turn), 0.4); Robot.DriveTrain.arcadeDrive(0, turn); } // Make this return true when this Command no longer needs to run execute() @Override protected boolean isFinished() { double error = Math.abs((Robot.DriveTrain.getGyroAngle() - startAngle) - targetAngle); if(error < TARGET_ANGLE) { if (finishedCounts > GOAL_COUNTS) { //met our goal, exit return true; } else { //not at target counts yet, increment to get there finishedCounts++; return false; } } else { //not in range, reset exit counts finishedCounts = 0; return false; } } // Called once after isFinished returns true @Override protected void end() { //stop the motors when leaving Robot.DriveTrain.arcadeDrive(0, 0); } // Called when another command which requires one or more of the same // subsystems is scheduled to run @Override protected void interrupted() { } }
package com.conveyal.data.census;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.File;
import java.util.function.Consumer;
import java.util.stream.Stream;

/**
 * Import data from the US Census into a seamless store in S3 or on disk.
 */
public class CensusLoader {
    protected static final Logger LOG = LoggerFactory.getLogger(CensusLoader.class);

    /**
     * Entry point.
     *
     * @param args args[0] is the input directory, expected to contain "tiger" (*.shp),
     *             "workforce" (*.csv.gz) and "jobs" (*.csv.gz) subdirectories; optional
     *             args[1] is an S3 bucket name to write tiles to. With no second argument,
     *             tiles are written to &lt;indir&gt;/tiles on disk.
     * @throws IllegalArgumentException if no input directory is given or a required
     *                                  subdirectory is missing/unreadable
     */
    public static void main (String... args) throws Exception {
        if (args.length < 1) {
            throw new IllegalArgumentException("usage: CensusLoader <input-dir> [s3-bucket]");
        }
        File indir = new File(args[0]);
        ShapeDataStore store = new ShapeDataStore();

        LOG.info("Loading TIGER (geometry)");
        forEachFile(new File(indir, "tiger"), ".shp", f -> {
            TigerLineSource src = new TigerLineSource(f);
            try {
                src.load(store);
            } catch (Exception e) {
                throw new RuntimeException(e);
            }
        });
        LOG.info("TIGER done");

        LOG.info("Loading LODES workforce data");
        forEachFile(new File(indir, "workforce"), ".csv.gz", f -> {
            try {
                new LodesSource(f, LodesSource.LodesType.RESIDENCE).load(store);
            } catch (Exception e) {
                throw new RuntimeException(e);
            }
        });
        LOG.info("Workforce done");

        LOG.info("Loading LODES jobs data");
        forEachFile(new File(indir, "jobs"), ".csv.gz", f -> {
            try {
                new LodesSource(f, LodesSource.LodesType.WORKPLACE).load(store);
            } catch (Exception e) {
                throw new RuntimeException(e);
            }
        });
        LOG.info("Jobs done");

        if (args.length == 1)
            store.writeTiles(new File(indir, "tiles"));
        else
            // write to s3
            store.writeTilesToS3(args[1]);

        store.close();
    }

    /**
     * Applies {@code loader} to every file in {@code dir} whose name ends with {@code suffix},
     * logging each file as it is processed.
     *
     * <p>Fails fast with a clear message when the directory is missing or unreadable:
     * {@link File#listFiles()} returns null in that case, which previously surfaced as a
     * bare NullPointerException inside {@code Stream.of}.
     */
    private static void forEachFile(File dir, String suffix, Consumer<File> loader) {
        File[] files = dir.listFiles();
        if (files == null) {
            throw new IllegalArgumentException("Not a readable directory: " + dir);
        }
        Stream.of(files)
                .filter(f -> f.getName().endsWith(suffix))
                .forEach(f -> {
                    LOG.info("Loading file {}", f);
                    loader.accept(f);
                });
    }
}
/*
 * Copyright (C) 2014 Indeed Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the
 * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.indeed.lsmtree.core;

import com.google.common.base.Preconditions;
import com.google.common.base.Throwables;
import com.google.common.collect.AbstractIterator;
import com.google.common.io.CountingOutputStream;
import com.google.common.io.LittleEndianDataInputStream;
import com.google.common.io.LittleEndianDataOutputStream;
import com.indeed.util.core.io.Closeables2;
import com.indeed.util.core.reference.SharedReference;
import com.indeed.util.core.shell.PosixFileOperations;
import com.indeed.util.io.BufferedFileDataOutputStream;
import com.indeed.util.mmap.Memory;
import com.indeed.util.mmap.MemoryDataInput;
import com.indeed.util.mmap.MMapBuffer;
import com.indeed.util.serialization.LongSerializer;
import com.indeed.util.serialization.Serializer;
import it.unimi.dsi.fastutil.chars.CharArrayList;
import org.apache.commons.collections.comparators.ComparableComparator;
import org.apache.log4j.Logger;

import javax.annotation.Nullable;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.Closeable;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteOrder;
import java.nio.channels.FileChannel;
import java.util.Comparator;
import java.util.Iterator;

/**
 * An immutable, on-disk B-tree index, written once by {@link Writer} and read back
 * memory-mapped by {@link Reader}.
 *
 * File layout of index.bin (little-endian throughout): the value (leaf) level is written
 * first, then each index level bottom-up, and finally a fixed-size {@link Header} at the
 * very end of the file. Each block is: an int key count, then one 2-byte char offset per
 * key, then the concatenated key/value records. Key offsets are stored as chars, which is
 * why block sizes must stay below 65536.
 *
 * @author jplaisance
 */
public final class ImmutableBTreeIndex {

    private static final Logger log = Logger.getLogger(ImmutableBTreeIndex.class);

    /**
     * Static utility for writing an index from a sorted entry iterator.
     * Not instantiable.
     */
    public static final class Writer {
        //no reason to instantiate this ever
        private Writer() {}

        /**
         * Writes all entries from {@code iterator} (which must already be sorted by key —
         * NOTE(review): assumed from the B-tree construction; not checked here) into
         * {@code file}/index.bin.
         *
         * @param file          directory to create the index in
         * @param iterator      sorted stream of entries to persist
         * @param keySerializer serializer for keys
         * @param valueSerializer serializer for values
         * @param blocksize     maximum block size in bytes; must be < 65536 because key
         *                      offsets within a block are stored as 2-byte chars
         * @param keepDeletions if true, tombstones are written with a deletion marker byte;
         *                      if false, deleted entries are skipped entirely
         * @throws IOException on any write failure
         */
        public static <K, V> void write(
                File file,
                Iterator<Generation.Entry<K,V>> iterator,
                Serializer<K> keySerializer,
                Serializer<V> valueSerializer,
                final int blocksize,
                boolean keepDeletions
        ) throws IOException {
            if (blocksize > 65536) throw new IllegalArgumentException("block size must be less than 65536");
            file.mkdirs();
            final BufferedFileDataOutputStream fileOut = new BufferedFileDataOutputStream(new File(file, "index.bin"));
            // CountingOutputStream tracks byte positions so block start addresses can be recorded.
            final CountingOutputStream out = new CountingOutputStream(fileOut);
            //tempFile is deleted in writeIndex
            final File tempFile = File.createTempFile("tmp", ".bin");
            // Write the leaf (value) level; the temp file receives (first key, block address)
            // pairs that seed the index levels above it.
            final WriteLevelResult result = writeLevel(out, tempFile, iterator, keySerializer, valueSerializer, blocksize, keepDeletions);
            final int tmpCount = result.tmpCount;
            final long size = result.size;
            final long valueLevelLength = out.getCount();
            // Build the index levels bottom-up, then append the header at the end of the file.
            final Header header = writeIndex(out, tempFile, tmpCount, keySerializer, blocksize);
            header.valueLevelLength = valueLevelLength;
            header.size = size;
            header.hasDeletions = keepDeletions;
            new HeaderSerializer().write(header, new LittleEndianDataOutputStream(out));
            // Sync to disk before closing so a successful return implies durability.
            fileOut.sync();
            out.close();
        }

        /**
         * Writes index levels bottom-up until a level fits in a single block (or is empty),
         * which becomes the root. Each pass reads the (key, address) pairs produced by the
         * previous level from a temp file and writes the next level above it.
         *
         * @return the partially-filled header (indexLevels, rootLevelStartAddress, fileLength)
         */
        private static Header writeIndex(
                final CountingOutputStream counter,
                File tempFile,
                int tmpCount,
                Serializer keySerializer,
                int blocksize
        ) throws IOException {
            if (tmpCount == 0) {
                // Empty index: no levels at all, just a header.
                tempFile.delete();
                final Header header = new Header();
                header.indexLevels = 0;
                header.rootLevelStartAddress = 0;
                header.fileLength = Header.length();
                return header;
            }
            TempFileIterator tmpIterator = new TempFileIterator(tempFile, tmpCount, keySerializer);
            int indexLevels = 0;
            final LongSerializer longSerializer = new LongSerializer();
            while (true) {
                final long levelStart = counter.getCount();
                indexLevels++;
                final File nextTempFile = File.createTempFile("tmp", ".bin");
                // Index levels never keep deletions; values at these levels are block addresses.
                final WriteLevelResult result = writeLevel(counter, nextTempFile, tmpIterator, keySerializer, longSerializer, blocksize, false);
                tmpCount = result.tmpCount;
                tempFile.delete();
                if (tmpCount <= 1) {
                    // This level fit in one block: it is the root.
                    nextTempFile.delete();
                    final Header header = new Header();
                    header.indexLevels = indexLevels;
                    header.rootLevelStartAddress = levelStart;
                    header.fileLength = counter.getCount()+Header.length();
                    return header;
                } else {
                    tempFile = nextTempFile;
                    tmpIterator = new TempFileIterator(tempFile, tmpCount, keySerializer);
                }
            }
        }

        /**
         * Writes one level of the tree as a sequence of blocks, packing entries into each
         * block until the next entry would exceed {@code blocksize}. For every block started,
         * its first key and its file address are appended to {@code tempFile} so the caller
         * can build the level above.
         *
         * @return the number of blocks written (tmpCount) and the number of live entries (size)
         */
        private static <K,V> WriteLevelResult writeLevel(
                final CountingOutputStream counter,
                final File tempFile,
                final Iterator<Generation.Entry<K,V>> iterator,
                final Serializer<K> keySerializer,
                final Serializer<V> valueSerializer,
                final int blocksize,
                final boolean keepDeletions
        ) throws IOException {
            Generation.Entry<K,V> next;
            if (!iterator.hasNext()) {
                return new WriteLevelResult(0, 0);
            }
            next = iterator.next();
            final LittleEndianDataOutputStream tmpOut = new LittleEndianDataOutputStream(new BufferedOutputStream(new FileOutputStream(tempFile), 131072));
            // Scratch buffer used to measure each serialized entry before committing it to the block.
            final ByteArrayOutputStream buffer = new ByteArrayOutputStream();
            final LittleEndianDataOutputStream bufferDataOutput = new LittleEndianDataOutputStream(buffer);
            final ByteArrayOutputStream currentBlock = new ByteArrayOutputStream(blocksize);
            final CharArrayList keyOffsets = new CharArrayList();
            int tmpCount = 0;
            boolean done = false;
            final LittleEndianDataOutputStream out = new LittleEndianDataOutputStream(counter);
            long count = 0;
            outer: while (!done) {
                currentBlock.reset();
                keyOffsets.clear();
                if (!keepDeletions) {
                    // Skip leading tombstones so a block never starts with a deleted entry.
                    while (next.isDeleted()) {
                        if (!iterator.hasNext()) break outer;
                        next = iterator.next();
                    }
                }
                // Record (first key of block, block start address) for the level above.
                keySerializer.write(next.getKey(), tmpOut);
                tmpOut.writeLong(counter.getCount());
                tmpCount++;
                while (true) {
                    buffer.reset();
                    final boolean skipDeleted = updateBuffer(next, keySerializer, valueSerializer, keepDeletions, bufferDataOutput);
                    // 4 bytes key count + 2 bytes per existing offset + 2 bytes for this offset
                    // + current payload + this entry must fit in one block.
                    if (4+2*keyOffsets.size()+2+currentBlock.size()+buffer.size() > blocksize) {
                        if (currentBlock.size() == 0) {
                            throw new IllegalArgumentException("key value pair is greater than block size");
                        }
                        break;
                    }
                    if (!skipDeleted) {
                        keyOffsets.add((char)currentBlock.size());
                        buffer.writeTo(currentBlock);
                        count++;
                    }
                    if (!iterator.hasNext()) {
                        done = true;
                        break;
                    }
                    next = iterator.next();
                }
                if (keyOffsets.size() > 0) {
                    final long start = counter.getCount();
                    out.writeInt(keyOffsets.size());
                    for (int i = 0; i < keyOffsets.size(); i++) {
                        out.writeChar(keyOffsets.getChar(i));
                    }
                    currentBlock.writeTo(out);
                    if (counter.getCount()-start > blocksize) {
                        log.error("too big");
                    }
                }
            }
            tmpOut.close();
            return new WriteLevelResult(tmpCount, count);
        }

        /** Result of writing one level: block count and live entry count. */
        private static final class WriteLevelResult {
            final int tmpCount;
            final long size;

            private WriteLevelResult(final int tmpCount, final long size) {
                this.tmpCount = tmpCount;
                this.size = size;
            }
        }

        /**
         * Serializes a single entry into {@code bufferDataOutput}. When deletions are kept,
         * the record is key + 1-byte deleted flag + (value if live); otherwise deleted
         * entries write nothing and the method reports them as skipped.
         *
         * @return true if the entry was a tombstone that was skipped (only possible when
         *         {@code keepDeletions} is false)
         */
        private static<K,V> boolean updateBuffer(
                final Generation.Entry<K,V> entry,
                Serializer<K> keySerializer,
                Serializer<V> valueSerializer,
                final boolean keepDeletions,
                final DataOutput bufferDataOutput
        ) throws IOException {
            final boolean skipDeleted;
            if (keepDeletions) {
                skipDeleted = false;
                keySerializer.write(entry.getKey(), bufferDataOutput);
                if (entry.isDeleted()) {
                    bufferDataOutput.writeByte(1);
                } else {
                    bufferDataOutput.writeByte(0);
                    valueSerializer.write(entry.getValue(), bufferDataOutput);
                }
            } else {
                if (entry.isDeleted()) {
                    skipDeleted = true;
                } else {
                    skipDeleted = false;
                    keySerializer.write(entry.getKey(), bufferDataOutput);
                    valueSerializer.write(entry.getValue(), bufferDataOutput);
                }
            }
            return skipDeleted;
        }
    }

    /**
     * Iterates the (key, address) pairs written to a temp file by
     * {@link Writer#writeLevel}. Closes its stream once exhausted; {@code remove}
     * is unsupported.
     */
    private static final class TempFileIterator<K> implements Iterator<Generation.Entry<K, Long>> {
        private final LittleEndianDataInputStream in;
        private final int tmpCount;
        private final Serializer<K> keySerializer;
        private final LongSerializer longSerializer = new LongSerializer();
        private int i = 0;

        public TempFileIterator(
                File tempFile,
                int tmpCount,
                Serializer<K> keySerializer
        ) throws FileNotFoundException {
            this.tmpCount = tmpCount;
            this.keySerializer = keySerializer;
            in = new LittleEndianDataInputStream(new BufferedInputStream(new FileInputStream(tempFile), 131072));
        }

        @Override
        public boolean hasNext() {
            if (i < tmpCount) return true;
            // Exhausted: release the underlying stream eagerly.
            Closeables2.closeQuietly(in, log);
            return false;
        }

        @Override
        public Generation.Entry<K, Long> next() {
            i++;
            try {
                final K key = keySerializer.read(in);
                final Long value = longSerializer.read(in);
                return Generation.Entry.create(key, value);
            } catch (IOException e) {
                throw new RuntimeException(e);
            }
        }

        @Override
        public void remove() {
            throw new UnsupportedOperationException();
        }
    }

    /**
     * Memory-mapped reader over a written index. Validates the trailing header on open
     * and exposes point lookups, neighbor queries (lower/floor/ceil/higher), range views
     * and forward/reverse iteration. The mapped buffer is shared (refcounted via
     * {@code stuffToClose}) with derived range/reverse views.
     */
    public static final class Reader<K, V> implements Generation<K,V>, Closeable {
        private final MMapBuffer buffer;
        private final Level<K, V> rootLevel;
        private final long rootLevelStartAddress;
        private final boolean hasDeletions;
        private final long sizeInBytes;
        private final long size;
        private final File indexFile;
        private final Comparator<K> comparator;
        private final SharedReference<Closeable> stuffToClose;

        /** Opens with the keys' natural ordering. */
        public Reader(File file, Serializer<K> keySerializer, Serializer<V> valueSerializer, final boolean mlockFiles) throws IOException {
            this(file, new ComparableComparator(), keySerializer, valueSerializer, mlockFiles);
        }

        /**
         * Opens {@code file}/index.bin read-only via mmap, reads and sanity-checks the
         * header at the end of the file, and optionally mlocks the mapping.
         *
         * @throws IOException if the file is shorter than a header or its recorded length
         *                     does not match the actual file length (inconsistent file)
         */
        public Reader(File file, Comparator<K> comparator, Serializer<K> keySerializer, Serializer<V> valueSerializer, final boolean mlockFiles) throws IOException {
            this.comparator = comparator;
            indexFile = new File(file, "index.bin");
            sizeInBytes = indexFile.length();
            buffer = new MMapBuffer(indexFile, FileChannel.MapMode.READ_ONLY, ByteOrder.LITTLE_ENDIAN);
            try {
                stuffToClose = SharedReference.create((Closeable)buffer);
                final MemoryDataInput in = new MemoryDataInput(buffer.memory());
                if (sizeInBytes < Header.length()) {
                    throw new IOException("file is less than header length bytes");
                }
                // The header occupies the last Header.length() bytes of the file.
                final byte[] headerBytes = new byte[Header.length()];
                in.seek(sizeInBytes - Header.length());
                in.readFully(headerBytes);
                final LittleEndianDataInputStream headerStream = new LittleEndianDataInputStream(new ByteArrayInputStream(headerBytes));
                final Header header = new HeaderSerializer().read(headerStream);
                hasDeletions = header.hasDeletions;
                size = header.size;
                if (header.fileLength != sizeInBytes) {
                    log.error(header.fileLength);
                    throw new IOException("file length written to last 8 bytes of file does not match file length, file is inconsistent");
                }
                rootLevel = Level.build(buffer.memory(), keySerializer, valueSerializer, comparator, header.hasDeletions, header.indexLevels);
                rootLevelStartAddress = header.rootLevelStartAddress;
                if (mlockFiles) buffer.mlock(0, buffer.memory().length());
            } catch (Throwable t) {
                // Don't leak the mapping if anything above fails.
                Closeables2.closeQuietly(buffer, log);
                Throwables.propagateIfInstanceOf(t, IOException.class);
                throw Throwables.propagate(t);
            }
        }

        /** Builds a Block view positioned at the root of the tree. */
        private Block<K,V> rootBlock() {
            return new Block<K, V>(null, 0, rootLevel, rootLevel.getBlock(rootLevelStartAddress));
        }

        /**
         * Point lookup. Returns null when the key is absent. InternalError from the mmap
         * layer indicates the file shrank after open and is rethrown with context.
         */
        @Nullable
        public Entry<K, V> get(K key) {
            try {
                final Block<K, V> valueBlock = rootBlock().getValueBlock(key);
                if (valueBlock == null) return null;
                return valueBlock.get(key);
            } catch (InternalError e) {
                throw new RuntimeException("file "+indexFile.getAbsolutePath()+" length is currently less than MMapBuffer length, it has been modified after open. this is a huge problem.", e);
            }
        }

        /** Returns null if absent, TRUE if present-as-tombstone, FALSE if present-live. */
        @Override
        public @Nullable Boolean isDeleted(final K key) {
            final Entry<K,V> entry = get(key);
            return entry == null ? null : (entry.isDeleted() ? Boolean.TRUE : Boolean.FALSE);
        }

        /** Greatest entry strictly less than {@code key}, or null. */
        @Nullable
        public Entry<K, V> lower(K key) throws IOException {
            return neighbor(key, lower);
        }

        /** Greatest entry less than or equal to {@code key}, or null. */
        @Nullable
        public Entry<K, V> floor(K key) throws IOException {
            return neighbor(key, floor);
        }

        /** Least entry greater than or equal to {@code key}, or null. */
        @Nullable
        public Entry<K, V> ceil(K key) throws IOException {
            return neighbor(key, ceil);
        }

        /** Least entry strictly greater than {@code key}, or null. */
        @Nullable
        public Entry<K, V> higher(K key) throws IOException {
            return neighbor(key, higher);
        }

        /** Shared implementation of lower/floor/ceil/higher, parameterized by index offsets. */
        @Nullable
        private Entry<K, V> neighbor(K key, NeighborModifier modifier) throws IOException {
            try {
                final Block<K, V> valueBlock = rootBlock().getValueBlock(key);
                if (valueBlock == null) return null;
                return valueBlock.neighbor(key, modifier);
            } catch (InternalError e) {
                throw new IOException("file "+indexFile.getAbsolutePath()+" length is currently less than MMapBuffer length, it has been modified after open. this is a huge problem.", e);
            }
        }

        /** First (smallest-key) entry in the index. */
        public Entry<K, V> first() throws IOException {
            try {
                final Block valueBlock = rootBlock().getFirstValueBlock();
                return valueBlock.dataBlock.getEntry(0);
            } catch (InternalError e) {
                throw new IOException("file "+indexFile.getAbsolutePath()+" length is currently less than MMapBuffer length, it has been modified after open. this is a huge problem.", e);
            }
        }

        /** Last (largest-key) entry in the index. */
        public Entry<K, V> last() throws IOException {
            try {
                final Block valueBlock = rootBlock().getLastValueBlock();
                return valueBlock.dataBlock.getEntry(valueBlock.length()-1);
            } catch (InternalError e) {
                throw new IOException("file "+indexFile.getAbsolutePath()+" length is currently less than MMapBuffer length, it has been modified after open. this is a huge problem.", e);
            }
        }

        @Override
        public Generation<K, V> head(K end, boolean inclusive) {
            return new FilteredGeneration<K, V>(this, stuffToClose.copy(), null, false, end, inclusive);
        }

        @Override
        public Generation<K, V> tail(K start, boolean inclusive) {
            return new FilteredGeneration<K, V>(this, stuffToClose.copy(), start, inclusive, null, false);
        }

        @Override
        public Generation<K, V> slice(K start, boolean startInclusive, K end, boolean endInclusive) {
            return new FilteredGeneration<K, V>(this, stuffToClose.copy(), start, startInclusive, end, endInclusive);
        }

        @Override
        public Generation<K, V> reverse() {
            return new ReverseGeneration<K, V>(this, stuffToClose.copy());
        }

        @Override
        public Iterator<Entry<K, V>> iterator() {
            return iterator(null, false);
        }

        /**
         * Forward iterator starting at {@code start} (or the first entry when null),
         * walking value blocks left-to-right via their parent links.
         */
        @Override
        public Iterator<Entry<K, V>> iterator(final @Nullable K start, final boolean startInclusive) {
            return new AbstractIterator<Entry<K, V>>() {
                Block<K,V> current = null;
                int currentIndex;

                @Override
                protected Entry<K, V> computeNext() {
                    if (current == null) {
                        // Lazily position on first call.
                        final Block<K,V> rootBlock = rootBlock();
                        if (rootBlock == null) {
                            return endOfData();
                        }
                        if (start != null) {
                            current = rootBlock.getValueBlock(start);
                            if (current != null) {
                                final int insertionPoint = current.search(start);
                                if (insertionPoint >= 0) {
                                    // Exact hit: skip it when the bound is exclusive.
                                    currentIndex = startInclusive ? insertionPoint : insertionPoint + 1;
                                } else {
                                    currentIndex = ~insertionPoint;
                                }
                            }
                        }
                        if (current == null) {
                            current = rootBlock.getFirstValueBlock();
                            currentIndex = 0;
                        }
                    }
                    if (currentIndex >= current.length()) {
                        // Advance to the next value block.
                        current = current.nextBlock();
                        if (current == null) {
                            return endOfData();
                        }
                        currentIndex = 0;
                    }
                    final Entry<K,V> ret = current.getEntry(currentIndex);
                    currentIndex++;
                    return ret;
                }
            };
        }

        @Override
        public Iterator<Entry<K, V>> reverseIterator() {
            return reverseIterator(null, false);
        }

        /**
         * Reverse iterator starting at {@code start} (or the last entry when null),
         * walking value blocks right-to-left.
         */
        @Override
        public Iterator<Entry<K, V>> reverseIterator(final @Nullable K start, final boolean startInclusive) {
            return new AbstractIterator<Entry<K, V>>() {
                Block current = null;
                int currentIndex;

                @Override
                protected Entry<K, V> computeNext() {
                    if (current == null) {
                        // Lazily position on first call.
                        final Block<K,V> rootBlock = rootBlock();
                        if (rootBlock == null) {
                            return endOfData();
                        }
                        if (start == null) {
                            current = rootBlock.getLastValueBlock();
                            currentIndex = current.length()-1;
                        } else {
                            current = rootBlock.getValueBlock(start);
                            if (current == null) {
                                return endOfData();
                            }
                            final int insertionPoint = current.search(start);
                            if (insertionPoint >= 0) {
                                // Exact hit: skip it when the bound is exclusive.
                                currentIndex = startInclusive ? insertionPoint : insertionPoint - 1;
                            } else {
                                currentIndex = (~insertionPoint)-1;
                            }
                        }
                    }
                    if (currentIndex < 0) {
                        // Step back to the previous value block.
                        current = current.previousBlock();
                        if (current == null) {
                            return endOfData();
                        }
                        currentIndex = current.length()-1;
                    }
                    final Entry<K,V> ret = current.getEntry(currentIndex);
                    currentIndex--;
                    return ret;
                }
            };
        }

        @Override
        public Comparator<K> getComparator() {
            return comparator;
        }

        @Override
        public File getPath() {
            return indexFile;
        }

        @Override
        public void checkpoint(File checkpointPath) throws IOException {
            PosixFileOperations.cplr(indexFile, checkpointPath);
        }

        @Override
        public void delete() throws IOException {
            indexFile.delete();
        }

        public long sizeInBytes() {
            return sizeInBytes;
        }

        public long size() {
            return size;
        }

        @Override
        public void close() throws IOException {
            stuffToClose.close();
        }

        public boolean hasDeletions() {
            return hasDeletions;
        }

        /**
         * A positioned view of one on-disk block plus the path back to its parent
         * block, which enables lateral navigation (next/previous sibling) at any level.
         */
        private static final class Block<K,V> {
            final Block<K,V> parent;
            final int parentPosition;
            final Level<K,V> level;
            final Level<K,V>.DataBlock dataBlock;

            Block(@Nullable Block<K, V> parent, int parentPosition, Level<K, V> level, Level.DataBlock dataBlock) {
                this.parent = parent;
                this.parentPosition = parentPosition;
                this.level = level;
                this.dataBlock = dataBlock;
            }

            boolean isValueLevel() {
                return level.isValueLevel();
            }

            /**
             * Returns the child block at {@code index}; an index one past either end
             * transparently crosses into the adjacent sibling's first/last child.
             */
            @Nullable
            Block<K,V> getChildBlock(int index) {
                final Level<K, V> nextLevel = Preconditions.checkNotNull(level.nextLevel);
                if (index >= dataBlock.length()) {
                    if (index == dataBlock.length()) {
                        final Block<K,V> nextBlock = nextBlock();
                        return nextBlock == null ? null : nextBlock.getChildBlock(0);
                    } else {
                        throw new RuntimeException();
                    }
                }
                if (index < 0) {
                    if (index == -1) {
                        final Block<K,V> previousBlock = previousBlock();
                        return previousBlock == null ? null : previousBlock.getChildBlock(previousBlock.dataBlock.length-1);
                    } else {
                        throw new RuntimeException();
                    }
                }
                // At index levels the stored value is the child block's file address.
                final Long address = (Long) dataBlock.getEntry(index).getValue();
                return new Block(this, index, nextLevel, nextLevel.getBlock(address));
            }

            int length() {
                return dataBlock.length();
            }

            @Nullable
            Block<K,V> nextBlock() {
                return parent == null ? null : parent.getChildBlock(parentPosition+1);
            }

            @Nullable
            Block<K,V> previousBlock() {
                return parent == null ? null : parent.getChildBlock(parentPosition-1);
            }

            /** Descends one level toward the child block whose key range contains {@code key}. */
            @Nullable
            Block<K,V> getContainingBlock(K key) {
                final Level<K, V> nextLevel = Preconditions.checkNotNull(level.nextLevel);
                final int floorIndex = neighborIndex(key, floor);
                final SearchResult<K, Long> searchResult = getSearchResult(floorIndex);
                return searchResult.match(new SearchResult.Matcher<K, Long, Block<K,V>>() {
                    Block<K, V> found(final Entry<K, Long> floor) {
                        return new Block(Block.this, floorIndex, nextLevel, nextLevel.getBlock(floor.getValue()));
                    }

                    // Key is below this block's first key: no containing child here.
                    @Nullable
                    Block<K, V> low() {
                        return null;
                    }
                });
            }

            /** Recursively descends to the value (leaf) block that would hold {@code key}. */
            @Nullable
            Block<K,V> getValueBlock(K key) {
                if (isValueLevel()) return this;
                final Block<K, V> containingBlock = getContainingBlock(key);
                if (containingBlock == null) return null;
                return containingBlock.getValueBlock(key);
            }

            Block<K,V> getFirstValueBlock() {
                if (isValueLevel()) return this;
                final Block<K, V> childBlock = Preconditions.checkNotNull(getChildBlock(0));
                return childBlock.getFirstValueBlock();
            }

            Block<K,V> getLastValueBlock() {
                if (isValueLevel()) return this;
                final Block<K, V> childBlock = Preconditions.checkNotNull(getChildBlock(length() - 1));
                return childBlock.getLastValueBlock();
            }

            @Nullable
            Entry<K,V> get(K key) {
                if (!level.isValueLevel()) throw new RuntimeException();
                return (Entry<K,V>)dataBlock.get(key);
            }

            /** In-block index for a neighbor query; may be -1 or length() (off either end). */
            int neighborIndex(K key, NeighborModifier modifier) {
                final int insertionPoint = dataBlock.search(key);
                return insertionPoint >= 0 ? insertionPoint + modifier.addFound : (~insertionPoint) + modifier.addNotFound;
            }

            /**
             * Resolves a neighbor query, spilling into the previous/next sibling block
             * when the computed index falls off this block's low/high end.
             */
            @Nullable
            <A> Entry<K,A> neighbor(final K key, final NeighborModifier modifier) {
                final SearchResult<K, A> lowerEntry = getSearchResult(neighborIndex(key, modifier));
                return lowerEntry.match(new SearchResult.Matcher<K, A, Entry<K, A>>() {
                    Entry<K, A> found(final Entry<K, A> entry) {
                        return entry;
                    }

                    @Nullable
                    Entry<K, A> low() {
                        final Block<K, V> previousBlock = previousBlock();
                        return previousBlock == null ? null : previousBlock.<A>neighbor(key, modifier);
                    }

                    @Nullable
                    Entry<K, A> high() {
                        final Block<K, V> nextBlock = nextBlock();
                        return nextBlock == null ? null : nextBlock.<A>neighbor(key, modifier);
                    }
                });
            }

            /** Classifies an index as in-range (Found), below range (Low) or above range (High). */
            <A> SearchResult<K,A> getSearchResult(final int neighborIndex) {
                if (neighborIndex < 0) return Low.low();
                if (neighborIndex >= dataBlock.length()) return High.high();
                return new Found(dataBlock.getEntry(neighborIndex));
            }

            public K getKey(int index) {
                if (!level.isValueLevel()) throw new RuntimeException();
                return dataBlock.getKey(index);
            }

            public Entry<K,V> getEntry(int index) {
                if (!level.isValueLevel()) throw new RuntimeException();
                return (Entry<K,V>)dataBlock.getEntry(index);
            }

            int search(K key) {
                return dataBlock.search(key);
            }
        }

        /**
         * Three-way result of an in-block index classification, consumed via a visitor
         * (Matcher) whose unimplemented cases throw UnsupportedOperationException.
         */
        private static interface SearchResult<K,V> {
            public <Z> Z match(Matcher<K,V,Z> m);

            static abstract class Matcher<K,V,Z> {
                Z found(Entry<K,V> entry) {return otherwise();}
                Z low() {return otherwise();}
                Z high() {return otherwise();}
                Z otherwise() {throw new UnsupportedOperationException();}
            }
        }

        /** SearchResult: the index landed on a real entry. */
        private static final class Found<K,V> implements SearchResult<K,V> {
            final Entry<K,V> entry;

            private Found(final Entry<K, V> entry) {
                this.entry = entry;
            }

            public <Z> Z match(final Matcher<K,V,Z> m) {
                return m.found(entry);
            }
        }

        /** SearchResult: the index fell below the block (singleton). */
        private static final class Low<K,V> implements SearchResult<K,V> {
            static <K,V> Low<K,V> low() {return low;}
            static final Low low = new Low();

            public <Z> Z match(final Matcher<K, V, Z> m) {
                return m.low();
            }
        }

        /** SearchResult: the index fell above the block (singleton). */
        private static final class High<K,V> implements SearchResult<K,V> {
            static <K,V> High<K,V> high() {return high;}
            static final High high = new High();

            public <Z> Z match(final Matcher<K, V, Z> m) {
                return m.high();
            }
        }

        /**
         * Index offsets that turn a binary-search result into lower/floor/ceil/higher
         * semantics: addFound applies on an exact hit, addNotFound on an insertion point.
         */
        private static final class NeighborModifier {
            final int addFound;
            final int addNotFound;

            private NeighborModifier(final int addFound, final int addNotFound) {
                this.addFound = addFound;
                this.addNotFound = addNotFound;
            }
        }

        private static final NeighborModifier lower = new NeighborModifier(-1, -1);
        private static final NeighborModifier floor = new NeighborModifier(0, -1);
        private static final NeighborModifier ceil = new NeighborModifier(0, 0);
        private static final NeighborModifier higher = new NeighborModifier(1, 0);

        /**
         * One level of the tree as a linked chain: the value level has nextLevel == null;
         * index levels store child block addresses as long values and never carry deletions.
         */
        private static final class Level<K, V> {
            final Memory memory;
            final Level<K, V> nextLevel;
            final Serializer<K> keySerializer;
            final Serializer valueSerializer;
            final Comparator<K> comparator;
            final boolean hasDeletions;

            /** Recursively builds the chain of {@code numLevels} index levels over the value level. */
            static <K, V> Level<K, V> build(Memory memory, Serializer<K> keySerializer, Serializer<V> valueSerializer, Comparator<K> comparator, boolean hasDeletions, int numLevels) {
                if (numLevels == 0) {
                    return new Level<K, V>(memory, null, keySerializer, valueSerializer, comparator, hasDeletions);
                } else {
                    return new Level<K, V>(memory, build(memory, keySerializer, valueSerializer, comparator, hasDeletions, numLevels - 1), keySerializer, new LongSerializer(), comparator, false);
                }
            }

            Level(Memory memory, @Nullable Level<K, V> nextLevel, Serializer<K> keySerializer, Serializer valueSerializer, Comparator<K> comparator, boolean hasDeletions) {
                this.memory = memory;
                this.nextLevel = nextLevel;
                this.keySerializer = keySerializer;
                this.valueSerializer = valueSerializer;
                this.comparator = comparator;
                this.hasDeletions = hasDeletions;
            }

            boolean isValueLevel() {
                return nextLevel == null;
            }

            DataBlock getBlock(long address) {
                return new DataBlock(address);
            }

            /**
             * Decoder for one block at a fixed file address. Layout: int key count,
             * then {@code length} 2-byte key offsets, then the key/value records.
             * Lookups binary-search the keys via the offset table.
             */
            final class DataBlock {
                final int length;
                final long offsetStart;
                final long kvStart;
                final MemoryDataInput in;

                DataBlock(final long blockStart) {
                    length = memory.getInt(blockStart);
                    this.offsetStart = blockStart+4;
                    kvStart = offsetStart+2*length;
                    in = new MemoryDataInput(memory);
                }

                K getKey(int index) {
                    final int offset = memory.getChar(offsetStart+2*index);
                    in.seek(kvStart+offset);
                    try {
                        return keySerializer.read(in);
                    } catch (IOException e) {
                        throw Throwables.propagate(e);
                    }
                }

                Entry<K, Object> getEntry(int index) {
                    final int offset = memory.getChar(offsetStart+2*index);
                    in.seek(kvStart+offset);
                    try {
                        final K key = keySerializer.read(in);
                        // Deletion flag byte is only present when the level was written with deletions.
                        final boolean isDeleted = hasDeletions && in.readBoolean();
                        if (isDeleted) {
                            return Entry.createDeleted(key);
                        } else {
                            return Entry.create(key, valueSerializer.read(in));
                        }
                    } catch (IOException e) {
                        throw Throwables.propagate(e);
                    }
                }

                int length() {
                    return length;
                }

                @Nullable
                Entry<K, Object> get(K key) {
                    final int insertionPoint = search(key);
                    if (insertionPoint >= 0) {
                        return getEntry(insertionPoint);
                    }
                    return null;
                }

                /**
                 * Standard binary search over the block's keys; returns the index on a hit
                 * or the bitwise complement of the insertion point on a miss.
                 */
                int search(K key) {
                    int low = 0;
                    int high = length-1;
                    while (low <= high) {
                        final int mid = (low + high) >>> 1;
                        final K midVal = getKey(mid);
                        final int cmp = comparator.compare(midVal, key);
                        if (cmp < 0) {
                            low = mid + 1;
                        } else if (cmp > 0) {
                            high = mid - 1;
                        } else {
                            return mid;
                        }
                    }
                    return ~low;
                }
            }
        }
    }

    /**
     * Fixed-width little-endian codec for {@link Header}. Field order must match
     * between write and read, and the total must equal {@code Header.length()}.
     */
    private static class HeaderSerializer implements Serializer<Header> {
        @Override
        public void write(Header header, final DataOutput out) throws IOException {
            out.writeInt(header.indexLevels);
            out.writeLong(header.rootLevelStartAddress);
            out.writeLong(header.valueLevelLength);
            out.writeLong(header.size);
            out.writeByte(header.hasDeletions ? 1 : 0);
            out.writeLong(header.fileLength);
        }

        @Override
        public Header read(final DataInput in) throws IOException {
            final int indexLevels = in.readInt();
            final long rootLevelStartAddress = in.readLong();
            final long valueLevelLength = in.readLong();
            final long size = in.readLong();
            final boolean hasDeletions = in.readByte() != 0;
            final long fileLength = in.readLong();
            return new Header(indexLevels, rootLevelStartAddress, valueLevelLength, size, hasDeletions, fileLength);
        }
    }

    /**
     * Trailer written at the very end of index.bin: tree depth, root block address,
     * value-level byte length, entry count, deletion flag and total file length
     * (the last is used as a consistency check on open).
     */
    private static class Header {
        int indexLevels;
        long rootLevelStartAddress;
        long valueLevelLength;
        long size;
        boolean hasDeletions;
        long fileLength;

        private Header() {
        }

        private Header(
                final int indexLevels,
                final long rootLevelStartAddress,
                final long valueLevelLength,
                final long size,
                final boolean hasDeletions,
                final long fileLength
        ) {
            this.indexLevels = indexLevels;
            this.rootLevelStartAddress = rootLevelStartAddress;
            this.valueLevelLength = valueLevelLength;
            this.size = size;
            this.hasDeletions = hasDeletions;
            this.fileLength = fileLength;
        }

        // 37 = int(4) + long(8) + long(8) + long(8) + byte(1) + long(8), matching HeaderSerializer.
        public static int length() {
            return 37;
        }

        @Override
        public String toString() {
            return "Header{" +
                    "indexLevels=" + indexLevels +
                    ", rootLevelStartAddress=" + rootLevelStartAddress +
                    ", valueLevelLength=" + valueLevelLength +
                    ", size=" + size +
                    ", hasDeletions=" + hasDeletions +
                    ", fileLength=" + fileLength +
                    '}';
        }
    }
}
package za.co.neildutoit.jSatisfactorySaveLoader.game.buildable.factory;

import za.co.neildutoit.jSatisfactorySaveLoader.game.SaveObjectClass;

/**
 * Save-file mapping for the coal generator buildable. All behavior is inherited from
 * {@link FGBuildableGeneratorFuel}; this subclass exists only to bind the in-game
 * object path (via {@code @SaveObjectClass}) to a Java type during save parsing.
 */
@SaveObjectClass("/Game/FactoryGame/Buildable/Factory/GeneratorCoal/Build_GeneratorCoal.Build_GeneratorCoal_C")
public class GeneratorCoal extends FGBuildableGeneratorFuel {
}
/*
 * Copyright 2000-2014 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.jetbrains.python.refactoring.introduce.variable;

import com.intellij.psi.PsiElement;
import com.intellij.psi.util.PsiTreeUtil;
import com.jetbrains.python.PyBundle;
import com.jetbrains.python.psi.PyStatement;
import com.jetbrains.python.refactoring.introduce.IntroduceHandler;
import com.jetbrains.python.refactoring.introduce.IntroduceOperation;
import org.jetbrains.annotations.NotNull;

import java.util.List;

/**
 * Handler for the "Introduce Variable" refactoring in Python code.
 *
 * @author Alexey.Ivanov
 */
public class PyIntroduceVariableHandler extends IntroduceHandler {
  public PyIntroduceVariableHandler() {
    super(new VariableValidator(), PyBundle.message("refactoring.introduce.variable.dialog.title"));
  }

  /** Inserts the generated assignment statement for the extracted expression. */
  @Override
  protected PsiElement addDeclaration(@NotNull final PsiElement expression,
                                      @NotNull final PsiElement declaration,
                                      @NotNull IntroduceOperation operation) {
    return doIntroduceVariable(expression, declaration, operation.getOccurrences(), operation.isReplaceAll());
  }

  /**
   * Places {@code declaration} immediately before the anchor statement: when replacing
   * all occurrences, the anchor covers every occurrence; otherwise it is the statement
   * containing the selected expression.
   *
   * @return the inserted declaration element
   */
  public static PsiElement doIntroduceVariable(PsiElement expression, PsiElement declaration,
                                               List<? extends PsiElement> occurrences, boolean replaceAll) {
    final PsiElement anchor;
    if (replaceAll) {
      anchor = findAnchor(occurrences);
    } else {
      anchor = PsiTreeUtil.getParentOfType(expression, PyStatement.class);
    }
    assert anchor != null;
    return anchor.getParent().addBefore(declaration, anchor);
  }

  @Override
  protected String getHelpId() {
    return "refactoring.introduceVariable";
  }

  @Override
  protected String getRefactoringId() {
    return "refactoring.python.introduce.variable";
  }
}
package com.sun.corba.se.spi.activation;

/**
* com/sun/corba/se/spi/activation/EndPointInfoHolder.java .
* Generated by the IDL-to-Java compiler (portable), version "3.2"
* from c:/re/workspace/8-2-build-windows-amd64-cygwin/jdk8u66/5298/corba/src/share/classes/com/sun/corba/se/spi/activation/activation.idl
* Monday, November 9, 2015 10:52:06 AM PST
*/

// NOTE(review): IDL-compiler-generated holder for passing EndPointInfo as a CORBA
// out/inout parameter; all (de)serialization is delegated to EndPointInfoHelper.
// Do not edit by hand — regenerate from activation.idl instead.
public final class EndPointInfoHolder implements org.omg.CORBA.portable.Streamable
{
  // The wrapped value; mutated in place by _read for out/inout semantics.
  public com.sun.corba.se.spi.activation.EndPointInfo value = null;

  public EndPointInfoHolder ()
  {
  }

  public EndPointInfoHolder (com.sun.corba.se.spi.activation.EndPointInfo initialValue)
  {
    value = initialValue;
  }

  // Reads a new value from the CORBA input stream into this holder.
  public void _read (org.omg.CORBA.portable.InputStream i)
  {
    value = com.sun.corba.se.spi.activation.EndPointInfoHelper.read (i);
  }

  // Writes the held value to the CORBA output stream.
  public void _write (org.omg.CORBA.portable.OutputStream o)
  {
    com.sun.corba.se.spi.activation.EndPointInfoHelper.write (o, value);
  }

  // TypeCode describing the held IDL type.
  public org.omg.CORBA.TypeCode _type ()
  {
    return com.sun.corba.se.spi.activation.EndPointInfoHelper.type ();
  }
}
/*
Copyright 2020. Huawei Technologies Co., Ltd. All rights reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

package com.huawei.hms.cordova.location.helpers;

import android.location.Location;

/**
 * Callback for receiving location update results.
 *
 * <p>Annotated {@code @FunctionalInterface} so the compiler enforces the
 * single-abstract-method contract and implementations can be lambdas or
 * method references.
 */
@FunctionalInterface
public interface ResultHandler {
    /**
     * A function to handle Location update results.
     * @param location Location object
     */
    void handleResult(Location location);
}
package cn.zealon.notes.common.result;

/**
 * Result-status codes used by API responses (HTTP-style plus business codes).
 * ----------------------------------------------------------------------------
 * 200 OK - [GET]: the server successfully returned the requested data; idempotent.
 * 400 INVALID REQUEST - [POST/PUT/PATCH]: the request was malformed; nothing was created or modified; idempotent.
 * 401 Unauthorized - [*]: the user is not authenticated (bad token, username or password).
 * 403 Forbidden - [*]: the user is authenticated (unlike 401) but access is denied.
 * 404 NOT FOUND - [*]: the request targets a nonexistent record; no action was taken; idempotent.
 * 406 Not Acceptable - [GET]: the requested format is unavailable (e.g. JSON requested but only XML exists).
 * 410 Gone - [GET]: the requested resource was permanently deleted and will not return.
 * 422 Unprocesable entity - [POST/PUT/PATCH]: a validation error occurred while creating an object.
 * 500 INTERNAL SERVER ERROR - [*]: the server failed; the client cannot tell whether the request succeeded.
 * 600 UN_KNOW_ERROR - unknown error
 * ----------------------------------------------------------------------------
 */
public enum HttpCodeEnum {

    OK(200,"操作成功"),
    INVALID_REQUEST(400,"参数错误"),
    UNAUTHORIZED(401,"没有权限"),
    FORBIDDEN(403,"禁止访问"),
    NOT_FOUND(404,"资源不存在"),
    NOT_ACCEPTABLE(406,"请求的格式不正确"),
    GONE(410,"数据被删除"),
    UNPROCESABLE_ENTITY(422,"参数验证错误"),
    INTERNAL_SERVER_ERROR(500,"服务器发生错误"),
    // NOTE(review): the class comment above documents UN_KNOW_ERROR as 600, but
    // the code here is 500 — identical to INTERNAL_SERVER_ERROR, so the two are
    // indistinguishable by getCode(). Confirm which value is intended.
    UN_KNOW_ERROR(500,"未知错误"),
    FAIL(501,"操作失败"),

    // Business-level codes (non-HTTP range).
    VERIFICATION_FAILED(1000, "业务逻辑验证未通过"),

    // Authentication-related business codes.
    AUTH_EXPIRED(3000,"认证到期"),
    AUTH_PWD_ERROR(3001,"用户名或密码错误"),
    AUTH_USER_DISABLED(3002,"用户被禁用"),
    AUTH_USER_LOCKED(3003,"用户被锁定");

    // Numeric status code carried by the response.
    private final int code;
    // Human-readable message associated with the code.
    private final String message;

    HttpCodeEnum(final int code, final String message){
        this.code=code;
        this.message=message;
    }

    public String getMessage() {
        return message;
    }

    public int getCode() {
        return code;
    }
}
/* * Copyright (c) 2017-2018 THL A29 Limited, a Tencent company. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.tencentcloudapi.as.v20180419.models; import com.tencentcloudapi.common.AbstractModel; import com.google.gson.annotations.SerializedName; import com.google.gson.annotations.Expose; import java.util.HashMap; public class ModifyLoadBalancerTargetAttributesRequest extends AbstractModel{ /** * 伸缩组ID */ @SerializedName("AutoScalingGroupId") @Expose private String AutoScalingGroupId; /** * 需修改目标规则属性的应用型负载均衡器列表,列表长度上限为50 */ @SerializedName("ForwardLoadBalancers") @Expose private ForwardLoadBalancer [] ForwardLoadBalancers; /** * Get 伸缩组ID * @return AutoScalingGroupId 伸缩组ID */ public String getAutoScalingGroupId() { return this.AutoScalingGroupId; } /** * Set 伸缩组ID * @param AutoScalingGroupId 伸缩组ID */ public void setAutoScalingGroupId(String AutoScalingGroupId) { this.AutoScalingGroupId = AutoScalingGroupId; } /** * Get 需修改目标规则属性的应用型负载均衡器列表,列表长度上限为50 * @return ForwardLoadBalancers 需修改目标规则属性的应用型负载均衡器列表,列表长度上限为50 */ public ForwardLoadBalancer [] getForwardLoadBalancers() { return this.ForwardLoadBalancers; } /** * Set 需修改目标规则属性的应用型负载均衡器列表,列表长度上限为50 * @param ForwardLoadBalancers 需修改目标规则属性的应用型负载均衡器列表,列表长度上限为50 */ public void setForwardLoadBalancers(ForwardLoadBalancer [] ForwardLoadBalancers) { this.ForwardLoadBalancers = ForwardLoadBalancers; } public ModifyLoadBalancerTargetAttributesRequest() { } /** * NOTE: Any ambiguous key set via 
.set("AnyKey", "value") will be a shallow copy, * and any explicit key, i.e Foo, set via .setFoo("value") will be a deep copy. */ public ModifyLoadBalancerTargetAttributesRequest(ModifyLoadBalancerTargetAttributesRequest source) { if (source.AutoScalingGroupId != null) { this.AutoScalingGroupId = new String(source.AutoScalingGroupId); } if (source.ForwardLoadBalancers != null) { this.ForwardLoadBalancers = new ForwardLoadBalancer[source.ForwardLoadBalancers.length]; for (int i = 0; i < source.ForwardLoadBalancers.length; i++) { this.ForwardLoadBalancers[i] = new ForwardLoadBalancer(source.ForwardLoadBalancers[i]); } } } /** * Internal implementation, normal users should not use it. */ public void toMap(HashMap<String, String> map, String prefix) { this.setParamSimple(map, prefix + "AutoScalingGroupId", this.AutoScalingGroupId); this.setParamArrayObj(map, prefix + "ForwardLoadBalancers.", this.ForwardLoadBalancers); } }
package studio.thevipershow.systeminfo.commands; import java.util.Collections; import studio.thevipershow.systeminfo.enums.Messages; import studio.thevipershow.systeminfo.oshi.SystemValues; import studio.thevipershow.systeminfo.utils.Utils; import org.bukkit.command.Command; import org.bukkit.command.CommandSender; public final class CommandLscpu extends Command { private final SystemValues values = SystemValues.getInstance(); public CommandLscpu() { super("lscpu", "get information about the system processor(s)", "/<command>", Collections.emptyList()); } @Override public boolean execute(CommandSender sender, String name, String[] args) { if (sender.hasPermission("systeminfo.commands.lscpu")) { if (args.length == 0) { printLscpu(sender); return true; } else { sender.sendMessage(Messages.OUT_OF_ARGS.value(true)); } } else { sender.sendMessage(Messages.NO_PERMISSIONS.value(true)); } return false; } private void printLscpu(CommandSender sender) { sender.sendMessage(Utils.color("&2«« &7Cpu info &2»»")); sender.sendMessage(Utils.color("&7Operating System: &a" + values.getOSFamily() + " " + values.getOSManufacturer() + " " + values.getOSVersion())); sender.sendMessage(Utils.color("&7Cpu Vendor: &a" + values.getCpuVendor())); sender.sendMessage(Utils.color("&7Cpu Model: &a" + values.getCpuModel() + " " + values.getCpuModelName())); sender.sendMessage(Utils.color("&7Cpu Clock Rate: &a" + values.getCpuMaxFrequency())); sender.sendMessage(Utils.color("&7Cpu Stepping: &a" + values.getCpuStepping())); sender.sendMessage(Utils.color("&7Physical Cores: &a" + values.getCpuCores())); sender.sendMessage(Utils.color("&7Logical Cores: &a" + values.getCpuThreads())); } }
/** * Licensed to Apereo under one or more contributor license agreements. See the NOTICE file * distributed with this work for additional information regarding copyright ownership. Apereo * licenses this file to you under the Apache License, Version 2.0 (the "License"); you may not use * this file except in compliance with the License. You may obtain a copy of the License at the * following location: * * <p>http://www.apache.org/licenses/LICENSE-2.0 * * <p>Unless required by applicable law or agreed to in writing, software distributed under the * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing permissions and * limitations under the License. */ package org.apereo.portal.groups; import java.util.Collections; import org.apereo.portal.concurrency.IEntityLock; /** * Extends <code>EntityGroupImpl</code> to make it lockable for writing. * * <p> */ public class LockableEntityGroupImpl extends EntityGroupImpl implements ILockableEntityGroup { protected IEntityLock lock; /** * LockableEntityGroupImpl constructor. * * @param groupKey java.lang.String * @param groupType java.lang.Class * @exception GroupsException */ public LockableEntityGroupImpl(String groupKey, Class groupType) throws GroupsException { super(groupKey, groupType); } /** Delegates to the factory. 
*/ @Override public void delete() throws GroupsException { getLockableGroupService().deleteGroup(this); } /** @return org.apereo.portal.concurrency.IEntityLock */ @Override public IEntityLock getLock() { return lock; } /** @return org.apereo.portal.groups.ILockableGroupService */ protected ILockableGroupService getLockableGroupService() throws GroupsException { return (ILockableGroupService) super.getLocalGroupService(); } /** * Ask the service to update this group (in the store), update the back-pointers of the updated * members, and force the retrieval of containing groups in case the memberships of THIS group * have changed during the time the group has been locked. */ private void primUpdate(boolean renewLock) throws GroupsException { getLockableGroupService().updateGroup(this, renewLock); clearPendingUpdates(); this.invalidateInParentGroupsCache(Collections.singleton((IGroupMember) this)); } /** * Ask the service to update this group (in the store), update the back-pointers of the updated * members, and force the retrieval of containing groups in case the memberships of THIS group * have changed during the time the group has been locked. */ private void primUpdateMembers(boolean renewLock) throws GroupsException { getLockableGroupService().updateGroupMembers(this, renewLock); clearPendingUpdates(); this.invalidateInParentGroupsCache(Collections.singleton((IGroupMember) this)); } /** @param newLock org.apereo.portal.concurrency.IEntityLock */ @Override public void setLock(IEntityLock newLock) { lock = newLock; } @Override public String toString() { return "LockableEntityGroupImpl (" + getKey() + ") " + getName(); } @Override public void update() throws GroupsException { primUpdate(false); } @Override public void updateAndRenewLock() throws GroupsException { primUpdate(true); } @Override public void updateMembers() throws GroupsException { primUpdateMembers(false); } }
package in.mcxiv.ai.convnet.layers.loss; import in.mcxiv.ai.convnet.DoubleBuffer; import in.mcxiv.ai.convnet.Vol; import in.mcxiv.ai.convnet.net.Layer; import in.mcxiv.ai.convnet.net.VP; import in.mcxiv.annotations.VPConstructor; import java.util.ArrayList; import static in.mcxiv.ai.convnet.Util.zeros; public class RegressionLayer extends Layer { public static final String LAYER_TAG = "regression"; @VPConstructor( tag = LAYER_TAG, required = "int num_neurons" ) public RegressionLayer(VP opt) { super(opt); if (opt == null) opt = new VP(); // computed this.num_inputs = opt.getInt("in_sx") * opt.getInt("in_sy") * opt.getInt("in_depth"); this.out_depth = this.num_inputs; this.out_sx = 1; this.out_sy = 1; this.layer_type = "regression"; } @Override public Vol forward(Vol V, boolean is_training) { this.in_act = V; this.out_act = V; return V; // identity function } @Override public double backward(Object inp) { // compute and accumulate gradient wrt weights and bias of this layer Vol x = this.in_act; x.dw = zeros(x.w.size); // zero out the gradient of input Vol double loss = 0.0; if (inp instanceof Integer) { // lets hope that only one number is being regressed int y = (Integer) inp; double dy = x.w.get(0) - y; x.dw.set(0, dy); loss += 0.5 * dy * dy; } else if (inp instanceof DoubleBuffer) { DoubleBuffer y = (DoubleBuffer) inp; for (int i = 0; i < this.out_depth; i++) { double dy = x.w.get(i) - y.get(i); x.dw.set(i, dy); loss += 0.5 * dy * dy; } } else if (inp instanceof DoubleBuffer[]) { throw new IllegalStateException(); // // assume it is a struct with entries .dim and .val // // and we pass gradient only along dimension dim to be equal to val // var i = y.dim; // var yi = y.val; // var dy = x.w[i] - yi; // x.dw[i] = dy; // loss += 0.5*dy*dy; } else throw new IllegalStateException(); return loss; } @Override public ArrayList<VP> getParamsAndGrads() { return new ArrayList<>(); } }
package com.huaweicloud.sdk.ecs.v2.model;

import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;

import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import java.util.function.Consumer;

/**
 * Nova (EC2-compatible API) security group model with fluent with-style setters.
 */
public class NovaSecurityGroup {

    @JsonInclude(JsonInclude.Include.NON_NULL)
    @JsonProperty(value = "description")
    private String description;

    @JsonInclude(JsonInclude.Include.NON_NULL)
    @JsonProperty(value = "id")
    private String id;

    @JsonInclude(JsonInclude.Include.NON_NULL)
    @JsonProperty(value = "name")
    private String name;

    @JsonInclude(JsonInclude.Include.NON_NULL)
    @JsonProperty(value = "tenant_id")
    private String tenantId;

    @JsonInclude(JsonInclude.Include.NON_NULL)
    @JsonProperty(value = "rules")
    private List<NovaSecurityGroupCommonRule> rules = null;

    public NovaSecurityGroup withDescription(String description) {
        this.description = description;
        return this;
    }

    /** Security group description; length 0-255.
     *
     * @return description
     */
    public String getDescription() {
        return description;
    }

    public void setDescription(String description) {
        this.description = description;
    }

    public NovaSecurityGroup withId(String id) {
        this.id = id;
        return this;
    }

    /** Security group ID in UUID format.
     *
     * @return id
     */
    public String getId() {
        return id;
    }

    public void setId(String id) {
        this.id = id;
    }

    public NovaSecurityGroup withName(String name) {
        this.name = name;
        return this;
    }

    /** Security group name; length 0-255.
     *
     * @return name
     */
    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public NovaSecurityGroup withTenantId(String tenantId) {
        this.tenantId = tenantId;
        return this;
    }

    /** Tenant ID or project ID.
     *
     * @return tenantId
     */
    public String getTenantId() {
        return tenantId;
    }

    public void setTenantId(String tenantId) {
        this.tenantId = tenantId;
    }

    public NovaSecurityGroup withRules(List<NovaSecurityGroupCommonRule> rules) {
        this.rules = rules;
        return this;
    }

    // Lazily creates the rules list so single items can be appended fluently.
    public NovaSecurityGroup addRulesItem(NovaSecurityGroupCommonRule rulesItem) {
        if (this.rules == null) {
            this.rules = new ArrayList<>();
        }
        this.rules.add(rulesItem);
        return this;
    }

    // Consumer variant: hands the (lazily created) list to the caller to populate.
    public NovaSecurityGroup withRules(Consumer<List<NovaSecurityGroupCommonRule>> rulesSetter) {
        if (this.rules == null) {
            this.rules = new ArrayList<>();
        }
        rulesSetter.accept(this.rules);
        return this;
    }

    /** List of security group rules.
     *
     * @return rules
     */
    public List<NovaSecurityGroupCommonRule> getRules() {
        return rules;
    }

    public void setRules(List<NovaSecurityGroupCommonRule> rules) {
        this.rules = rules;
    }

    @Override
    public boolean equals(java.lang.Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        NovaSecurityGroup novaSecurityGroup = (NovaSecurityGroup) o;
        return Objects.equals(this.description, novaSecurityGroup.description)
            && Objects.equals(this.id, novaSecurityGroup.id)
            && Objects.equals(this.name, novaSecurityGroup.name)
            && Objects.equals(this.tenantId, novaSecurityGroup.tenantId)
            && Objects.equals(this.rules, novaSecurityGroup.rules);
    }

    @Override
    public int hashCode() {
        return Objects.hash(description, id, name, tenantId, rules);
    }

    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("class NovaSecurityGroup {\n");
        sb.append("    description: ").append(toIndentedString(description)).append("\n");
        sb.append("    id: ").append(toIndentedString(id)).append("\n");
        sb.append("    name: ").append(toIndentedString(name)).append("\n");
        sb.append("    tenantId: ").append(toIndentedString(tenantId)).append("\n");
        sb.append("    rules: ").append(toIndentedString(rules)).append("\n");
        sb.append("}");
        return sb.toString();
    }

    /** Convert the given object to string with each line indented by 4 spaces (except the first line). */
    private String toIndentedString(java.lang.Object o) {
        if (o == null) {
            return "null";
        }
        return o.toString().replace("\n", "\n    ");
    }
}
package com.devonfw.cobigen.templates.devon4j.test.utils.resources;

import java.util.List;
import java.util.Set;

import javax.ws.rs.GET;
import javax.ws.rs.PathParam;

import com.devonfw.cobigen.templates.devon4j.test.utils.resources.dataaccess.api.DeepEntity;

// Test fixture class: its fields and method signatures are the data under
// test — presumably consumed by template-generation tests; confirm against
// the tests that reference this class.
@SuppressWarnings("javadoc")
public class TestClass {

    // Field variety: primitive, primitive array, boxed, object, object array.
    private int primitive;

    private int[] primitiveArray;

    private Integer boxed;

    private String object;

    private String[] objectArray;

    // Entity-typed fields, singly and inside collections.
    private TestEntity entity;

    private List<TestEntity> entitys;

    private Set<TestEntity> setEntitys;

    private DeepEntity deepEntity;

    // Annotated method: return value plus a mix of plain and @PathParam parameters.
    @GET
    public String methodWithReturnType(String one, @PathParam("id") int two) {
        return null;
    }

    // Void return with a single primitive parameter.
    public void methodWithVoidReturnType(boolean one) {
    }

    // No parameters at all.
    public void noParameters() {
    }
}
/* * Copyright 2013-2020 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.kubernetes.client.config.it; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.SpringApplication; import org.springframework.boot.autoconfigure.SpringBootApplication; import org.springframework.boot.context.properties.EnableConfigurationProperties; import org.springframework.context.annotation.Configuration; import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.RestController; /** * @author Ryan Baxter */ @SpringBootApplication @RestController public class KubernetesConfigClientApplicationIt { @Autowired private MyConfigurationProperties configurationProperties; @GetMapping("/myProperty") public String myProperty() { return configurationProperties.getMyProperty(); } @GetMapping("/mySecret") public String mySecret() { return configurationProperties.getMySecret(); } public static void main(String[] args) { SpringApplication.run(KubernetesConfigClientApplicationIt.class, args); } @Configuration @EnableConfigurationProperties(MyConfigurationProperties.class) class MyConfig { } }
/*
 * Copyright (C) 2020 ActiveJ LLC.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package io.activej.serializer.impl;

import io.activej.codegen.expression.Expression;
import io.activej.codegen.expression.Variable;
import io.activej.serializer.AbstractSerializerDef;
import io.activej.serializer.CompatibilityLevel;

import java.net.Inet6Address;
import java.net.NetworkInterface;

import static io.activej.codegen.expression.Expressions.*;
import static io.activej.serializer.impl.SerializerExpressions.readBytes;
import static io.activej.serializer.impl.SerializerExpressions.writeBytes;

/**
 * Serializer definition for {@link Inet6Address}: the wire format is the
 * address's 16 raw bytes (no host name, no scope). Encoder/decoder bodies are
 * built as codegen {@link Expression}s, not executed directly.
 */
public final class SerializerDefInet6Address extends AbstractSerializerDef {
	@Override
	public Class<?> getEncodeType() {
		return Inet6Address.class;
	}

	@Override
	public Expression encoder(StaticEncoders staticEncoders, Expression buf, Variable pos, Expression value, int version, CompatibilityLevel compatibilityLevel) {
		// Emit the raw address bytes returned by Inet6Address.getAddress().
		return writeBytes(buf, pos, call(value, "getAddress"));
	}

	@Override
	public Expression decoder(StaticDecoders staticDecoders, Expression in, int version, CompatibilityLevel compatibilityLevel) {
		// Allocate a 16-byte buffer, fill it from the stream, then rebuild the
		// address via Inet6Address.getByAddress(null /* host */, bytes,
		// null /* scoped NetworkInterface */).
		return let(arrayNew(byte[].class, value(16)),
				array -> sequence(
						readBytes(in, array),
						staticCall(getDecodeType(), "getByAddress", nullRef(String.class), array, nullRef(NetworkInterface.class))));
	}
}
package io.airlift.stats; import io.airlift.units.Duration; import java.util.Random; import java.util.concurrent.TimeUnit; public class BenchmarkQuantileDigest { public static void main(String[] args) throws Exception { Duration warmupTime = new Duration(3, TimeUnit.SECONDS); Duration benchmarkTime = new Duration(5, TimeUnit.SECONDS); final QuantileDigest digest = new QuantileDigest(0.01, 0, new TestingTicker(), true); final Random random = new Random(); Benchmark.Results results = Benchmark.run(new Runnable() { public void run() { digest.add(Math.abs(random.nextInt(100000))); } }, warmupTime, benchmarkTime); digest.validate(); System.out.println(String.format("Processed %s entries in %s ms. Insertion rate = %s entries/s (%.4fµs per operation)", results.getOperations(), results.getTime().getValue(TimeUnit.MILLISECONDS), results.getOperationsPerSecond(), results.getTimePerOperation().getValue(TimeUnit.MICROSECONDS))); System.out.println(String.format("Compressions: %s, %s entries/compression", digest.getCompressions(), digest.getCount() / digest.getCompressions())); } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.shardingsphere.infra.federation.executor.customized;

import org.apache.calcite.interpreter.InterpretableConvention;
import org.apache.calcite.interpreter.InterpretableConverter;
import org.apache.calcite.linq4j.Enumerable;
import org.apache.calcite.plan.RelOptCluster;
import org.apache.calcite.plan.RelTraitSet;
import org.apache.calcite.rel.RelNode;
import org.apache.shardingsphere.infra.executor.sql.execute.engine.driver.jdbc.JDBCExecutionUnit;
import org.apache.shardingsphere.infra.executor.sql.execute.engine.driver.jdbc.JDBCExecutorCallback;
import org.apache.shardingsphere.infra.executor.sql.execute.result.ExecuteResult;
import org.apache.shardingsphere.infra.executor.sql.prepare.driver.DriverExecutionPrepareEngine;
import org.apache.shardingsphere.infra.federation.executor.FederationContext;
import org.apache.shardingsphere.infra.federation.executor.FederationExecutor;
import org.apache.shardingsphere.infra.federation.optimizer.ShardingSphereOptimizer;
import org.apache.shardingsphere.infra.federation.optimizer.context.OptimizerContext;
import org.apache.shardingsphere.sql.parser.sql.common.statement.SQLStatement;

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;

/**
 * Customized filterable executor.
 *
 * <p>Work in progress: the public query entry points are still TODO stubs; only
 * the private Calcite interpreter pipeline (optimize then bind) is wired up.
 */
public final class CustomizedFilterableExecutor implements FederationExecutor {

    private final String databaseName;

    private final String schemaName;

    // Optimizer built once per executor from the supplied optimizer context.
    private final ShardingSphereOptimizer optimizer;

    public CustomizedFilterableExecutor(final String databaseName, final String schemaName, final OptimizerContext context) {
        this.databaseName = databaseName;
        this.schemaName = schemaName;
        optimizer = new ShardingSphereOptimizer(context);
    }

    // Not implemented yet: always returns null.
    @Override
    public ResultSet executeQuery(final DriverExecutionPrepareEngine<JDBCExecutionUnit, Connection> prepareEngine,
                                  final JDBCExecutorCallback<? extends ExecuteResult> callback, final FederationContext federationContext) throws SQLException {
        // TODO
        return null;
    }

    // Not implemented yet: always returns null.
    @Override
    public ResultSet getResultSet() {
        return null;
    }

    // Optimizes the SQL statement for this database/schema, then executes the
    // resulting plan via the RelNode overload below.
    private Enumerable<Object[]> execute(final SQLStatement sqlStatement) {
        // TODO
        return execute(optimizer.optimize(databaseName, schemaName, sqlStatement));
    }

    // Binds the best plan to a Calcite interpretable converter using the cluster
    // looked up from the planner contexts for this database and schema.
    private Enumerable<Object[]> execute(final RelNode bestPlan) {
        RelOptCluster cluster = optimizer.getContext().getPlannerContexts().get(databaseName).getConverters().get(schemaName).getCluster();
        return new FederateInterpretableConverter(
                cluster, cluster.traitSetOf(InterpretableConvention.INSTANCE), bestPlan).bind(new CustomizedFilterableExecuteDataContext(databaseName, schemaName, optimizer.getContext()));
    }

    // Nothing to release yet.
    @Override
    public void close() {
        // TODO
    }

    /** Minimal InterpretableConverter subclass exposing a public constructor. */
    public static final class FederateInterpretableConverter extends InterpretableConverter {

        public FederateInterpretableConverter(final RelOptCluster cluster, final RelTraitSet traits, final RelNode input) {
            super(cluster, traits, input);
        }
    }
}
/*
 * Copyright 2017 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.github.wonwoo.dynamodb;

import com.github.wonwoo.dynamodb.domain.Person;
import com.github.wonwoo.dynamodb.domain.PersonRepository;
import com.github.wonwoo.dynamodb.test.autoconfigure.DynamoTest;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.junit4.SpringRunner;

import java.util.List;

import static org.assertj.core.api.Assertions.assertThat;

/**
 * DynamoDB slice test for {@link PersonRepository}.
 *
 * @author wonwoo
 */
@DynamoTest
@RunWith(SpringRunner.class)
public class PersonRepositoryTests {

  @Autowired
  private PersonRepository personRepository;

  // NOTE(review): despite its name, this test exercises findAll(), not save(),
  // and assumes exactly two Person records are pre-loaded — presumably by the
  // @DynamoTest setup; confirm the fixture and consider renaming the method.
  @Test
  public void save() {
    List<Person> persons = personRepository.findAll();
    assertThat(persons).hasSize(2);
  }
}
package cn.skill6.common.utility; import cn.skill6.common.constant.Encode; import lombok.extern.slf4j.Slf4j; import org.apache.commons.collections.MapUtils; import org.apache.commons.lang3.StringUtils; import org.apache.http.HttpEntity; import org.apache.http.NameValuePair; import org.apache.http.ParseException; import org.apache.http.client.config.RequestConfig; import org.apache.http.client.entity.UrlEncodedFormEntity; import org.apache.http.client.methods.CloseableHttpResponse; import org.apache.http.client.methods.HttpGet; import org.apache.http.client.methods.HttpPost; import org.apache.http.impl.client.CloseableHttpClient; import org.apache.http.impl.client.HttpClients; import org.apache.http.message.BasicNameValuePair; import org.apache.http.util.EntityUtils; import java.io.IOException; import java.io.UnsupportedEncodingException; import java.util.ArrayList; import java.util.List; import java.util.Map; /** * 基于apache http工具包自定义http请求工具类,请求失败返回null * * @author 何明胜 * @version 1.1 * @since 2018年10月29日 上午12:14:14 */ @Slf4j public final class HttpsClient { /** * 封装get请求 * * @param url 请求URL * @return get请求结果 */ public static String doGet(String url) { return doGet(url, null); } /** * 封装get请求 * * @param url 请求URL * @param params 请求参数 * @return get请求结果 */ public static String doGet(String url, Map<String, String> params) { CloseableHttpClient httpClient = null; CloseableHttpResponse httpResponse = null; String response = null; if (MapUtils.isNotEmpty(params)) { UrlEncodedFormEntity urlEncodedFormEntity = paramsConfig(params); String paramStr = null; try { paramStr = EntityUtils.toString(urlEncodedFormEntity); } catch (ParseException | IOException e) { log.error("参数转换失败", e); } if (StringUtils.isEmpty(paramStr)) { return response; } url = StringUtils.join(url, "?", paramStr); } HttpGet httpGet = new HttpGet(url); httpGet.setConfig(requestConfig()); httpClient = HttpClients.createDefault(); try { httpResponse = httpClient.execute(httpGet); HttpEntity entity = 
httpResponse.getEntity(); response = EntityUtils.toString(entity); } catch (IOException e) { log.error("http请求失败", e); } closeResource(httpResponse, httpClient); return response; } /** * 封装post请求 * * @param url 请求URL * @return post请求结果 */ public static String doPost(String url) { return doPost(url, null); } /** * 封装post请求 * * @param url 请求URL * @param params 请求参数 * @return post请求结果 */ public static String doPost(String url, Map<String, String> params) { CloseableHttpClient httpClient = null; CloseableHttpResponse httpResponse = null; String response = null; HttpPost httpPost = new HttpPost(url); httpPost.setConfig(requestConfig()); httpPost.addHeader("Content-Type", "application/x-www-form-urlencoded"); if (MapUtils.isNotEmpty(params)) { UrlEncodedFormEntity urlEncodedFormEntity = paramsConfig(params); httpPost.setEntity(urlEncodedFormEntity); } httpClient = HttpClients.createDefault(); try { httpResponse = httpClient.execute(httpPost); HttpEntity entity = httpResponse.getEntity(); response = EntityUtils.toString(entity); } catch (ParseException | IOException e) { log.error("http请求失败", e); } closeResource(httpResponse, httpClient); return response; } /** * 请求参数配置 * * @return 请求参数实体 */ private static UrlEncodedFormEntity paramsConfig(Map<String, String> params) { UrlEncodedFormEntity urlEncodedFormEntity = null; List<NameValuePair> nameValuePairs = new ArrayList<NameValuePair>(); for (Map.Entry<String, String> entry : params.entrySet()) { BasicNameValuePair nameValuePair = new BasicNameValuePair(entry.getKey(), entry.getValue()); nameValuePairs.add(nameValuePair); } try { urlEncodedFormEntity = new UrlEncodedFormEntity(nameValuePairs, Encode.DEFAULT_ENCODE); } catch (UnsupportedEncodingException e) { log.error("构建http参数失败", e); } return urlEncodedFormEntity; } /** * 获取请求配置 * * @return 请求配置 */ private static RequestConfig requestConfig() { return RequestConfig.custom() .setConnectTimeout(35000) // 连接主机服务超时时间 .setConnectionRequestTimeout(35000) // 请求超时时间 
.setSocketTimeout(60000) // 数据读取超时时间 .build(); } /** * 关闭资源 * * @param httpResponse http返回体 * @param httpClient http客户端 */ private static void closeResource(CloseableHttpResponse httpResponse, CloseableHttpClient httpClient) { if (httpResponse != null) { try { httpResponse.close(); } catch (IOException e) { log.warn("httpResponse 关闭异常", e); } } if (httpClient != null) { try { httpClient.close(); } catch (IOException e) { log.warn("httpClient 关闭异常", e); } } } }
/*
 * Copyright (C) 2012 United States Government as represented by the Administrator of the
 * National Aeronautics and Space Administration.
 * All Rights Reserved.
 */
package gov.nasa.worldwind.render;

import gov.nasa.worldwind.avlist.AVKey;
import gov.nasa.worldwind.cache.GpuResourceCache;
import gov.nasa.worldwind.exception.WWRuntimeException;
import gov.nasa.worldwind.geom.*;
import gov.nasa.worldwind.util.*;

import javax.media.opengl.GL2;
import javax.media.opengl.glu.*;
import java.util.*;
import java.util.logging.Level;

/**
 * Renders fast multiple polygons with or without holes in one pass. It relies on a {@link
 * gov.nasa.worldwind.util.CompoundVecBuffer}.
 * <p/>
 * Whether a polygon ring is filled or is a hole in another polygon depends on the vertices winding order and the
 * winding rule used - see setWindingRule(String).
 *
 * @author Dave Collins
 * @author Patrick Murris
 * @version $Id: SurfacePolygons.java 1171 2013-02-11 21:45:02Z dcollins $
 */
public class SurfacePolygons extends SurfacePolylines // TODO: Review
{
    // Optional per-polygon grouping of rings; null means grouping is derived from winding order.
    protected int[] polygonRingGroups;
    protected String windingRule = AVKey.CLOCKWISE;
    protected boolean needsInteriorTessellation = true;
    protected WWTexture texture;
    protected Object interiorDisplayListCacheKey = new Object();

    public SurfacePolygons(CompoundVecBuffer buffer)
    {
        super(buffer);
    }

    public SurfacePolygons(Sector sector, CompoundVecBuffer buffer)
    {
        super(sector, buffer);
    }

    /**
     * Get a copy of the polygon ring groups array - can be null.
     * <p/>
     * When not null the polygon ring groups array identifies the starting sub buffer index for each polygon. In that
     * case rings from a same group will be tesselated together as part of the same polygon.
     * <p/>
     * When <code>null</code> polygon rings that follow the current winding rule are tessellated separatly as different
     * polygons. Rings that are reverse winded are considered holes to be applied to the last straight winded ring
     * polygon.
     *
     * @return a copy of the polygon ring groups array - can be null.
     */
    public int[] getPolygonRingGroups()
    {
        // Null-safe: the groups array is documented as "can be null"; cloning null would throw NPE.
        return this.polygonRingGroups != null ? this.polygonRingGroups.clone() : null;
    }

    /**
     * Set the polygon ring groups array - can be null.
     * <p/>
     * When not null the polygon ring groups array identifies the starting sub buffer index for each polygon. In that
     * case rings from a same group will be tesselated together as part of the same polygon.
     * <p/>
     * When <code>null</code> polygon rings that follow the current winding rule are tessellated separatly as different
     * polygons. Rings that are reverse winded are considered holes to be applied to the last straight winded ring
     * polygon.
     *
     * @param ringGroups a copy of the polygon ring groups array - can be null.
     */
    public void setPolygonRingGroups(int[] ringGroups)
    {
        // Null-safe: passing null reverts to winding-order-derived grouping, per the Javadoc contract.
        this.polygonRingGroups = ringGroups != null ? ringGroups.clone() : null;
        this.onGeometryChanged();
    }

    /**
     * Get the winding rule used when tessellating polygons. Can be one of {@link AVKey#CLOCKWISE} (default) or {@link
     * AVKey#COUNTER_CLOCKWISE}.
     * <p/>
     * When set to {@link AVKey#CLOCKWISE} polygons which run clockwise will be filled and those which run counter
     * clockwise will produce 'holes'. The interpretation is reversed when the winding rule is set to {@link
     * AVKey#COUNTER_CLOCKWISE}.
     *
     * @return the winding rule used when tessellating polygons.
     */
    public String getWindingRule()
    {
        return this.windingRule;
    }

    /**
     * Set the winding rule used when tessellating polygons. Can be one of {@link AVKey#CLOCKWISE} (default) or {@link
     * AVKey#COUNTER_CLOCKWISE}.
     * <p/>
     * When set to {@link AVKey#CLOCKWISE} polygons which run clockwise will be filled and those which run counter
     * clockwise will produce 'holes'. The interpretation is reversed when the winding rule is set to {@link
     * AVKey#COUNTER_CLOCKWISE}.
     *
     * @param windingRule the winding rule to use when tessellating polygons.
     */
    public void setWindingRule(String windingRule)
    {
        this.windingRule = windingRule;
        this.onGeometryChanged();
    }

    /** Marks the interior as needing re-tessellation and forwards the change to the superclass. */
    protected void onGeometryChanged()
    {
        this.needsInteriorTessellation = true;
        super.onGeometryChanged();
    }

    /**
     * Draws the tessellated polygon interior, (re)building the cached display list when necessary. When the shape
     * crosses the date line the interior is drawn a second time with a 360-degree hemisphere offset.
     */
    protected void drawInterior(DrawContext dc, SurfaceTileDrawContext sdc)
    {
        // Exit immediately if the polygon has no coordinate data.
        if (this.buffer.size() == 0)
            return;

        Position referencePos = this.getReferencePosition();
        if (referencePos == null)
            return;

        // Attempt to tessellate the polygon's interior if the polygon's interior display list is uninitialized, or if
        // the polygon is marked as needing tessellation.
        int[] dlResource = (int[]) dc.getGpuResourceCache().get(this.interiorDisplayListCacheKey);
        if (dlResource == null || this.needsInteriorTessellation)
            dlResource = this.tessellateInterior(dc, referencePos);

        // Exit immediately if the polygon's interior failed to tessellate. The cause has already been logged by
        // tessellateInterior().
        if (dlResource == null)
            return;

        GL2 gl = dc.getGL().getGL2(); // GL initialization checks for GL2 compatibility.
        this.applyInteriorState(dc, sdc, this.getActiveAttributes(), this.getTexture(), referencePos);
        gl.glCallList(dlResource[0]);

        if (this.crossesDateLine)
        {
            gl.glPushMatrix();
            try
            {
                // Apply hemisphere offset and draw again
                double hemisphereSign = Math.signum(referencePos.getLongitude().degrees);
                gl.glTranslated(360 * hemisphereSign, 0, 0);
                gl.glCallList(dlResource[0]);
            }
            finally
            {
                gl.glPopMatrix();
            }
        }
    }

    /**
     * Lazily creates and returns the interior texture from the active attributes' image source, or null when no image
     * source is set.
     */
    protected WWTexture getTexture()
    {
        if (this.getActiveAttributes().getImageSource() == null)
            return null;

        if (this.texture == null && this.getActiveAttributes().getImageSource() != null)
            this.texture = new BasicWWTexture(this.getActiveAttributes().getImageSource(), true);

        return this.texture;
    }

    //**************************************************************//
    //********************  Interior Tessellation  *****************//
    //**************************************************************//

    /**
     * Tessellates the interior, converting any failure (including OutOfMemoryError) into a logged rendering exception
     * and a null return.
     *
     * @return the display-list resource pair {listId, count}, or null if tessellation failed.
     */
    protected int[] tessellateInterior(DrawContext dc, LatLon referenceLocation)
    {
        if (dc == null)
        {
            String message = Logging.getMessage("nullValue.DrawContextIsNull");
            Logging.logger().severe(message);
            throw new IllegalArgumentException(message);
        }

        try
        {
            return this.doTessellateInterior(dc, referenceLocation);
        }
        catch (OutOfMemoryError e)
        {
            String message = Logging.getMessage("generic.ExceptionWhileTessellating", this);
            Logging.logger().log(Level.SEVERE, message, e);

            //noinspection ThrowableInstanceNeverThrown
            dc.addRenderingException(new WWRuntimeException(message, e));

            this.handleUnsuccessfulInteriorTessellation(dc);

            return null;
        }
    }

    /**
     * Performs the actual GLU tessellation into a fresh OpenGL display list and caches the result. On any failure the
     * display list is deleted and the shape is disabled via handleUnsuccessfulInteriorTessellation().
     */
    protected int[] doTessellateInterior(DrawContext dc, LatLon referenceLocation)
    {
        GL2 gl = dc.getGL().getGL2(); // GL initialization checks for GL2 compatibility.
        GLUtessellatorCallback cb = GLUTessellatorSupport.createOGLDrawPrimitivesCallback(gl);

        int[] dlResource = new int[] {gl.glGenLists(1), 1};
        GLUTessellatorSupport glts = new GLUTessellatorSupport();

        try
        {
            glts.beginTessellation(cb, new Vec4(0, 0, 1));

            gl.glNewList(dlResource[0], GL2.GL_COMPILE);
            int numBytes = this.tessellateInteriorVertices(glts.getGLUtessellator(), referenceLocation);
            glts.endTessellation();
            gl.glEndList();

            this.needsInteriorTessellation = false;

            dc.getGpuResourceCache().put(this.interiorDisplayListCacheKey, dlResource,
                GpuResourceCache.DISPLAY_LISTS, numBytes);

            return dlResource;
        }
        catch (Throwable e)
        {
            // Free any heap memory used for tessellation immediately. If tessellation has consumed all available heap
            // memory, we must free memory used by tessellation immediately or subsequent operations such as message
            // logging will fail.
            gl.glEndList();
            glts.endTessellation();
            gl.glDeleteLists(dlResource[0], dlResource[1]);

            String message = Logging.getMessage("generic.ExceptionWhileTessellating", this);
            Logging.logger().log(Level.SEVERE, message, e);

            //noinspection ThrowableInstanceNeverThrown
            dc.addRenderingException(new WWRuntimeException(message, e));

            this.handleUnsuccessfulInteriorTessellation(dc);

            return null;
        }
    }

    protected void handleUnsuccessfulInteriorTessellation(DrawContext dc)
    {
        // If tessellating the polygon's interior was unsuccessful, we modify the polygon to avoid any additional
        // tessellation attempts, and free any resources that the polygon won't use.

        // Replace the polygon's coordinate buffer with an empty CompoundVecBuffer. This ensures that any rendering
        // code won't attempt to re-tessellate this polygon.
        this.buffer = CompoundVecBuffer.emptyCompoundVecBuffer(2);

        // Flag the polygon as having changed, since we've replaced its coordinate buffer with an empty
        // CompoundVecBuffer.
        this.onGeometryChanged();
    }

    /**
     * Feeds all rings to the GLU tessellator, either one polygon per winding-derived group (when polygonRingGroups is
     * null) or one polygon per explicit ring group.
     *
     * @return an estimate of the number of bytes of vertex data tessellated, used to size the GPU cache entry.
     */
    protected int tessellateInteriorVertices(GLUtessellator tess, LatLon referenceLocation)
    {
        // Setup the winding order to correctly tessellate the outer and inner rings.
        GLU.gluTessProperty(tess, GLU.GLU_TESS_WINDING_RULE, this.windingRule.equals(AVKey.CLOCKWISE)
            ? GLU.GLU_TESS_WINDING_NEGATIVE : GLU.GLU_TESS_WINDING_POSITIVE);

        this.crossesDateLine = false;

        int numBytes = 0;
        int numRings = this.buffer.size();
        if (this.polygonRingGroups == null)
        {
            boolean inBeginPolygon = false;

            // Polygon rings are drawn following the sub buffers order. If the winding rule is CW all clockwise
            // rings are considered an outer ring possibly followed by counter clock wise inner rings.
            for (int i = 0; i < numRings; i++)
            {
                VecBuffer vecBuffer = this.buffer.subBuffer(i);
                numBytes += vecBuffer.getSize() * 3 * 4; // 3 float coords per vertex

                // Start a new polygon for each outer ring
                if (WWMath.computeWindingOrderOfLocations(vecBuffer.getLocations()).equals(this.getWindingRule()))
                {
                    if (inBeginPolygon)
                        GLU.gluTessEndPolygon(tess);

                    GLU.gluTessBeginPolygon(tess, null);
                    inBeginPolygon = true;
                }

                if (tessellateRing(tess, vecBuffer, referenceLocation))
                    this.crossesDateLine = true;
            }

            if (inBeginPolygon)
                GLU.gluTessEndPolygon(tess);
        }
        else
        {
            // Tessellate one polygon per ring group
            int numGroups = this.polygonRingGroups.length;
            for (int group = 0; group < numGroups; group++)
            {
                int groupStart = this.polygonRingGroups[group];
                int groupLength = (group == numGroups - 1)
                    ? numRings - groupStart : this.polygonRingGroups[group + 1] - groupStart;

                GLU.gluTessBeginPolygon(tess, null);
                for (int i = 0; i < groupLength; i++)
                {
                    VecBuffer subBuffer = this.buffer.subBuffer(groupStart + i);
                    numBytes += subBuffer.getSize() * 3 * 4; // 3 float coords per vertex
                    if (tessellateRing(tess, subBuffer, referenceLocation))
                        this.crossesDateLine = true;
                }
                GLU.gluTessEndPolygon(tess);
            }
        }

        return numBytes;
    }

    /**
     * Tessellates one ring as a GLU contour, handling pole-wrapping shapes and longitudes that cross the date line by
     * applying a 360-degree hemisphere offset.
     *
     * @return true if the ring crosses the date line.
     */
    protected boolean tessellateRing(GLUtessellator tess, VecBuffer vecBuffer, LatLon referenceLocation)
    {
        // Check for pole wrapping shape
        List<double[]> dateLineCrossingPoints = this.computeDateLineCrossingPoints(vecBuffer);
        int pole = this.computePole(dateLineCrossingPoints);
        double[] poleWrappingPoint = this.computePoleWrappingPoint(pole, dateLineCrossingPoints);

        GLU.gluTessBeginContour(tess);
        Iterable<double[]> iterable = vecBuffer.getCoords(3);
        boolean dateLineCrossed = false;
        int sign = 0;
        double[] previousPoint = null;
        for (double[] coords : iterable)
        {
            if (poleWrappingPoint != null && previousPoint != null
                && poleWrappingPoint[0] == previousPoint[0] && poleWrappingPoint[1] == previousPoint[1])
            {
                previousPoint = coords.clone();

                // Wrapping a pole
                double[] dateLinePoint1 = this.computeDateLineEntryPoint(poleWrappingPoint, coords);
                double[] polePoint1 = new double[] {180 * Math.signum(poleWrappingPoint[0]), 90d * pole, 0};
                double[] dateLinePoint2 = dateLinePoint1.clone();
                double[] polePoint2 = polePoint1.clone();
                dateLinePoint2[0] *= -1;
                polePoint2[0] *= -1;

                // Move to date line then to pole
                tessVertex(tess, dateLinePoint1, referenceLocation);
                tessVertex(tess, polePoint1, referenceLocation);

                // Move to the other side of the date line
                tessVertex(tess, polePoint2, referenceLocation);
                tessVertex(tess, dateLinePoint2, referenceLocation);

                // Finally, draw current point past the date line
                tessVertex(tess, coords, referenceLocation);

                dateLineCrossed = true;
            }
            else
            {
                if (previousPoint != null && Math.abs(previousPoint[0] - coords[0]) > 180)
                {
                    // Crossing date line, sum departure point longitude sign for hemisphere offset
                    sign += (int) Math.signum(previousPoint[0]);
                    dateLineCrossed = true;
                }

                previousPoint = coords.clone();

                coords[0] += sign * 360;   // apply hemisphere offset
                tessVertex(tess, coords, referenceLocation);
            }
        }
        GLU.gluTessEndContour(tess);

        return dateLineCrossed;
    }

    // Emits one vertex to the tessellator, expressed relative to the reference location.
    private static void tessVertex(GLUtessellator tess, double[] coords, LatLon referenceLocation)
    {
        double[] vertex = new double[3];
        vertex[0] = coords[0] - referenceLocation.getLongitude().degrees;
        vertex[1] = coords[1] - referenceLocation.getLatitude().degrees;
        GLU.gluTessVertex(tess, vertex, 0, vertex);
    }

    // --- Pole wrapping shapes handling ---

    protected List<double[]> computeDateLineCrossingPoints(VecBuffer vecBuffer)
    {
        // Shapes that include a pole will yield an odd number of points
        List<double[]> list = new ArrayList<double[]>();
        Iterable<double[]> iterable = vecBuffer.getCoords(3);
        double[] previousPoint = null;
        for (double[] coords : iterable)
        {
            if (previousPoint != null && Math.abs(previousPoint[0] - coords[0]) > 180)
                list.add(previousPoint);
            previousPoint = coords;
        }

        return list;
    }

    /**
     * Determines whether the ring wraps a pole from its date-line crossings.
     *
     * @return 1 for the north pole, -1 for the south pole, 0 when no pole is wrapped.
     */
    protected int computePole(List<double[]> dateLineCrossingPoints)
    {
        int sign = 0;
        for (double[] point : dateLineCrossingPoints)
        {
            sign += Math.signum(point[0]);
        }

        if (sign == 0)
            return 0;

        // If we cross the date line going west (from a negative longitude) with a clockwise polygon,
        // then the north pole (positive) is included.
        return this.getWindingRule().equals(AVKey.CLOCKWISE) && sign < 0 ? 1 : -1;
    }

    /** Returns the date-line crossing point whose latitude is closest to the wrapped pole, or null when pole == 0. */
    protected double[] computePoleWrappingPoint(int pole, List<double[]> dateLineCrossingPoints)
    {
        if (pole == 0)
            return null;

        // Find point with latitude closest to pole
        int idx = -1;
        double max = pole < 0 ? 90 : -90;
        for (int i = 0; i < dateLineCrossingPoints.size(); i++)
        {
            double[] point = dateLineCrossingPoints.get(i);
            if (pole < 0 && point[1] < max) // increasing latitude toward north pole
            {
                idx = i;
                max = point[1];
            }
            if (pole > 0 && point[1] > max) // decreasing latitude toward south pole
            {
                idx = i;
                max = point[1];
            }
        }

        return dateLineCrossingPoints.get(idx);
    }

    protected double[] computeDateLineEntryPoint(double[] from, double[] to)
    {
        // Linear interpolation between from and to at the date line
        double dLat = to[1] - from[1];
        double dLon = 360 - Math.abs(to[0] - from[0]);
        double s = Math.abs(180 * Math.signum(from[0]) - from[0]) / dLon;
        double lat = from[1] + dLat * s;
        double lon = 180 * Math.signum(from[0]); // same side as from

        return new double[] {lon, lat, 0};
    }
}
/** * %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% * * Copyright (c) 2012 - SCAPI (http://crypto.biu.ac.il/scapi) * This file is part of the SCAPI project. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), * to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, * and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS * FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. * * We request that any publication and/or code referring to and/or based on SCAPI contain an appropriate citation to SCAPI, including a reference to * http://crypto.biu.ac.il/SCAPI. * * SCAPI uses Crypto++, Miracl, NTL and Bouncy Castle. Please see these projects for any further licensing issues. * %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% * */ package edu.biu.scapi.interactiveMidProtocols.ot.fullSimulation; import edu.biu.scapi.primitives.dlog.GroupElement; /** * This class holds the Group Elements calculated in the preprocess phase. 
* * @author Cryptography and Computer Security Research Group Department of Computer Science Bar-Ilan University (Moriya Farbstein) * */ public class OTFullSimPreprocessPhaseValues { private GroupElement g0, g1, h0, h1; //Values calculated by the preprocess phase. public OTFullSimPreprocessPhaseValues(GroupElement g0, GroupElement g1, GroupElement h0, GroupElement h1){ this.g0 = g0; this.g1 = g1; this.h1 = h1; this.h0 = h0; } public GroupElement getG0() { return g0; } public GroupElement getG1() { return g1; } public GroupElement getH0() { return h0; } public GroupElement getH1() { return h1; } }
/*
 * Central Repository
 *
 * Copyright 2011-2018 Basis Technology Corp.
 * Contact: carrier <at> sleuthkit <dot> org
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.sleuthkit.autopsy.centralrepository.ingestmodule;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.logging.Level;
import java.util.stream.Collectors;
import org.openide.util.NbBundle.Messages;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeInstance;
import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeNormalizationException;
import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationCase;
import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationDataSource;
import org.sleuthkit.autopsy.centralrepository.datamodel.EamArtifactUtil;
import org.sleuthkit.autopsy.centralrepository.datamodel.EamDb;
import org.sleuthkit.autopsy.centralrepository.datamodel.EamDbException;
import org.sleuthkit.autopsy.centralrepository.datamodel.EamDbPlatformEnum;
import org.sleuthkit.autopsy.centralrepository.eventlisteners.IngestEventsListener;
import org.sleuthkit.autopsy.core.RuntimeProperties;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
import org.sleuthkit.autopsy.healthmonitor.HealthMonitor;
import org.sleuthkit.autopsy.healthmonitor.TimingMetric;
import org.sleuthkit.autopsy.ingest.FileIngestModule;
import org.sleuthkit.autopsy.ingest.IngestJobContext;
import org.sleuthkit.autopsy.ingest.IngestMessage;
import org.sleuthkit.autopsy.ingest.IngestModuleReferenceCounter;
import org.sleuthkit.autopsy.ingest.IngestServices;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact;
import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT;
import org.sleuthkit.datamodel.BlackboardAttribute;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME;
import org.sleuthkit.datamodel.HashUtility;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskData;

/**
 * Ingest module for inserting entries into the Central Repository database on
 * ingest of a data source. For each eligible file it (1) optionally flags the
 * file if it was tagged as notable in a previous case, and (2) optionally adds
 * the file's MD5 as a correlation property to the Central Repository.
 */
@Messages({"CentralRepoIngestModule.prevTaggedSet.text=Previously Tagged As Notable (Central Repository)",
    "CentralRepoIngestModule.prevCaseComment.text=Previous Case: "})
final class CentralRepoIngestModule implements FileIngestModule {

    private static final String MODULE_NAME = CentralRepoIngestModuleFactory.getModuleName();
    static final boolean DEFAULT_FLAG_TAGGED_NOTABLE_ITEMS = true;
    static final boolean DEFAULT_FLAG_PREVIOUS_DEVICES = true;
    static final boolean DEFAULT_CREATE_CR_PROPERTIES = true;
    private final static Logger logger = Logger.getLogger(CentralRepoIngestModule.class.getName());
    private final IngestServices services = IngestServices.getInstance();
    // Shared across all module instances: counts instances per ingest job (see startUp/shutDown).
    private static final IngestModuleReferenceCounter refCounter = new IngestModuleReferenceCounter();
    // Ensures the "CR not initialized" warning bubble is shown at most once per job.
    private static final IngestModuleReferenceCounter warningMsgRefCounter = new IngestModuleReferenceCounter();
    private long jobId;
    private CorrelationCase eamCase;
    private CorrelationDataSource eamDataSource;
    private CorrelationAttributeInstance.Type filesType;
    private final boolean flagTaggedNotableItems;
    private final boolean flagPreviouslySeenDevices;
    private Blackboard blackboard;
    private final boolean createCorrelationProperties;

    /**
     * Instantiate the Correlation Engine ingest module.
     *
     * @param settings The ingest settings for the module instance.
     */
    CentralRepoIngestModule(IngestSettings settings) {
        flagTaggedNotableItems = settings.isFlagTaggedNotableItems();
        flagPreviouslySeenDevices = settings.isFlagPreviousDevices();
        createCorrelationProperties = settings.shouldCreateCorrelationProperties();
    }

    /**
     * Processes one file: skips unsupported/known/hash-less files, flags files
     * previously marked notable in other cases, and queues the file's MD5 for
     * bulk insertion into the Central Repository.
     *
     * @param abstractFile the file being ingested.
     *
     * @return OK on success or skip; ERROR on any database/case failure.
     */
    @Override
    public ProcessResult process(AbstractFile abstractFile) {
        if (EamDb.isEnabled() == false) {
            /*
             * Not signaling an error for now. This is a workaround for the way
             * all newly discovered ingest modules are automatically enabled.
             *
             * TODO (JIRA-2731): Add isEnabled API for ingest modules.
             */
            return ProcessResult.OK;
        }

        try {
            blackboard = Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard();
        } catch (NoCurrentCaseException ex) {
            logger.log(Level.SEVERE, "Exception while getting open case.", ex);
            return ProcessResult.ERROR;
        }

        if (!EamArtifactUtil.isSupportedAbstractFileType(abstractFile)) {
            return ProcessResult.OK;
        }

        // Files already marked KNOWN (e.g. NSRL hits) are not worth correlating.
        if (abstractFile.getKnown() == TskData.FileKnown.KNOWN) {
            return ProcessResult.OK;
        }

        EamDb dbManager;
        try {
            dbManager = EamDb.getInstance();
        } catch (EamDbException ex) {
            logger.log(Level.SEVERE, "Error connecting to Central Repository database.", ex);
            return ProcessResult.ERROR;
        }

        // only continue if we are correlating filesType
        if (!filesType.isEnabled()) {
            return ProcessResult.OK;
        }

        // get the hash because we're going to correlate it
        String md5 = abstractFile.getMd5Hash();
        if ((md5 == null) || (HashUtility.isNoDataMd5(md5))) {
            return ProcessResult.OK;
        }

        /*
         * Search the central repo to see if this file was previously marked as
         * being bad. Create artifact if it was.
         */
        if (abstractFile.getKnown() != TskData.FileKnown.KNOWN && flagTaggedNotableItems) {
            try {
                TimingMetric timingMetric = HealthMonitor.getTimingMetric("Correlation Engine: Notable artifact query");
                List<String> caseDisplayNamesList = dbManager.getListCasesHavingArtifactInstancesKnownBad(filesType, md5);
                HealthMonitor.submitTimingMetric(timingMetric);
                if (!caseDisplayNamesList.isEmpty()) {
                    postCorrelatedBadFileToBlackboard(abstractFile, caseDisplayNamesList);
                }
            } catch (EamDbException ex) {
                logger.log(Level.SEVERE, "Error searching database for artifact.", ex); // NON-NLS
                return ProcessResult.ERROR;
            } catch (CorrelationAttributeNormalizationException ex) {
                logger.log(Level.INFO, "Error searching database for artifact.", ex); // NON-NLS
                return ProcessResult.ERROR;
            }
        }

        // insert this file into the central repository
        if (createCorrelationProperties) {
            try {
                CorrelationAttributeInstance cefi = new CorrelationAttributeInstance(
                        filesType,
                        md5,
                        eamCase,
                        eamDataSource,
                        abstractFile.getParentPath() + abstractFile.getName(),
                        null,
                        TskData.FileKnown.UNKNOWN // NOTE: Known status in the CR is based on tagging, not hashes like the Case Database.
                        , abstractFile.getId());
                dbManager.addAttributeInstanceBulk(cefi);
            } catch (EamDbException ex) {
                logger.log(Level.SEVERE, "Error adding artifact to bulk artifacts.", ex); // NON-NLS
                return ProcessResult.ERROR;
            } catch (CorrelationAttributeNormalizationException ex) {
                logger.log(Level.INFO, "Error adding artifact to bulk artifacts.", ex); // NON-NLS
                return ProcessResult.ERROR;
            }
        }
        return ProcessResult.OK;
    }

    /**
     * Flushes any queued correlation properties to the Central Repository and
     * releases this instance's reference count for the job.
     */
    @Override
    public void shutDown() {
        IngestEventsListener.decrementCorrelationEngineModuleCount();

        if ((EamDb.isEnabled() == false) || (eamCase == null) || (eamDataSource == null)) {
            return;
        }
        EamDb dbManager;
        try {
            dbManager = EamDb.getInstance();
        } catch (EamDbException ex) {
            logger.log(Level.SEVERE, "Error connecting to Central Repository database.", ex);
            return;
        }
        try {
            dbManager.commitAttributeInstancesBulk();
        } catch (EamDbException ex) {
            logger.log(Level.SEVERE, "Error doing bulk insert of artifacts.", ex); // NON-NLS
        }
        try {
            Long count = dbManager.getCountArtifactInstancesByCaseDataSource(eamDataSource);
            logger.log(Level.INFO, "{0} artifacts in db for case: {1} ds:{2}", new Object[]{count, eamCase.getDisplayName(), eamDataSource.getName()}); // NON-NLS
        } catch (EamDbException ex) {
            logger.log(Level.SEVERE, "Error counting artifacts.", ex); // NON-NLS
        }

        // TODO: once we implement shared cache, if refCounter is 1, then submit data in bulk.
        refCounter.decrementAndGet(jobId);
    }

    // see ArtifactManagerTimeTester for details
    @Messages({
        "CentralRepoIngestModule.notfyBubble.title=Central Repository Not Initialized",
        "CentralRepoIngestModule.errorMessage.isNotEnabled=Central repository settings are not initialized, cannot run Correlation Engine ingest module."
    })
    /**
     * Initializes the module for an ingest job: propagates flagging settings
     * to the shared events listener, validates the case/platform combination,
     * resolves the CR case and data source, and (for the first module instance
     * of the job) ensures the data source exists in the CR database.
     *
     * @param context the ingest job context.
     *
     * @throws IngestModuleException if the case or CR database is unusable.
     */
    @Override
    public void startUp(IngestJobContext context) throws IngestModuleException {
        IngestEventsListener.incrementCorrelationEngineModuleCount();

        /*
         * Tell the IngestEventsListener to flag notable items based on the
         * current module's configuration. This is a work around for the lack of
         * an artifacts pipeline. Note that this can be changed by another
         * module instance. All modules are affected by the value. While not
         * ideal, this will be good enough until a better solution can be
         * posited.
         *
         * Note: Flagging cannot be disabled if any other instances of the
         * Correlation Engine module are running. This restriction is to prevent
         * missing results in the case where the first module is flagging
         * notable items, and the proceeding module (with flagging disabled)
         * causes the first to stop flagging.
         */
        if (IngestEventsListener.getCeModuleInstanceCount() == 1 || !IngestEventsListener.isFlagNotableItems()) {
            IngestEventsListener.setFlagNotableItems(flagTaggedNotableItems);
        }
        if (IngestEventsListener.getCeModuleInstanceCount() == 1 || !IngestEventsListener.isFlagSeenDevices()) {
            IngestEventsListener.setFlagSeenDevices(flagPreviouslySeenDevices);
        }
        if (IngestEventsListener.getCeModuleInstanceCount() == 1 || !IngestEventsListener.shouldCreateCrProperties()) {
            IngestEventsListener.setCreateCrProperties(createCorrelationProperties);
        }

        if (EamDb.isEnabled() == false) {
            /*
             * Not throwing the customary exception for now. This is a
             * workaround for the way all newly discovered ingest modules are
             * automatically enabled.
             *
             * TODO (JIRA-2731): Add isEnabled API for ingest modules.
             */
            if (RuntimeProperties.runningWithGUI()) {
                if (1L == warningMsgRefCounter.incrementAndGet(jobId)) {
                    MessageNotifyUtil.Notify.warn(Bundle.CentralRepoIngestModule_notfyBubble_title(), Bundle.CentralRepoIngestModule_errorMessage_isNotEnabled());
                }
            }
            return;
        }
        Case autopsyCase;
        try {
            autopsyCase = Case.getCurrentCaseThrows();
        } catch (NoCurrentCaseException ex) {
            logger.log(Level.SEVERE, "Exception while getting open case.", ex);
            throw new IngestModuleException("Exception while getting open case.", ex);
        }

        // Don't allow sqlite central repo databases to be used for multi user cases
        if ((autopsyCase.getCaseType() == Case.CaseType.MULTI_USER_CASE)
                && (EamDbPlatformEnum.getSelectedPlatform() == EamDbPlatformEnum.SQLITE)) {
            logger.log(Level.SEVERE, "Cannot run correlation engine on a multi-user case with a SQLite central repository.");
            throw new IngestModuleException("Cannot run on a multi-user case with a SQLite central repository."); // NON-NLS
        }
        jobId = context.getJobId();

        EamDb centralRepoDb;
        try {
            centralRepoDb = EamDb.getInstance();
        } catch (EamDbException ex) {
            logger.log(Level.SEVERE, "Error connecting to central repository database.", ex); // NON-NLS
            throw new IngestModuleException("Error connecting to central repository database.", ex); // NON-NLS
        }

        try {
            filesType = centralRepoDb.getCorrelationTypeById(CorrelationAttributeInstance.FILES_TYPE_ID);
        } catch (EamDbException ex) {
            logger.log(Level.SEVERE, "Error getting correlation type FILES in ingest module start up.", ex); // NON-NLS
            throw new IngestModuleException("Error getting correlation type FILES in ingest module start up.", ex); // NON-NLS
        }
        try {
            eamCase = centralRepoDb.getCase(autopsyCase);
        } catch (EamDbException ex) {
            throw new IngestModuleException("Unable to get case from central repository database ", ex);
        }

        try {
            eamDataSource = CorrelationDataSource.fromTSKDataSource(eamCase, context.getDataSource());
        } catch (EamDbException ex) {
            logger.log(Level.SEVERE, "Error getting data source info.", ex); // NON-NLS
            throw new IngestModuleException("Error getting data source info.", ex); // NON-NLS
        }
        // TODO: once we implement a shared cache, load/init it here w/ syncronized and define reference counter
        // if we are the first thread / module for this job, then make sure the case
        // and image exist in the DB before we associate artifacts with it.
        if (refCounter.incrementAndGet(jobId)
                == 1) {
            // ensure we have this data source in the EAM DB
            try {
                if (null == centralRepoDb.getDataSource(eamCase, eamDataSource.getDataSourceObjectID())) {
                    centralRepoDb.newDataSource(eamDataSource);
                }
            } catch (EamDbException ex) {
                logger.log(Level.SEVERE, "Error adding data source to Central Repository.", ex); // NON-NLS
                throw new IngestModuleException("Error adding data source to Central Repository.", ex); // NON-NLS
            }

        }
    }

    /**
     * Post a new interesting artifact for the file marked bad.
     *
     * @param abstractFile     The file from which to create an artifact.
     * @param caseDisplayNames Case names to be added to a TSK_COMMENT attribute.
     */
    private void postCorrelatedBadFileToBlackboard(AbstractFile abstractFile, List<String> caseDisplayNames) {

        Collection<BlackboardAttribute> attributes = Arrays.asList(
                new BlackboardAttribute(
                        TSK_SET_NAME, MODULE_NAME,
                        Bundle.CentralRepoIngestModule_prevTaggedSet_text()),
                new BlackboardAttribute(
                        TSK_COMMENT, MODULE_NAME,
                        Bundle.CentralRepoIngestModule_prevCaseComment_text() + caseDisplayNames.stream().distinct().collect(Collectors.joining(","))));
        try {

            // Create artifact if it doesn't already exist.
            if (!blackboard.artifactExists(abstractFile, TSK_INTERESTING_FILE_HIT, attributes)) {
                BlackboardArtifact tifArtifact = abstractFile.newArtifact(TSK_INTERESTING_FILE_HIT);
                tifArtifact.addAttributes(attributes);

                try {
                    // index the artifact for keyword search
                    blackboard.postArtifact(tifArtifact, MODULE_NAME);
                } catch (Blackboard.BlackboardException ex) {
                    logger.log(Level.SEVERE, "Unable to index blackboard artifact " + tifArtifact.getArtifactID(), ex); //NON-NLS
                }

                // send inbox message
                sendBadFileInboxMessage(tifArtifact, abstractFile.getName(), abstractFile.getMd5Hash());
            }
        } catch (TskCoreException ex) {
            logger.log(Level.SEVERE, "Failed to create BlackboardArtifact.", ex); // NON-NLS
        } catch (IllegalStateException ex) {
            logger.log(Level.SEVERE, "Failed to create BlackboardAttribute.", ex); // NON-NLS
        }
    }

    /**
     * Post a message to the ingest inbox alerting the user that a bad file was
     * found.
     *
     * @param artifact badFile Blackboard Artifact
     * @param name     badFile's name
     * @param md5Hash  badFile's md5 hash
     */
    @Messages({"CentralRepoIngestModule.postToBB.fileName=File Name",
        "CentralRepoIngestModule.postToBB.md5Hash=MD5 Hash",
        "CentralRepoIngestModule.postToBB.hashSetSource=Source of Hash",
        "CentralRepoIngestModule.postToBB.eamHit=Central Repository",
        "# {0} - Name of file that is Notable",
        "CentralRepoIngestModule.postToBB.knownBadMsg=Notable: {0}"})
    public void sendBadFileInboxMessage(BlackboardArtifact artifact, String name, String md5Hash) {
        StringBuilder detailsSb = new StringBuilder();
        //details
        detailsSb.append("<table border='0' cellpadding='4' width='280'>"); //NON-NLS
        //hit
        detailsSb.append("<tr>"); //NON-NLS
        detailsSb.append("<th>") //NON-NLS
                .append(Bundle.CentralRepoIngestModule_postToBB_fileName())
                .append("</th>"); //NON-NLS
        detailsSb.append("<td>") //NON-NLS
                .append(name)
                .append("</td>"); //NON-NLS
        detailsSb.append("</tr>"); //NON-NLS

        detailsSb.append("<tr>"); //NON-NLS
        detailsSb.append("<th>") //NON-NLS
                .append(Bundle.CentralRepoIngestModule_postToBB_md5Hash())
                .append("</th>"); //NON-NLS
        detailsSb.append("<td>").append(md5Hash).append("</td>"); //NON-NLS
        detailsSb.append("</tr>"); //NON-NLS

        detailsSb.append("<tr>"); //NON-NLS
        detailsSb.append("<th>") //NON-NLS
                .append(Bundle.CentralRepoIngestModule_postToBB_hashSetSource())
                .append("</th>"); //NON-NLS
        detailsSb.append("<td>").append(Bundle.CentralRepoIngestModule_postToBB_eamHit()).append("</td>"); //NON-NLS
        detailsSb.append("</tr>"); //NON-NLS

        detailsSb.append("</table>"); //NON-NLS

        services.postMessage(IngestMessage.createDataMessage(CentralRepoIngestModuleFactory.getModuleName(),
                Bundle.CentralRepoIngestModule_postToBB_knownBadMsg(name),
                detailsSb.toString(),
                name + md5Hash,
                artifact));
    }
}
/*
 *  ____    _    ____  _   _ _____     ___    _
 * / ___|  / \  |  _ \| \ | |_ _\ \   / / \  | |
 * | |    / _ \ | |_) |  \| || | \ \ / / _ \ | |
 * | |___/ ___ \|  _ <| |\  || |  \ V / ___ \| |___
 * \____/_/   \_\_| \_\_| \_|___|  \_/_/   \_\_____|
 *
 *  https://github.com/yingzhuo/carnival
 */
package com.github.yingzhuo.carnival.captcha.service.google.filter;

import java.awt.image.BufferedImageOp;
import java.util.List;

/**
 * Filter factory whose image-operation chain is supplied externally rather
 * than being built internally.
 *
 * @author Piotr Piastucki
 * @since 1.10.6
 */
public class ConfigurableFilterFactory extends AbstractFilterFactory {

    // The externally-configured chain of image operations; may be unset until setFilters is called.
    private List<BufferedImageOp> imageFilters;

    /**
     * Returns the configured image-operation chain.
     *
     * @return the list of filters previously set, or null if none was configured.
     */
    @Override
    public List<BufferedImageOp> getFilters() {
        return this.imageFilters;
    }

    /**
     * Replaces the image-operation chain used by this factory.
     *
     * @param filters the new list of filters to apply.
     */
    public void setFilters(List<BufferedImageOp> filters) {
        this.imageFilters = filters;
    }
}
/*
 * Copyright (c) 2000 by Matt Welsh and The Regents of the University of
 * California. All rights reserved.
 *
 * Permission to use, copy, modify, and distribute this software and its
 * documentation for any purpose, without fee, and without written agreement is
 * hereby granted, provided that the above copyright notice and the following
 * two paragraphs appear in all copies of this software.
 *
 * IN NO EVENT SHALL THE UNIVERSITY OF CALIFORNIA BE LIABLE TO ANY PARTY FOR
 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES ARISING OUT
 * OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN IF THE UNIVERSITY OF
 * CALIFORNIA HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *
 * THE UNIVERSITY OF CALIFORNIA SPECIFICALLY DISCLAIMS ANY WARRANTIES,
 * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY
 * AND FITNESS FOR A PARTICULAR PURPOSE.  THE SOFTWARE PROVIDED HEREUNDER IS
 * ON AN "AS IS" BASIS, AND THE UNIVERSITY OF CALIFORNIA HAS NO OBLIGATION TO
 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
 *
 * Author: Matt Welsh <mdw@cs.berkeley.edu>
 *
 */

package seda.sandStorm.lib.Gnutella;

/**
 * A Gnutella push packet. Both constructors delegate to the superclass with
 * the push function code {@code GNUTELLA_FN_PUSH}.
 */
public class GnutellaPushPacket extends GnutellaPacket {

    private static final boolean DEBUG = false;

    /** Creates a push packet carrying the given payload with default routing state. */
    public GnutellaPushPacket(byte[] payload) {
        super(GNUTELLA_FN_PUSH, payload);
    }

    /** Creates a push packet with an explicit GUID, TTL, and hop count. */
    public GnutellaPushPacket(GnutellaGUID guid, int ttl, int hops, byte[] payload) {
        super(guid, GNUTELLA_FN_PUSH, ttl, hops, payload);
    }

    /** Returns a fixed descriptive name for this packet type. */
    public String toString() {
        return "GnutellaPushPacket";
    }
}
package de.fraunhofer.iem.icognicrypt.IdeSupport.projects.Outputs;

import com.intellij.openapi.components.ServiceManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.project.Project;
import de.fraunhofer.iem.icognicrypt.exceptions.CogniCryptException;
import de.fraunhofer.iem.icognicrypt.settings.IPersistableCogniCryptSettings;
import org.jetbrains.annotations.NotNull;

import javax.naming.OperationNotSupportedException;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Collection;
import java.util.HashSet;
import java.util.Set;

/**
 * Base implementation for output finders that locate built artifacts (.apk files) of a project.
 *
 * Subclasses supply the concrete discovery strategies (module outputs, exported outputs,
 * run-configuration outputs, and a user-driven dialog); this class orchestrates them,
 * validates the project, and deduplicates the results.
 */
abstract class InternalOutputFinderBase implements IOutputFinderInternal
{
    // Settings service used to obtain the default finder options.
    private final IPersistableCogniCryptSettings _settings;

    private static final Logger logger = Logger.getInstance(InternalOutputFinderBase.class);

    protected InternalOutputFinderBase()
    {
        _settings = ServiceManager.getService(IPersistableCogniCryptSettings.class);
    }

    /**
     * Finds output files using the options currently persisted in the settings.
     */
    @NotNull
    @Override
    public final Iterable<File> GetOutputFiles(Project project)
            throws OperationNotSupportedException, IOException, CogniCryptException
    {
        return GetOutputFiles(project, _settings.GetFindOutputOptions());
    }

    /**
     * Finds output files using an explicit set of finder options.
     */
    @NotNull
    @Override
    public final Iterable<File> GetOutputFiles(Project project, Set<OutputFinderOptions.Flags> options)
            throws CogniCryptException, IOException, OperationNotSupportedException
    {
        return GetOutputFilesInternal(project, options);
    }

    /**
     * Lets the user pick output files via a dialog; delegates entirely to the subclass.
     */
    @NotNull
    @Override
    public final Iterable<File> GetOutputFilesFromDialog(Project project)
    {
        return GetOutputFilesFromDialogInternal(project);
    }

    /** Discovers outputs produced by the project's modules. */
    protected abstract Collection<File> GetModuleOutputs(Project project, Set<OutputFinderOptions.Flags> options)
            throws IOException, OperationNotSupportedException;

    /** Discovers outputs that were exported (e.g. signed release builds). */
    protected abstract Collection<File> GetExportedOutputs(Project project, Set<OutputFinderOptions.Flags> options)
            throws IOException;

    /** Discovers outputs referenced by the project's run configurations. */
    protected abstract Collection<File> GetRunConfigurationOutput(Project project,
            Set<OutputFinderOptions.Flags> options) throws IOException, OperationNotSupportedException;

    /** Implements the dialog-based file selection. */
    protected abstract Iterable<File> GetOutputFilesFromDialogInternal(Project project);

    /**
     * Core lookup: validates the project, then merges the results of all enabled
     * discovery strategies into a duplicate-free set.
     *
     * @throws CogniCryptException if the project root is missing or the project is unsupported
     */
    @NotNull
    protected final Iterable<File> GetOutputFilesInternal(Project project, Set<OutputFinderOptions.Flags> options)
            throws CogniCryptException, IOException, OperationNotSupportedException
    {
        logger.info("Try finding all built .apk files with options: " + options);

        // Project.getBasePath() is documented as nullable (e.g. default project);
        // fail with a domain exception instead of an NPE from Paths.get(null).
        String basePath = project.getBasePath();
        if (basePath == null)
            throw new CogniCryptException("Root path of the project does not exist.");

        Path projectPath = Paths.get(basePath);
        if (!Files.exists(projectPath))
            throw new CogniCryptException("Root path of the project does not exist.");

        if (!ValidateProject(project))
            throw new CogniCryptException("The project is not valid or supported by this IDE");

        // HashSet deduplicates files found by more than one strategy.
        HashSet<File> result = new HashSet<>();
        if (!options.isEmpty())
        {
            result.addAll(GetRunConfigurationOutput(project, options));
            result.addAll(GetModuleOutputs(project, options));
            result.addAll(GetExportedOutputs(project, options));
        }

        if (result.isEmpty())
            logger.info("Could not find any file");

        return result;
    }

    /**
     * Hook for subclasses to reject unsupported projects; accepts everything by default.
     */
    protected boolean ValidateProject(Project project)
    {
        return true;
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ignite.configuration.schemas.table;

import org.apache.ignite.configuration.annotation.Config;
import org.apache.ignite.configuration.annotation.Value;
import org.apache.ignite.configuration.validation.Immutable;

/**
 * Configuration for primary key constraint in SQL table.
 */
@Config
public class PrimaryKeyConfigurationSchema {
    /** Primary key columns names. Required (no default) and immutable once set. */
    @Value
    @Immutable
    public String[] columns;

    /**
     * Primary key affinity columns names.
     * Defaults to an empty array, i.e. no dedicated affinity columns configured.
     */
    @Value(hasDefault = true)
    @Immutable
    public String[] affinityColumns = new String[0];
}
/*
 * Copyright 2011 Tyler Blair. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without modification, are
 * permitted provided that the following conditions are met:
 *
 *    1. Redistributions of source code must retain the above copyright notice, this list of
 *       conditions and the following disclaimer.
 *
 *    2. Redistributions in binary form must reproduce the above copyright notice, this list
 *       of conditions and the following disclaimer in the documentation and/or other materials
 *       provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ''AS IS'' AND ANY EXPRESS OR IMPLIED
 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
 * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
 * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
 * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
 * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
 * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *
 * The views and conclusions contained in the software and documentation are those of the
 * authors and contributors and should not be interpreted as representing official policies,
 * either expressed or implied, of anybody else.
 */

package com.griefcraft.migration;

import com.griefcraft.lwc.LWC;
import com.griefcraft.modules.pluginsupport.WorldGuard;
import org.bukkit.Material;

import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.logging.Logger;

/**
 * One-shot migration that converts the legacy {@code lwc.properties} file into the
 * post-3.00 configuration layout, then deletes the legacy file.
 */
public class ConfigPost300 implements MigrationUtility {

    private static Logger logger = Logger.getLogger("Patcher");

    // contains the core config equivalent key pairs
    // e.g locale->core.locale
    private static Map<String, String> mappings = null;

    /**
     * Runs the migration. A no-op when {@code plugins/LWC/lwc.properties} does not exist.
     */
    public void run() {
        LWC lwc = LWC.getInstance();
        File configFile = new File("plugins/LWC/lwc.properties");

        if (!configFile.exists()) {
            return;
        }

        // delete internal.ini
        new File("plugins/LWC/internal.ini").delete();

        logger.info("Converting lwc.properties to new variants");

        // we need to convert..
        populate();

        // load lwc.properties — try-with-resources so the stream is closed
        // even when Properties.load fails.
        Properties old = new Properties();
        try (InputStream inputStream = new FileInputStream(configFile)) {
            old.load(inputStream);
        } catch (Exception e) {
            e.printStackTrace();
            return;
        }

        // convert the easy to do mappings, coercing values to int/boolean where possible
        for (Map.Entry<String, String> entry : mappings.entrySet()) {
            String oldKey = entry.getKey();
            String newKey = entry.getValue();
            String value = old.getProperty(oldKey, "");

            try {
                lwc.getConfiguration().setProperty(newKey, Integer.parseInt(value));
            } catch (NumberFormatException e) {
                if (value.equalsIgnoreCase("true") || value.equalsIgnoreCase("false")) {
                    lwc.getConfiguration().setProperty(newKey, Boolean.parseBoolean(value));
                } else {
                    lwc.getConfiguration().setProperty(newKey, value);
                }
            }
        }

        // custom mappings, can't be easily done

        // protection blacklist
        String protectionBlacklist = old.getProperty("protection-blacklist", "").trim();
        if (!protectionBlacklist.isEmpty()) {
            String[] split = protectionBlacklist.replaceAll(" ", "_").split(",");

            for (String protection : split) {
                // FIX: the old code compared strings with != (reference identity, effectively
                // always true) and then dereferenced Material.getMaterial() without a null
                // check, which NPE'd on unknown names. Resolve the material first and only
                // normalise the name when the lookup succeeds.
                Material material = Material.getMaterial(protection);

                if (material != null && material != Material.AIR) {
                    protection = material.toString().toLowerCase().replaceAll("block", "");

                    // strip a trailing underscore left over after removing "block"
                    if (protection.endsWith("_")) {
                        protection = protection.substring(0, protection.length() - 1);
                    }
                }

                lwc.getConfiguration().setProperty("protections.blocks." + protection + ".enabled", false);
            }
        }

        // WorldGuard
        String enforceWorldGuard = old.getProperty("enforce-worldguard-regions");

        if (Boolean.parseBoolean(enforceWorldGuard)) {
            WorldGuard worldGuard = (WorldGuard) lwc.getModuleLoader().getModule(WorldGuard.class);

            if (worldGuard != null) {
                // default to "" so a missing key yields an empty region list instead of an NPE
                String oldRegions = old.getProperty("worldguard-allowed-regions", "");
                List<String> regions = Arrays.asList(oldRegions.split(","));

                worldGuard.set("worldguard.enabled", true);
                worldGuard.set("worldguard.regions", regions);
                worldGuard.save();
            }
        }

        // we're done, free up the mappings & save
        mappings = null;
        lwc.getConfiguration().save();
        configFile.delete();
    }

    // populate the mappings table with well the mappings
    private static void populate() {
        mappings = new HashMap<>();
        mappings.put("allow-block-destruction", "protections.allowBlockDestruction");
        mappings.put("auto-update", "core.autoUpdate");
        mappings.put("database", "database.adapter");
        mappings.put("db-path", "database.path");
        mappings.put("default-menu-style", "core.defaultMenuStyle");
        mappings.put("deny-redstone", "protections.denyRedstone");
        mappings.put("flush-db-interval", "core.flushInterval");
        mappings.put("locale", "core.locale");
        mappings.put("mysql-database", "database.database");
        mappings.put("mysql-host", "database.host");
        mappings.put("mysql-pass", "database.password");
        mappings.put("mysql-user", "database.username");
        mappings.put("show-protection-notices", "core.showNotices");
        mappings.put("verbose", "core.verbose");
        mappings.put("op-is-lwcadmin", "core.opIsLWCAdmin");
    }
}
/**
 * Copyright (c) 2003, www.pdfbox.org
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * 1. Redistributions of source code must retain the above copyright notice,
 *    this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright notice,
 *    this list of conditions and the following disclaimer in the documentation
 *    and/or other materials provided with the distribution.
 * 3. Neither the name of pdfbox; nor the names of its
 *    contributors may be used to endorse or promote products derived from this
 *    software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE FOR ANY
 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
 * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *
 * http://www.pdfbox.org
 *
 */
package org.pdfbox.pdmodel.encryption;

import org.pdfbox.cos.COSDictionary;
import org.pdfbox.cos.COSInteger;
import org.pdfbox.cos.COSName;
import org.pdfbox.cos.COSNumber;
import org.pdfbox.cos.COSString;

import java.io.IOException;

/**
 * This class holds information that is related to the standard PDF encryption.
 *
 * See PDF Reference 1.4 section "3.5 Encryption"
 *
 * All accessors below read/write entries of the underlying encryption COSDictionary
 * (keys "R", "O", "U", "P"); the permission helpers manipulate individual bits of the
 * "P" entry using the 1-based bit positions defined by the PDF specification.
 *
 * @author  <a href="mailto:ben@benlitchfield.com">Ben Litchfield</a>
 * @version $Revision: 1.7 $
 * @deprecated Made deprecated by the new security layer of PDFBox. Use SecurityHandlers instead.
 */
public class PDStandardEncryption extends PDEncryptionDictionary
{
    /**
     * The 'Filter' name for this security handler.
     */
    public static final String FILTER_NAME = "Standard";

    /**
     * The default revision of one is not specified.
     */
    public static final int DEFAULT_REVISION = 3;

    /**
     * Encryption revision 2.
     */
    public static final int REVISION2 = 2;
    /**
     * Encryption revision 3.
     */
    public static final int REVISION3 = 3;
    /**
     * Encryption revision 4.
     */
    public static final int REVISION4 = 4;

    /**
     * The default set of permissions which is to allow all.
     */
    public static final int DEFAULT_PERMISSIONS = 0xFFFFFFFF ^ 3;//bits 0 & 1 need to be zero

    // 1-based bit positions within the "P" permissions value (PDF Reference 1.4, Table 3.15).
    private static final int PRINT_BIT = 3;
    private static final int MODIFICATION_BIT = 4;
    private static final int EXTRACT_BIT = 5;
    private static final int MODIFY_ANNOTATIONS_BIT = 6;
    private static final int FILL_IN_FORM_BIT = 9;
    private static final int EXTRACT_FOR_ACCESSIBILITY_BIT = 10;
    private static final int ASSEMBLE_DOCUMENT_BIT = 11;
    private static final int DEGRADED_PRINT_BIT = 12;

    /**
     * Default constructor that uses Version 2, Revision 3, 40 bit encryption,
     * all permissions allowed.
     */
    public PDStandardEncryption()
    {
        super();
        encryptionDictionary.setItem( COSName.FILTER, COSName.getPDFName( FILTER_NAME ) );
        setVersion( PDEncryptionDictionary.VERSION1_40_BIT_ALGORITHM );
        setRevision( PDStandardEncryption.REVISION2 );
        setPermissions( DEFAULT_PERMISSIONS );
    }

    /**
     * Constructor from existing dictionary.
     *
     * @param dict The existing encryption dictionary.
     */
    public PDStandardEncryption( COSDictionary dict )
    {
        super( dict );
    }

    /**
     * This will return the R entry of the encryption dictionary.<br /><br />
     * See PDF Reference 1.4 Table 3.14.
     *
     * @return The encryption revision to use.
     */
    public int getRevision()
    {
        // NOTE(review): falls back to DEFAULT_VERSION (inherited from the superclass)
        // rather than this class's DEFAULT_REVISION, which is declared but never used.
        // This may be intentional legacy behavior — confirm before changing.
        int revision = DEFAULT_VERSION;
        COSNumber cosRevision = (COSNumber)encryptionDictionary.getDictionaryObject( COSName.getPDFName( "R" ) );
        if( cosRevision != null )
        {
            revision = cosRevision.intValue();
        }
        return revision;
    }

    /**
     * This will set the R entry of the encryption dictionary.<br /><br />
     * See PDF Reference 1.4 Table 3.14.  <br /><br/>
     *
     * <b>Note: This value is used to decrypt the pdf document.  If you change this when
     * the document is encrypted then decryption will fail!.</b>
     *
     * @param revision The new encryption version.
     */
    public void setRevision( int revision )
    {
        encryptionDictionary.setItem( COSName.getPDFName( "R" ), new COSInteger( revision ) );
    }

    /**
     * This will get the O entry in the standard encryption dictionary.
     *
     * @return A 32 byte array or null if there is no owner key.
     */
    public byte[] getOwnerKey()
    {
        byte[] o = null;
        COSString owner = (COSString)encryptionDictionary.getDictionaryObject( COSName.getPDFName( "O" ) );
        if( owner != null )
        {
            o = owner.getBytes();
        }
        return o;
    }

    /**
     * This will set the O entry in the standard encryption dictionary.
     *
     * @param o A 32 byte array or null if there is no owner key.
     *
     * @throws IOException If there is an error setting the data.
     */
    public void setOwnerKey( byte[] o ) throws IOException
    {
        COSString owner = new COSString();
        owner.append( o );
        encryptionDictionary.setItem( COSName.getPDFName( "O" ), owner );
    }

    /**
     * This will get the U entry in the standard encryption dictionary.
     *
     * @return A 32 byte array or null if there is no user key.
     */
    public byte[] getUserKey()
    {
        byte[] u = null;
        COSString user = (COSString)encryptionDictionary.getDictionaryObject( COSName.getPDFName( "U" ) );
        if( user != null )
        {
            u = user.getBytes();
        }
        return u;
    }

    /**
     * This will set the U entry in the standard encryption dictionary.
     *
     * @param u A 32 byte array.
     *
     * @throws IOException If there is an error setting the data.
     */
    public void setUserKey( byte[] u ) throws IOException
    {
        COSString user = new COSString();
        user.append( u );
        encryptionDictionary.setItem( COSName.getPDFName( "U" ), user );
    }

    /**
     * This will get the permissions bit mask.
     *
     * @return The permissions bit mask.
     */
    public int getPermissions()
    {
        int permissions = 0;
        COSInteger p = (COSInteger)encryptionDictionary.getDictionaryObject( COSName.getPDFName( "P" ) );
        if( p != null )
        {
            permissions = p.intValue();
        }
        return permissions;
    }

    /**
     * This will set the permissions bit mask.
     *
     * @param p The new permissions bit mask
     */
    public void setPermissions( int p )
    {
        encryptionDictionary.setItem( COSName.getPDFName( "P" ), new COSInteger( p ) );
    }

    // Tests whether the given 1-based bit of the permissions mask is set.
    private boolean isPermissionBitOn( int bit )
    {
        return (getPermissions() & (1 << (bit-1))) != 0;
    }

    // Sets or clears the given 1-based bit of the permissions mask and
    // returns the bit's state after the update.
    private boolean setPermissionBit( int bit, boolean value )
    {
        int permissions = getPermissions();
        if( value )
        {
            permissions = permissions | (1 << (bit-1));
        }
        else
        {
            permissions = permissions & (0xFFFFFFFF ^ (1 << (bit-1)));
        }
        setPermissions( permissions );
        return (getPermissions() & (1 << (bit-1))) != 0;
    }

    /**
     * This will tell if the user can print.
     *
     * @return true If supplied with the user password they are allowed to print.
     */
    public boolean canPrint()
    {
        return isPermissionBitOn( PRINT_BIT );
    }

    /**
     * Set if the user can print.
     *
     * @param allowPrinting A boolean determining if the user can print.
     */
    public void setCanPrint( boolean allowPrinting )
    {
        setPermissionBit( PRINT_BIT, allowPrinting );
    }

    /**
     * This will tell if the user can modify contents of the document.
     *
     * @return true If supplied with the user password they are allowed to modify the document
     */
    public boolean canModify()
    {
        return isPermissionBitOn( MODIFICATION_BIT );
    }

    /**
     * Set if the user can modify the document.
     *
     * @param allowModifications A boolean determining if the user can modify the document.
     */
    public void setCanModify( boolean allowModifications )
    {
        setPermissionBit( MODIFICATION_BIT, allowModifications );
    }

    /**
     * This will tell if the user can extract text and images from the PDF document.
     *
     * @return true If supplied with the user password they are allowed to extract content
     *              from the PDF document
     */
    public boolean canExtractContent()
    {
        return isPermissionBitOn( EXTRACT_BIT );
    }

    /**
     * Set if the user can extract content from the document.
     *
     * @param allowExtraction A boolean determining if the user can extract content
     *                        from the document.
     */
    public void setCanExtractContent( boolean allowExtraction )
    {
        setPermissionBit( EXTRACT_BIT, allowExtraction );
    }

    /**
     * This will tell if the user can add/modify text annotations, fill in interactive forms fields.
     *
     * @return true If supplied with the user password they are allowed to modify annotations.
     */
    public boolean canModifyAnnotations()
    {
        return isPermissionBitOn( MODIFY_ANNOTATIONS_BIT );
    }

    /**
     * Set if the user can modify annotations.
     *
     * @param allowAnnotationModification A boolean determining if the user can modify annotations.
     */
    public void setCanModifyAnnotations( boolean allowAnnotationModification )
    {
        setPermissionBit( MODIFY_ANNOTATIONS_BIT, allowAnnotationModification );
    }

    /**
     * This will tell if the user can fill in interactive forms.
     *
     * @return true If supplied with the user password they are allowed to fill in form fields.
     */
    public boolean canFillInForm()
    {
        return isPermissionBitOn( FILL_IN_FORM_BIT );
    }

    /**
     * Set if the user can fill in interactive forms.
     *
     * @param allowFillingInForm A boolean determining if the user can fill in interactive forms.
     */
    public void setCanFillInForm( boolean allowFillingInForm )
    {
        setPermissionBit( FILL_IN_FORM_BIT, allowFillingInForm );
    }

    /**
     * This will tell if the user can extract text and images from the PDF document
     * for accessibility purposes.
     *
     * @return true If supplied with the user password they are allowed to extract content
     *              from the PDF document
     */
    public boolean canExtractForAccessibility()
    {
        return isPermissionBitOn( EXTRACT_FOR_ACCESSIBILITY_BIT );
    }

    /**
     * Set if the user can extract content from the document for accessibility purposes.
     *
     * @param allowExtraction A boolean determining if the user can extract content
     *                        from the document.
     */
    public void setCanExtractForAccessibility( boolean allowExtraction )
    {
        setPermissionBit( EXTRACT_FOR_ACCESSIBILITY_BIT, allowExtraction );
    }

    /**
     * This will tell if the user can insert/rotate/delete pages.
     *
     * @return true If supplied with the user password they are allowed to extract content
     *              from the PDF document
     */
    public boolean canAssembleDocument()
    {
        return isPermissionBitOn( ASSEMBLE_DOCUMENT_BIT );
    }

    /**
     * Set if the user can insert/rotate/delete pages.
     *
     * @param allowAssembly A boolean determining if the user can assemble the document.
     */
    public void setCanAssembleDocument( boolean allowAssembly )
    {
        setPermissionBit( ASSEMBLE_DOCUMENT_BIT, allowAssembly );
    }

    /**
     * This will tell if the user can print the document in a degraded format.
     *
     * @return true If supplied with the user password they are allowed to print the
     *              document in a degraded format.
     */
    public boolean canPrintDegraded()
    {
        return isPermissionBitOn( DEGRADED_PRINT_BIT );
    }

    /**
     * Set if the user can print the document in a degraded format.
     *
     * @param allowAssembly A boolean determining if the user can print the
     *                      document in a degraded format.
     */
    public void setCanPrintDegraded( boolean allowAssembly )
    {
        setPermissionBit( DEGRADED_PRINT_BIT, allowAssembly );
    }
}
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *       http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.aliyuncs.domain.model.v20180129;

import java.util.Map;

import com.aliyuncs.AcsResponse;
import com.aliyuncs.domain.transform.v20180129.CheckDomainResponseUnmarshaller;
import com.aliyuncs.transform.UnmarshallerContext;

/**
 * Response model for the CheckDomain API: carries the availability verdict,
 * pricing and premium flags for the queried domain name.
 *
 * @author auto create
 * @version
 */
public class CheckDomainResponse extends AcsResponse {

    private String requestId;

    private String domainName;

    private String avail;

    private String premium;

    private String reason;

    private Long price;

    private Boolean dynamicCheck;

    /** Unique identifier of this API request. */
    public String getRequestId() {
        return requestId;
    }

    public void setRequestId(String requestId) {
        this.requestId = requestId;
    }

    /** The domain name that was checked. */
    public String getDomainName() {
        return domainName;
    }

    public void setDomainName(String domainName) {
        this.domainName = domainName;
    }

    /** Availability flag returned by the service. */
    public String getAvail() {
        return avail;
    }

    public void setAvail(String avail) {
        this.avail = avail;
    }

    /** Whether the domain is a premium domain. */
    public String getPremium() {
        return premium;
    }

    public void setPremium(String premium) {
        this.premium = premium;
    }

    /** Service-supplied reason accompanying the availability result. */
    public String getReason() {
        return reason;
    }

    public void setReason(String reason) {
        this.reason = reason;
    }

    /** Price quoted for the domain, if any. */
    public Long getPrice() {
        return price;
    }

    public void setPrice(Long price) {
        this.price = price;
    }

    /** Whether the check was performed dynamically. */
    public Boolean getDynamicCheck() {
        return dynamicCheck;
    }

    public void setDynamicCheck(Boolean dynamicCheck) {
        this.dynamicCheck = dynamicCheck;
    }

    /** Populates this instance from the raw unmarshaller context. */
    @Override
    public CheckDomainResponse getInstance(UnmarshallerContext context) {
        return CheckDomainResponseUnmarshaller.unmarshall(this, context);
    }
}
package com.sxzheng.tasklibrary;

/**
 * Abstract marker base for timer-driven stack tasks.
 *
 * Adds no behavior of its own on top of {@code Task}; the type parameter
 * {@code T} is declared for subclasses but unused here.
 * NOTE(review): {@code T} is never referenced in this class — presumably
 * consumed by subclasses; confirm it is still needed.
 *
 * @author zheng.
 */
public abstract class TimerStackTask<T> extends Task {
}
package com.doo.xenchant.mixin;

import com.doo.xenchant.events.ItemApi;
import com.doo.xenchant.util.EnchantUtil;
import com.google.common.collect.Multimap;
import net.minecraft.server.level.ServerPlayer;
import net.minecraft.world.entity.EquipmentSlot;
import net.minecraft.world.entity.ai.attributes.Attribute;
import net.minecraft.world.entity.ai.attributes.AttributeModifier;
import net.minecraft.world.item.ItemStack;
import org.spongepowered.asm.mixin.Mixin;
import org.spongepowered.asm.mixin.injection.At;
import org.spongepowered.asm.mixin.injection.Inject;
import org.spongepowered.asm.mixin.injection.ModifyVariable;
import org.spongepowered.asm.mixin.injection.callback.CallbackInfoReturnable;

import java.util.Random;

/**
 * Mixin into vanilla {@link ItemStack} that fires the mod's item-event hooks.
 *
 * Because this class is merged into ItemStack at load time, {@code this} inside
 * the injected methods refers to the actual ItemStack instance, which is why it
 * can be passed to {@code EnchantUtil.get(this)}.
 */
@Mixin(ItemStack.class)
public abstract class ItemStackMixin {

    /**
     * Fires the WILL_DAMAGE event just before vanilla queries the Unbreaking
     * enchantment level inside {@code ItemStack.hurt}.
     * Injection target: EnchantmentHelper.getItemEnchantmentLevel — do not change
     * the descriptor without re-checking the mapped vanilla bytecode.
     */
    @Inject(method = "hurt", at = @At(value = "INVOKE", target = "Lnet/minecraft/world/item/enchantment/EnchantmentHelper;getItemEnchantmentLevel(Lnet/minecraft/world/item/enchantment/Enchantment;Lnet/minecraft/world/item/ItemStack;)I"))
    private void itemUsedCallback(int amount, Random random, ServerPlayer player, CallbackInfoReturnable<Boolean> cir) {
        ItemApi.WILL_DAMAGE.invoker().call(player, EnchantUtil.get(this), amount);
    }

    /**
     * Lets the GET_MODIFIER hook mutate the attribute-modifier map returned by
     * {@code ItemStack.getAttributeModifiers} for the given equipment slot.
     * The (possibly mutated) map is returned so the local variable is replaced.
     */
    @ModifyVariable(method = "getAttributeModifiers", at = @At(value = "LOAD"))
    private Multimap<Attribute, AttributeModifier> addEnchantmentT(Multimap<Attribute, AttributeModifier> map, EquipmentSlot slot) {
        ItemApi.GET_MODIFIER.invoker().mod(map, EnchantUtil.get(this), slot);
        return map;
    }
}
package br.com.randomthings.services.sub_category;

import br.com.randomthings.domain.SubCategory;
import br.com.randomthings.services.IService;

/**
 * Service contract for {@link SubCategory} entities keyed by {@code Long} ids.
 *
 * Declares no operations of its own — all CRUD behavior is inherited from the
 * generic {@code IService} interface; this type exists to give the dependency
 * injection container a concrete service role to bind.
 */
public interface SubCategoryService extends IService<SubCategory, Long> {

}
package uk.gov.hmcts.probate.transformer.reset; import lombok.AllArgsConstructor; import lombok.extern.slf4j.Slf4j; import org.springframework.stereotype.Component; import uk.gov.hmcts.probate.model.ccd.raw.request.CaseData; import uk.gov.hmcts.probate.model.ccd.raw.response.ResponseCaseData; import uk.gov.hmcts.probate.service.TitleAndClearingTypeService; import static uk.gov.hmcts.probate.model.Constants.NO; @Component @Slf4j @AllArgsConstructor public class ResetResponseCaseDataTransformer { private final TitleAndClearingTypeService titleAndClearingTypeService; public void resetTitleAndClearingFields(CaseData caseData, ResponseCaseData.ResponseCaseDataBuilder<?, ?> builder) { if (titleAndClearingTypeService.partnerTitleAndClearingOptionSelected(caseData)) { nullTrustCorpOptions(builder); if (!titleAndClearingTypeService.successorFirmTitleAndClearingOptionSelected(caseData)) { builder.nameOfSucceededFirm(null); } } else if (titleAndClearingTypeService.trustCorpTitleAndClearingOptionSelected(caseData)) { nullPartnerOptions(builder); } else { nullTrustCorpOptions(builder); nullPartnerOptions(builder); } if (NO.equals(caseData.getDispenseWithNotice())) { nullDispenseWithNoticeOptions(builder); } } private void nullTrustCorpOptions(ResponseCaseData.ResponseCaseDataBuilder<?, ?> builder) { builder .additionalExecutorsTrustCorpList(null) .trustCorpName(null) .trustCorpAddress(null) .lodgementAddress(null) .lodgementDate(null); } private void nullPartnerOptions(ResponseCaseData.ResponseCaseDataBuilder<?, ?> builder) { builder .otherPartnersApplyingAsExecutors(null) .nameOfSucceededFirm(null) .nameOfFirmNamedInWill(null) .addressOfFirmNamedInWill(null) .addressOfSucceededFirm(null) .whoSharesInCompanyProfits(null); } private void nullDispenseWithNoticeOptions(ResponseCaseData.ResponseCaseDataBuilder<?, ?> builder) { builder .dispenseWithNoticeOtherExecsList(null) .dispenseWithNoticeLeaveGiven(null) .dispenseWithNoticeLeaveGivenDate(null) .dispenseWithNoticeOverview(null) 
.dispenseWithNoticeSupportingDocs(null); } }
// Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.openapi.updateSettings.impl;

import com.intellij.ide.IdeBundle;
import com.intellij.ide.plugins.*;
import com.intellij.ide.startup.StartupActionScriptManager;
import com.intellij.openapi.application.*;
import com.intellij.openapi.application.ex.ApplicationInfoEx;
import com.intellij.openapi.application.impl.ApplicationInfoImpl;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.extensions.PluginId;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.util.BuildNumber;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.util.PathUtil;
import com.intellij.util.Urls;
import com.intellij.util.io.HttpRequests;
import com.intellij.util.io.ZipUtil;
import com.intellij.util.text.VersionComparatorUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.io.File;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.net.URLConnection;
import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.Date;
import java.util.List;
import java.util.Map;

/**
 * Downloads a plugin distribution for install/update and prepares it for
 * installation on the next restart. Typical flow: {@link #createDownloader},
 * then {@link #prepareToInstall} (downloads the file and validates the
 * descriptor), then {@link #install()}.
 *
 * @author anna
 */
public class PluginDownloader {
  private static final Logger LOG = Logger.getInstance(PluginDownloader.class);

  // Key searched for inside the Content-Disposition header when guessing the download's file name.
  private static final String FILENAME = "filename=";

  private final String myPluginId;
  private final String myPluginName;
  private final @Nullable String myProductCode;
  private final Date myReleaseDate;
  private final int myReleaseVersion;
  private final String myDescription;
  private final List<PluginId> myDepends;
  private final String myPluginUrl;
  private final BuildNumber myBuildNumber;

  // Mutable state: refined after download once the real descriptor inside the archive is read.
  private String myPluginVersion;
  private IdeaPluginDescriptor myDescriptor;
  private File myFile;     // downloaded archive; null until prepareToInstall succeeds
  private File myOldFile;  // previously installed (non-bundled) plugin file, kept for replacement

  private PluginDownloader(IdeaPluginDescriptor descriptor, String url, BuildNumber buildNumber) {
    myPluginId = descriptor.getPluginId().getIdString();
    myPluginName = descriptor.getName();
    myProductCode = descriptor.getProductCode();
    myReleaseDate = descriptor.getReleaseDate();
    myReleaseVersion = descriptor.getReleaseVersion();
    myDescription = descriptor.getDescription();
    // PluginNode carries its own dependency list; otherwise fall back to the declared dependent plugin ids.
    myDepends = descriptor instanceof PluginNode ? ((PluginNode)descriptor).getDepends() : Arrays.asList(descriptor.getDependentPluginIds());
    myPluginUrl = url;
    myBuildNumber = buildNumber;
    myPluginVersion = descriptor.getVersion();
    myDescriptor = descriptor;
  }

  @NotNull
  public String getPluginId() {
    return myPluginId;
  }

  public String getPluginVersion() {
    return myPluginVersion;
  }

  /** Plugin display name, falling back to the id when no name is set. */
  @NotNull
  public String getPluginName() {
    return myPluginName != null ? myPluginName : myPluginId;
  }

  @Nullable
  public String getProductCode() {
    return myProductCode;
  }

  public Date getReleaseDate() {
    return myReleaseDate;
  }

  public int getReleaseVersion() {
    return myReleaseVersion;
  }

  @Nullable
  public BuildNumber getBuildNumber() {
    return myBuildNumber;
  }

  @NotNull
  public IdeaPluginDescriptor getDescriptor() {
    return myDescriptor;
  }

  /** The downloaded archive, or null if {@link #prepareToInstall} has not completed successfully. */
  public File getFile() {
    return myFile;
  }

  /**
   * Downloads the plugin and validates it against the currently installed version.
   *
   * @return true when the downloaded plugin is a genuine, compatible update
   *         (or nothing was previously installed); false when the download failed,
   *         the plugin is already up to date / already updated this session, or
   *         the downloaded version is incompatible with this build.
   * @throws IOException on descriptor-loading failures (download failures are reported via dialog instead)
   */
  public boolean prepareToInstall(@NotNull ProgressIndicator indicator) throws IOException {
    if (myFile != null) {
      // Already downloaded by a previous call.
      return true;
    }

    IdeaPluginDescriptor descriptor = null;
    if (!Boolean.getBoolean(StartupActionScriptManager.STARTUP_WIZARD_MODE) && PluginManagerCore.isPluginInstalled(PluginId.getId(myPluginId))) {
      //store old plugins file
      descriptor = PluginManagerCore.getPlugin(PluginId.getId(myPluginId));
      LOG.assertTrue(descriptor != null);
      // Skip the download entirely when the advertised version is not newer than what is installed.
      if (myPluginVersion != null && compareVersionsSkipBrokenAndIncompatible(descriptor, myPluginVersion) <= 0) {
        LOG.info("Plugin " + myPluginId + ": current version (max) " + myPluginVersion);
        return false;
      }
      // Bundled plugins have no standalone file to replace.
      myOldFile = descriptor.isBundled() ? null : descriptor.getPath();
    }

    // download plugin
    String errorMessage = null;
    try {
      myFile = downloadPlugin(indicator);
    }
    catch (IOException ex) {
      myFile = null;
      LOG.warn(ex);
      errorMessage = ex.getMessage();
    }
    if (myFile == null) {
      // Report the failure to the user (when an application is available) instead of propagating.
      Application app = ApplicationManager.getApplication();
      if (app != null) {
        if (errorMessage == null) {
          errorMessage = IdeBundle.message("unknown.error");
        }
        String text = IdeBundle.message("error.plugin.was.not.installed", getPluginName(), errorMessage);
        String title = IdeBundle.message("title.failed.to.download");
        app.invokeLater(() -> Messages.showErrorDialog(text, title), ModalityState.any());
      }
      return false;
    }

    // Re-validate against the descriptor actually inside the downloaded archive —
    // it is authoritative over whatever metadata the repository advertised.
    IdeaPluginDescriptorImpl actualDescriptor = loadDescriptionFromJar(myFile);
    if (actualDescriptor != null) {
      InstalledPluginsState state = InstalledPluginsState.getInstanceIfLoaded();
      if (state != null && state.wasUpdated(actualDescriptor.getPluginId())) {
        return false; //already updated
      }

      myPluginVersion = actualDescriptor.getVersion();
      if (descriptor != null && compareVersionsSkipBrokenAndIncompatible(descriptor, myPluginVersion) <= 0) {
        LOG.info("Plugin " + myPluginId + ": current version (max) " + myPluginVersion);
        return false; //was not updated
      }

      myDescriptor = actualDescriptor;

      if (PluginManagerCore.isIncompatible(actualDescriptor, myBuildNumber)) {
        LOG.info("Plugin " + myPluginId + " is incompatible with current installation " +
                 "(since:" + actualDescriptor.getSinceBuild() + " until:" + actualDescriptor.getUntilBuild() + ")");
        return false; //host outdated plugins, no compatible plugin for new version
      }
    }

    return true;
  }

  /**
   * Compares the candidate version against an installed plugin's version, but treats
   * broken or incompatible installed plugins as always older (so they get replaced).
   *
   * @return positive when newPluginVersion should replace the existing plugin
   */
  public static int compareVersionsSkipBrokenAndIncompatible(@NotNull IdeaPluginDescriptor existingPlugin, String newPluginVersion) {
    int state = comparePluginVersions(newPluginVersion, existingPlugin.getVersion());
    if (state < 0 && (PluginManagerCore.isBrokenPlugin(existingPlugin) || PluginManagerCore.isIncompatible(existingPlugin))) {
      state = 1;
    }
    return state;
  }

  /** Plain version-string comparison (no broken/incompatible special-casing). */
  public static int comparePluginVersions(String newPluginVersion, String oldPluginVersion) {
    return VersionComparatorUtil.compare(newPluginVersion, oldPluginVersion);
  }

  /**
   * Loads the plugin descriptor from a downloaded file. For .zip archives that
   * don't expose a descriptor directly, extracts to a temp dir and, if the archive
   * contains exactly one top-level entry, reads the descriptor from it.
   *
   * @return the descriptor, or null when none could be located
   */
  @Nullable
  public static IdeaPluginDescriptorImpl loadDescriptionFromJar(final File file) throws IOException {
    IdeaPluginDescriptorImpl descriptor = PluginManagerCore.loadDescriptor(file, PluginManagerCore.PLUGIN_XML);
    if (descriptor == null) {
      if (file.getName().endsWith(".zip")) {
        final File outputDir = FileUtil.createTempDirectory("plugin", "");
        try {
          ZipUtil.extract(file, outputDir, null);
          final File[] files = outputDir.listFiles();
          if (files != null && files.length == 1) {
            descriptor = PluginManagerCore.loadDescriptor(files[0], PluginManagerCore.PLUGIN_XML);
          }
        }
        finally {
          // Always clean up the extraction directory, even when loading fails.
          FileUtil.delete(outputDir);
        }
      }
    }
    return descriptor;
  }

  /**
   * Schedules the downloaded plugin for installation after restart and records
   * the install in the session state.
   *
   * @throws IOException when {@link #prepareToInstall} has not produced a file
   */
  public void install() throws IOException {
    if (myFile == null) {
      throw new IOException("Plugin '" + getPluginName() + "' was not successfully downloaded");
    }
    PluginInstaller.installAfterRestart(myFile, true, myOldFile, myDescriptor);

    InstalledPluginsState state = InstalledPluginsState.getInstanceIfLoaded();
    if (state != null) {
      state.onPluginInstall(myDescriptor, PluginManagerCore.isPluginInstalled(myDescriptor.getPluginId()), true);
    }
  }

  /**
   * Downloads the plugin archive into the plugins temp directory and renames it
   * to the server-suggested file name.
   */
  @NotNull
  private File downloadPlugin(@NotNull ProgressIndicator indicator) throws IOException {
    File pluginsTemp = new File(PathManager.getPluginTempPath());
    if (!pluginsTemp.exists() && !pluginsTemp.mkdirs()) {
      throw new IOException(IdeBundle.message("error.cannot.create.temp.dir", pluginsTemp));
    }
    indicator.checkCanceled();
    indicator.setText2(IdeBundle.message("progress.downloading.plugin", getPluginName()));

    File file = FileUtil.createTempFile(pluginsTemp, "plugin_", "_download", true, false);
    return HttpRequests.request(myPluginUrl).gzip(false).productNameAsUserAgent().connect(request -> {
      request.saveToFile(file, indicator);
      // Rename the temp file to the name advertised by the server (or derived from the URL).
      String fileName = guessFileName(request.getConnection(), file);
      File newFile = new File(file.getParentFile(), fileName);
      FileUtil.rename(file, newFile);
      return newFile;
    });
  }

  /**
   * Derives a file name for the download: first from the Content-Disposition
   * header (stripping surrounding quotes), then from the last segment of the
   * effective URL, finally from the original request URL. Deletes the temp file
   * and throws when the result is not a valid file name.
   */
  @NotNull
  private String guessFileName(@NotNull URLConnection connection, @NotNull File file) throws IOException {
    String fileName = null;

    final String contentDisposition = connection.getHeaderField("Content-Disposition");
    LOG.debug("header: " + contentDisposition);

    if (contentDisposition != null && contentDisposition.contains(FILENAME)) {
      final int startIdx = contentDisposition.indexOf(FILENAME);
      final int endIdx = contentDisposition.indexOf(';', startIdx);
      fileName = contentDisposition.substring(startIdx + FILENAME.length(), endIdx > 0 ? endIdx : contentDisposition.length());

      if (StringUtil.startsWithChar(fileName, '\"') && StringUtil.endsWithChar(fileName, '\"')) {
        fileName = fileName.substring(1, fileName.length() - 1);
      }
    }

    if (fileName == null) {
      // try to find a filename in an URL
      final String usedURL = connection.getURL().toString();
      LOG.debug("url: " + usedURL);
      fileName = usedURL.substring(usedURL.lastIndexOf('/') + 1);
      if (fileName.length() == 0 || fileName.contains("?")) {
        // The effective URL gave nothing usable (e.g. query-only); fall back to the request URL.
        fileName = myPluginUrl.substring(myPluginUrl.lastIndexOf('/') + 1);
      }
    }

    if (!PathUtil.isValidFileName(fileName)) {
      LOG.debug("fileName: " + fileName);
      FileUtil.delete(file);
      throw new IOException("Invalid filename returned by a server");
    }

    return fileName;
  }

  // creators-converters

  /** Convenience overload: no custom host, current build. */
  public static PluginDownloader createDownloader(@NotNull IdeaPluginDescriptor descriptor) throws IOException {
    return createDownloader(descriptor, null, null);
  }

  /**
   * Builds a downloader for the given descriptor. When a custom repository host
   * is given and the descriptor is a PluginNode, the node's (possibly relative)
   * download URL is resolved against the host; otherwise the standard plugin
   * repository download URL is composed with action/id/build/uuid parameters.
   *
   * @param host        custom repository host, or null for the default repository
   * @param buildNumber build to request compatibility for, or null for the current build
   * @throws IOException when the download URL cannot be composed
   */
  @NotNull
  public static PluginDownloader createDownloader(@NotNull IdeaPluginDescriptor descriptor,
                                                  @Nullable String host,
                                                  @Nullable BuildNumber buildNumber) throws IOException {
    String url;
    try {
      if (host != null && descriptor instanceof PluginNode) {
        url = ((PluginNode)descriptor).getDownloadUrl();
        if (!new URI(url).isAbsolute()) {
          url = new URL(new URL(host), url).toExternalForm();
        }
      }
      else {
        Application app = ApplicationManager.getApplication();
        ApplicationInfoEx appInfo = ApplicationInfoImpl.getShadowInstance();
        String buildNumberAsString = buildNumber != null ? buildNumber.asString() :
                                     app != null ? ApplicationInfo.getInstance().getApiVersion() :
                                     appInfo.getBuild().asString();

        Map<String, String> parameters = new LinkedHashMap<>();
        parameters.put("action", "download");
        parameters.put("id", descriptor.getPluginId().getIdString());
        parameters.put("build", buildNumberAsString);
        parameters.put("uuid", PermanentInstallationID.get());
        url = Urls.newFromEncoded(appInfo.getPluginsDownloadUrl()).addParameters(parameters).toExternalForm();
      }
    }
    catch (URISyntaxException e) {
      throw new IOException(e);
    }
    return new PluginDownloader(descriptor, url, buildNumber);
  }

  /**
   * Converts a downloader back into a PluginNode (returning the descriptor as-is
   * when it already is one), copying over id, name, product code, release info,
   * version, repository host, download URL, dependencies and description.
   */
  @NotNull
  public static PluginNode createPluginNode(@Nullable String host, @NotNull PluginDownloader downloader) {
    IdeaPluginDescriptor descriptor = downloader.getDescriptor();
    if (descriptor instanceof PluginNode) {
      return (PluginNode)descriptor;
    }
    PluginNode node = new PluginNode(PluginId.getId(downloader.getPluginId()));
    node.setName(downloader.getPluginName());
    node.setProductCode(downloader.getProductCode());
    node.setReleaseDate(downloader.getReleaseDate());
    node.setReleaseVersion(downloader.getReleaseVersion());
    node.setVersion(downloader.getPluginVersion());
    node.setRepositoryName(host);
    node.setDownloadUrl(downloader.myPluginUrl);
    node.setDepends(downloader.myDepends, null);
    node.setDescription(downloader.myDescription);
    return node;
  }
}
package com.trojan.ajay.hw_9; import android.content.Intent; import android.content.SharedPreferences; import android.net.Uri; import android.support.v4.view.ViewPager; import android.support.v7.app.ActionBar; import android.support.v7.app.AppCompatActivity; import android.support.design.widget.TabLayout; import android.os.Bundle; import android.view.Menu; import android.view.MenuItem; import android.widget.Toast; import com.facebook.CallbackManager; import com.facebook.FacebookCallback; import com.facebook.FacebookException; import com.facebook.share.Sharer; import com.facebook.share.model.ShareLinkContent; import com.facebook.share.widget.ShareDialog; import org.json.JSONException; import org.json.JSONObject; public class ResultActivity extends AppCompatActivity { ViewPager viewPager; TabLayout tabLayout; Menu globalMenu; ShareDialog shareDialog; CallbackManager callbackManager; Pager adapter; JSONObject result; boolean faventry; StockModel stockModel; boolean fav = false; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_result); ActionBar actionBar = getSupportActionBar(); actionBar.setDisplayHomeAsUpEnabled(true); try { result = new JSONObject(getIntent().getExtras().getString("result")); stockModel = new StockModel(result); actionBar.setTitle(stockModel.Name); } catch (JSONException e) { e.printStackTrace(); } callbackManager = CallbackManager.Factory.create(); shareDialog = new ShareDialog(this); shareDialog.registerCallback(callbackManager, new FacebookCallback<Sharer.Result>() { @Override public void onSuccess(Sharer.Result result) { if(result.getPostId() == null) { Toast.makeText(getApplicationContext(), "Post Cancelled", Toast.LENGTH_SHORT).show(); } else { Toast.makeText(getApplicationContext(), "FB Post Successful", Toast.LENGTH_SHORT).show(); } } @Override public void onCancel() { Toast.makeText(getApplicationContext(), "Post Cancelled", Toast.LENGTH_SHORT).show(); } 
@Override public void onError(FacebookException error) { Toast.makeText(getApplicationContext(), "Post Failed", Toast.LENGTH_SHORT).show(); } }); tabLayout = (TabLayout) findViewById(R.id.tabs); tabLayout.addTab(tabLayout.newTab().setText("Current")); tabLayout.addTab(tabLayout.newTab().setText("Historical")); tabLayout.addTab(tabLayout.newTab().setText("News")); tabLayout.setTabGravity(TabLayout.GRAVITY_FILL); viewPager = (ViewPager) findViewById(R.id.pager); adapter = new Pager(getSupportFragmentManager(), result); viewPager.setAdapter(adapter); viewPager.addOnPageChangeListener(new ViewPager.OnPageChangeListener() { @Override public void onPageScrolled(int position, float positionOffset, int positionOffsetPixels) {} @Override public void onPageSelected(int position) { tabLayout.setScrollPosition(position, 0, true); tabLayout.setSelected(true); } @Override public void onPageScrollStateChanged(int state) {} }); //Adding onTabSelectedListener to swipe views tabLayout.setOnTabSelectedListener(new TabLayout.OnTabSelectedListener() { @Override public void onTabSelected(TabLayout.Tab tab) { viewPager.setCurrentItem(tab.getPosition()); } @Override public void onTabUnselected(TabLayout.Tab tab) {} @Override public void onTabReselected(TabLayout.Tab tab) {} }); } @Override public boolean onCreateOptionsMenu(Menu menu) { getMenuInflater().inflate(R.menu.result_menu, menu); globalMenu = menu; SharedPreferences settings = getSharedPreferences(Utils.SHARED_PREF_FAVLIST, 0); faventry = settings.getString(stockModel.Symbol, null) == null ? 
false : true; if (faventry) { menu.getItem(0).setIcon(android.R.drawable.btn_star_big_on); } else { menu.getItem(0).setIcon(android.R.drawable.btn_star_big_off); } return true; } @Override public boolean onOptionsItemSelected(MenuItem item) { SharedPreferences favorites = getSharedPreferences(Utils.SHARED_PREF_FAVLIST, 0); SharedPreferences.Editor editor = favorites.edit(); switch (item.getItemId()) { case android.R.id.home: onBackPressed(); return true; case R.id.favorite: // Make favorite if(!faventry) { String json = Utils.serialize(new FavoriteModel(false, stockModel.Symbol, stockModel.Name, stockModel.LastPrice, stockModel.ChangePercent, stockModel.MarketCap)); editor.putString(stockModel.Symbol, json); editor.commit(); globalMenu.getItem(0).setIcon(android.R.drawable.btn_star_big_on); faventry = true; } else { editor.remove(stockModel.Symbol); editor.apply(); globalMenu.getItem(0).setIcon(android.R.drawable.btn_star_big_off); faventry = false; } return true; case R.id.fb_share: String name = "Current Stock price of " + stockModel.Symbol + " is $" + stockModel.LastPrice; Uri picture = Uri.parse("http://chart.finance.yahoo.com/t?s="+ stockModel.Symbol + "&width=150&height=150&lang=en-US"); String caption= "LAST TRADED PRICE: $"+stockModel.LastPrice+" CHANGE: "+ stockModel.Change; String link = "http://dev.markitondemand.com/"; String description = "Stock Information of "+stockModel.Name+" ("+stockModel.Symbol+")"; if (ShareDialog.canShow(ShareLinkContent.class)) { ShareLinkContent content = new ShareLinkContent.Builder() .setContentUrl(Uri.parse(link)) .setContentTitle(name) .setImageUrl(picture) .setContentDescription(description) .setQuote(caption) .build(); shareDialog.show(content, ShareDialog.Mode.FEED); } return true; } return super.onOptionsItemSelected(item); } @Override protected void onActivityResult(final int requestCode, final int resultCode, final Intent data) { super.onActivityResult(requestCode, resultCode, data); 
callbackManager.onActivityResult(requestCode, resultCode, data); } }
/*
 * Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.directory.smoketests;

import javax.annotation.Generated;

import com.google.inject.AbstractModule;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.google.inject.Stage;

import cucumber.api.guice.CucumberModules;
import cucumber.runtime.java.guice.InjectorSource;

import com.amazonaws.AmazonWebServiceClient;
import com.amazonaws.services.directory.AWSDirectoryServiceClient;

/**
 * Injector that binds the AmazonWebServiceClient interface to the
 * com.amazonaws.services.directory.AWSDirectoryServiceClient
 *
 * <p>NOTE: generated by the AWS SDK code generator (see {@code @Generated} below);
 * change the generator/templates rather than hand-editing this file.
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class AWSDirectoryServiceModuleInjector implements InjectorSource {

    // Entry point used by Cucumber's Guice integration to build the DI container
    // backing smoke-test scenarios.
    @Override
    public Injector getInjector() {
        // CucumberModules.SCENARIO supplies Cucumber's per-scenario bindings alongside ours.
        return Guice.createInjector(Stage.PRODUCTION, CucumberModules.SCENARIO, new AWSDirectoryServiceModule());
    }

    static class AWSDirectoryServiceModule extends AbstractModule {
        @Override
        protected void configure() {
            // Requests for the generic client interface resolve to the Directory Service client.
            bind(AmazonWebServiceClient.class).to(AWSDirectoryServiceClient.class);
        }
    }
}
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: proto/clarifai/api/service.proto package com.clarifai.grpc.api; public interface ModelVersionPublishRequestOrBuilder extends // @@protoc_insertion_point(interface_extends:clarifai.api.ModelVersionPublishRequest) com.google.protobuf.MessageOrBuilder { /** * <code>string version_id = 1;</code> * @return The versionId. */ java.lang.String getVersionId(); /** * <code>string version_id = 1;</code> * @return The bytes for versionId. */ com.google.protobuf.ByteString getVersionIdBytes(); }
/*
 * Copyright (c) 2008-2016, GigaSpaces Technologies, Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.gigaspaces.internal.cluster.node.impl.backlog.globalorder;

import com.gigaspaces.internal.cluster.node.impl.backlog.BacklogConfig;

/**
 * Backlog configuration for the global-order replication backlog. Declares no
 * settings of its own beyond {@link BacklogConfig}; it exists so that
 * {@link #clone()} yields the concrete global-order type.
 */
@com.gigaspaces.api.InternalApi
public class GlobalOrderBacklogConfig extends BacklogConfig {

    /**
     * Returns a copy of this configuration: a fresh instance whose settings are
     * taken from this one via {@code overrideWithOther}.
     */
    @Override
    public GlobalOrderBacklogConfig clone() {
        final GlobalOrderBacklogConfig copy = new GlobalOrderBacklogConfig();
        copy.overrideWithOther(this);
        return copy;
    }
}
// Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. package com.azure.cosmos.models; import com.azure.core.util.IterableStream; import com.azure.core.util.paging.ContinuablePage; import com.azure.cosmos.BridgeInternal; import com.azure.cosmos.CosmosDiagnostics; import com.azure.cosmos.implementation.Constants; import com.azure.cosmos.implementation.HttpConstants; import com.azure.cosmos.implementation.ImplementationBridgeHelpers; import com.azure.cosmos.implementation.QueryMetrics; import com.azure.cosmos.implementation.QueryMetricsConstants; import com.azure.cosmos.implementation.RxDocumentServiceResponse; import com.azure.cosmos.implementation.Strings; import com.azure.cosmos.implementation.apachecommons.lang.StringUtils; import com.azure.cosmos.implementation.query.QueryInfo; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.UUID; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.regex.Pattern; /** * The type Feed response. 
* * @param <T> the type parameter */ public class FeedResponse<T> implements ContinuablePage<String, T> { private static final Pattern DELIMITER_CHARS_PATTERN = Pattern.compile(Constants.Quota.DELIMITER_CHARS); private final List<T> results; private final Map<String, String> header; private final HashMap<String, Long> usageHeaders; private final HashMap<String, Long> quotaHeaders; private final boolean useEtagAsContinuation; final boolean nochanges; private final ConcurrentMap<String, QueryMetrics> queryMetricsMap; private final static String defaultPartition = "0"; private CosmosDiagnostics cosmosDiagnostics; private QueryInfo queryInfo; private QueryInfo.QueryPlanDiagnosticsContext queryPlanDiagnosticsContext; FeedResponse(List<T> results, Map<String, String> headers) { this(results, headers, false, false, new ConcurrentHashMap<>()); } // TODO: probably have to add two booleans FeedResponse(List<T> results, RxDocumentServiceResponse response) { this(results, response.getResponseHeaders(), false, false, new ConcurrentHashMap<>()); this.cosmosDiagnostics =response.getCosmosDiagnostics(); if (this.cosmosDiagnostics != null) { BridgeInternal.setFeedResponseDiagnostics(this.cosmosDiagnostics, queryMetricsMap); } } FeedResponse( List<T> results, Map<String, String> headers, ConcurrentMap<String, QueryMetrics> queryMetricsMap, boolean useEtagAsContinuation, boolean isNoChanges) { this(results, headers, useEtagAsContinuation, isNoChanges, queryMetricsMap); } FeedResponse(List<T> results, Map<String, String> header, boolean nochanges) { this(results, header, true, nochanges, new ConcurrentHashMap<>()); } // TODO: need to more sure the query metrics can round trip just from the headers. // We can then remove it as a parameter. 
private FeedResponse( List<T> results, Map<String, String> header, boolean useEtagAsContinuation, boolean nochanges, ConcurrentMap<String, QueryMetrics> queryMetricsMap) { this.results = results; this.header = header; this.usageHeaders = new HashMap<>(); this.quotaHeaders = new HashMap<>(); this.useEtagAsContinuation = useEtagAsContinuation; this.nochanges = nochanges; this.queryMetricsMap = new ConcurrentHashMap<>(queryMetricsMap); this.cosmosDiagnostics = BridgeInternal.createCosmosDiagnostics(queryMetricsMap); } /** * Results. * * @return the list of results. */ public List<T> getResults() { return results; } /** * Gets the maximum quota for database resources within the account from the Azure Cosmos DB service. * * @return The maximum quota for the account. */ public long getDatabaseQuota() { return this.maxQuotaHeader(Constants.Quota.DATABASE); } /** * Gets the current number of database resources within the account from the Azure Cosmos DB service. * * @return The current number of databases. */ public long getDatabaseUsage() { return this.currentQuotaHeader(Constants.Quota.DATABASE); } /** * Gets the maximum quota for container resources within an account from the Azure Cosmos DB service. * * @return The maximum quota for the account. */ public long getCollectionQuota() { return this.maxQuotaHeader(Constants.Quota.COLLECTION); } /** * Gets the current number of container resources within the account from the Azure Cosmos DB service. * * @return The current number of containers. */ public long getCollectionUsage() { return this.currentQuotaHeader(Constants.Quota.COLLECTION); } /** * Gets the maximum quota for user resources within an account from the Azure Cosmos DB service. * * @return The maximum quota for the account. */ public long getUserQuota() { return this.maxQuotaHeader(Constants.Quota.USER); } /** * Gets the current number of user resources within the account from the Azure Cosmos DB service. * * @return The current number of users. 
*/ public long getUserUsage() { return this.currentQuotaHeader(Constants.Quota.USER); } /** * Gets the maximum quota for permission resources within an account from the Azure Cosmos DB service. * * @return The maximum quota for the account. */ public long getPermissionQuota() { return this.maxQuotaHeader(Constants.Quota.PERMISSION); } /** * Gets the current number of permission resources within the account from the Azure Cosmos DB service. * * @return The current number of permissions. */ public long getPermissionUsage() { return this.currentQuotaHeader(Constants.Quota.PERMISSION); } /** * Gets the maximum size of a container in kilobytes from the Azure Cosmos DB service. * * @return The maximum quota in kilobytes. */ public long getCollectionSizeQuota() { return this.maxQuotaHeader(Constants.Quota.COLLECTION_SIZE); } /** * Gets the current size of a container in kilobytes from the Azure Cosmos DB service. * * @return The current size of a container in kilobytes. */ public long getCollectionSizeUsage() { return this.currentQuotaHeader(Constants.Quota.COLLECTION_SIZE); } /** * Gets the current size of the documents in a container in kilobytes from the Azure Cosmos DB service. * * @return The current size of a container in kilobytes. */ public long getDocumentUsage() { return this.currentQuotaHeader(Constants.Quota.DOCUMENTS_SIZE); } /** * Current document count usage. * * @return the document count usage. */ public long getDocumentCountUsage() { return this.currentQuotaHeader(Constants.Quota.DOCUMENTS_COUNT); } /** * Gets the maximum quota of stored procedures for a container from the Azure Cosmos DB service. * * @return The maximum stored procedure quota. */ public long getStoredProceduresQuota() { return this.maxQuotaHeader(Constants.Quota.STORED_PROCEDURE); } /** * Gets the current number of stored procedures for a container from the Azure Cosmos DB service. * * @return The current number of stored procedures. 
*/ public long getStoredProceduresUsage() { return this.currentQuotaHeader(Constants.Quota.STORED_PROCEDURE); } /** * Gets the maximum quota of triggers for a container from the Azure Cosmos DB service. * * @return The maximum triggers quota. */ public long getTriggersQuota() { return this.maxQuotaHeader(Constants.Quota.TRIGGER); } /** * Get the current number of triggers for a container from the Azure Cosmos DB service. * * @return The current number of triggers. */ public long getTriggersUsage() { return this.currentQuotaHeader(Constants.Quota.TRIGGER); } /** * Gets the maximum quota of user defined functions for a container from the Azure Cosmos DB service. * * @return The maximum user defined functions quota. */ public long getUserDefinedFunctionsQuota() { return this.maxQuotaHeader(Constants.Quota.USER_DEFINED_FUNCTION); } /** * Gets the current number of user defined functions for a container from the Azure Cosmos DB service. * * @return the current number of user defined functions. */ public long getUserDefinedFunctionsUsage() { return this.currentQuotaHeader(Constants.Quota.USER_DEFINED_FUNCTION); } /** * Gets the maximum size limit for this entity from the Azure Cosmos DB service. * * @return the maximum size limit for this entity. * Measured in kilobytes for item resources and in counts for other resources. */ public String getMaxResourceQuota() { return getValueOrNull(header, HttpConstants.HttpHeaders.MAX_RESOURCE_QUOTA); } /** * Gets the current size of this entity from the Azure Cosmos DB service. * * @return the current size for this entity. Measured in kilobytes for item resources * and in counts for other resources. */ public String getCurrentResourceQuotaUsage() { return getValueOrNull(header, HttpConstants.HttpHeaders.CURRENT_RESOURCE_QUOTA_USAGE); } /** * Gets the request charge as request units (RU) consumed by the operation. 
* <p>
* For more information about the RU and factors that can impact the effective charges please visit
* <a href="https://docs.microsoft.com/en-us/azure/cosmos-db/request-units">Request Units in Azure Cosmos DB</a>
*
* @return the request charge.
*/
public double getRequestCharge() {
    // Absence of the header is reported as a zero charge.
    String value = getValueOrNull(header, HttpConstants.HttpHeaders.REQUEST_CHARGE);
    if (StringUtils.isEmpty(value)) {
        return 0;
    }
    return Double.parseDouble(value);
}

/**
 * Gets the activity ID for the request.
 *
 * @return the activity id.
 */
public String getActivityId() {
    return getValueOrNull(header, HttpConstants.HttpHeaders.ACTIVITY_ID);
}

/**
 * Gets the correlation activity ID for the responses of a query operation or null if
 * no correlation activity id is present
 *
 * @return the correlation activity id or null if no correlation activity id is present.
 */
public UUID getCorrelationActivityId() {
    String correlationActivityIdAsString =
        getValueOrNull(header, HttpConstants.HttpHeaders.CORRELATED_ACTIVITY_ID);
    if (!Strings.isNullOrWhiteSpace(correlationActivityIdAsString)) {
        return UUID.fromString(correlationActivityIdAsString);
    }

    return null;
}

// Exposes the items of this page as an IterableStream.
@Override
public IterableStream<T> getElements() {
    return IterableStream.of(this.results);
}

/**
 * Gets the continuation token to be used for continuing the enumeration.
 *
 * @return the response continuation.
 */
public String getContinuationToken() {
    // Some responses (useEtagAsContinuation == true) carry the continuation in the ETag header.
    String headerName = useEtagAsContinuation
        ? HttpConstants.HttpHeaders.E_TAG
        : HttpConstants.HttpHeaders.CONTINUATION;
    return getValueOrNull(header, headerName);
}

/**
 * Sets the continuation token to be used for continuing the enumeration.
 *
 * @param continuationToken updates the continuation token header of the response
 */
void setContinuationToken(String continuationToken) {
    String headerName = useEtagAsContinuation
        ? HttpConstants.HttpHeaders.E_TAG
        : HttpConstants.HttpHeaders.CONTINUATION;
    // A null/blank token removes the header entirely instead of storing an empty value.
    if (!Strings.isNullOrWhiteSpace(continuationToken)) {
        this.header.put(headerName, continuationToken);
    } else {
        this.header.remove(headerName);
    }
}

// True when this response represents "no changes" (flag is set elsewhere in this class).
boolean getNoChanges() {
    return this.nochanges;
}

/**
 * Gets the session token for use in session consistency.
 *
 * @return the session token.
 */
public String getSessionToken() {
    return getValueOrNull(header, HttpConstants.HttpHeaders.SESSION_TOKEN);
}

/**
 * Gets the response headers.
 *
 * @return the response headers.
 */
public Map<String, String> getResponseHeaders() {
    return header;
}

// Raw query-metrics header value, or null when the service sent none.
private String getQueryMetricsString() {
    return getValueOrNull(getResponseHeaders(), HttpConstants.HttpHeaders.QUERY_METRICS);
}

/**
 * Gets the feed response diagnostics
 *
 * @return Feed response diagnostics
 */
public CosmosDiagnostics getCosmosDiagnostics() {
    return this.cosmosDiagnostics;
}

// Returns the per-partition query metrics, parsing them lazily from the response
// header for the un-partitioned case and caching the result in queryMetricsMap.
ConcurrentMap<String, QueryMetrics> queryMetrics() {
    if (queryMetricsMap != null && !queryMetricsMap.isEmpty()) {
        return queryMetricsMap;
    }

    //We parse query metrics for un-partitioned container here
    if (!StringUtils.isEmpty(getQueryMetricsString())) {
        String qm = getQueryMetricsString();
        // The request charge is not part of the delimited metrics string; append it here.
        qm += String.format(";%s=%.2f", QueryMetricsConstants.RequestCharge, getRequestCharge());
        // NOTE(review): assumes queryMetricsMap is non-null at this point (initialized
        // elsewhere); a null map combined with a non-empty metrics header would NPE — confirm.
        queryMetricsMap.put(defaultPartition, QueryMetrics.createFromDelimitedString(qm));
    }
    return queryMetricsMap;
}

// Direct accessor for the cached per-partition metrics map (no parsing).
ConcurrentMap<String, QueryMetrics> queryMetricsMap() {
    return queryMetricsMap;
}

// Current usage for the given quota key; parses both quota headers on first
// access, returns 0 when the key is absent.
private long currentQuotaHeader(String headerName) {
    // Parsing only happens when both the max-quota and current-usage headers are present.
    if (this.usageHeaders.size() == 0 && !StringUtils.isEmpty(this.getMaxResourceQuota()) &&
        !StringUtils.isEmpty(this.getCurrentResourceQuotaUsage())) {
        this.populateQuotaHeader(this.getMaxResourceQuota(), this.getCurrentResourceQuotaUsage());
    }

    if (this.usageHeaders.containsKey(headerName)) {
        return this.usageHeaders.get(headerName);
    }

    return 0;
}

// Maximum quota for the given quota key; same lazy-parse behavior as
// currentQuotaHeader, returns 0 when the key is absent.
private long maxQuotaHeader(String headerName) {
    if (this.quotaHeaders.size() == 0 &&
        !StringUtils.isEmpty(this.getMaxResourceQuota()) &&
        !StringUtils.isEmpty(this.getCurrentResourceQuotaUsage())) {
        this.populateQuotaHeader(this.getMaxResourceQuota(), this.getCurrentResourceQuotaUsage());
    }

    if (this.quotaHeaders.containsKey(headerName)) {
        return this.quotaHeaders.get(headerName);
    }

    return 0;
}

// Parses the delimited max-quota / current-usage header strings into the
// quotaHeaders and usageHeaders maps. The header format is "key<delim>value"
// pairs, so each recognized key reads its value from index i + 1.
private void populateQuotaHeader(String headerMaxQuota, String headerCurrentUsage) {
    String[] headerMaxQuotaWords = DELIMITER_CHARS_PATTERN.split(headerMaxQuota, -1);
    String[] headerCurrentUsageWords = DELIMITER_CHARS_PATTERN.split(headerCurrentUsage, -1);

    for (int i = 0; i < headerMaxQuotaWords.length; ++i) {
        if (headerMaxQuotaWords[i].equalsIgnoreCase(Constants.Quota.DATABASE)) {
            this.quotaHeaders.put(Constants.Quota.DATABASE, Long.valueOf(headerMaxQuotaWords[i + 1]));
            this.usageHeaders.put(Constants.Quota.DATABASE, Long.valueOf(headerCurrentUsageWords[i + 1]));
        } else if (headerMaxQuotaWords[i].equalsIgnoreCase(Constants.Quota.COLLECTION)) {
            this.quotaHeaders.put(Constants.Quota.COLLECTION, Long.valueOf(headerMaxQuotaWords[i + 1]));
            this.usageHeaders.put(Constants.Quota.COLLECTION, Long.valueOf(headerCurrentUsageWords[i + 1]));
        } else if (headerMaxQuotaWords[i].equalsIgnoreCase(Constants.Quota.USER)) {
            this.quotaHeaders.put(Constants.Quota.USER, Long.valueOf(headerMaxQuotaWords[i + 1]));
            this.usageHeaders.put(Constants.Quota.USER, Long.valueOf(headerCurrentUsageWords[i + 1]));
        } else if (headerMaxQuotaWords[i].equalsIgnoreCase(Constants.Quota.PERMISSION)) {
            this.quotaHeaders.put(Constants.Quota.PERMISSION, Long.valueOf(headerMaxQuotaWords[i + 1]));
            this.usageHeaders.put(Constants.Quota.PERMISSION, Long.valueOf(headerCurrentUsageWords[i + 1]));
        } else if (headerMaxQuotaWords[i].equalsIgnoreCase(Constants.Quota.COLLECTION_SIZE)) {
            this.quotaHeaders.put(Constants.Quota.COLLECTION_SIZE, Long.valueOf(headerMaxQuotaWords[i + 1]));
            this.usageHeaders.put(Constants.Quota.COLLECTION_SIZE, Long.valueOf(headerCurrentUsageWords[i + 1]));
        } else if (headerMaxQuotaWords[i].equalsIgnoreCase(Constants.Quota.STORED_PROCEDURE)) {
            this.quotaHeaders.put(Constants.Quota.STORED_PROCEDURE, Long.valueOf(headerMaxQuotaWords[i + 1]));
            this.usageHeaders.put(Constants.Quota.STORED_PROCEDURE, Long.valueOf(headerCurrentUsageWords[i + 1]));
        } else if (headerMaxQuotaWords[i].equalsIgnoreCase(Constants.Quota.TRIGGER)) {
            this.quotaHeaders.put(Constants.Quota.TRIGGER, Long.valueOf(headerMaxQuotaWords[i + 1]));
            this.usageHeaders.put(Constants.Quota.TRIGGER, Long.valueOf(headerCurrentUsageWords[i + 1]));
        } else if (headerMaxQuotaWords[i].equalsIgnoreCase(Constants.Quota.DOCUMENTS_SIZE)) {
            this.quotaHeaders.put(Constants.Quota.DOCUMENTS_SIZE, Long.valueOf(headerMaxQuotaWords[i + 1]));
            this.usageHeaders.put(Constants.Quota.DOCUMENTS_SIZE, Long.valueOf(headerCurrentUsageWords[i + 1]));
        } else if (headerMaxQuotaWords[i].equalsIgnoreCase(Constants.Quota.DOCUMENTS_COUNT)) {
            this.quotaHeaders.put(Constants.Quota.DOCUMENTS_COUNT, Long.valueOf(headerMaxQuotaWords[i + 1]));
            this.usageHeaders.put(Constants.Quota.DOCUMENTS_COUNT, Long.valueOf(headerCurrentUsageWords[i + 1]));
        } else if (headerMaxQuotaWords[i].equalsIgnoreCase(Constants.Quota.USER_DEFINED_FUNCTION)) {
            this.quotaHeaders.put(Constants.Quota.USER_DEFINED_FUNCTION, Long.valueOf(headerMaxQuotaWords[i + 1]));
            this.usageHeaders.put(Constants.Quota.USER_DEFINED_FUNCTION, Long.valueOf(headerCurrentUsageWords[i + 1]));
        }
    }
}

// Null-safe map lookup used for every header access above.
private static String getValueOrNull(Map<String, String> map, String key) {
    if (map != null) {
        return map.get(key);
    }
    return null;
}

void setQueryInfo(QueryInfo queryInfo) {
    this.queryInfo = queryInfo;
}

QueryInfo getQueryInfo() {
    return this.queryInfo;
}

QueryInfo.QueryPlanDiagnosticsContext getQueryPlanDiagnosticsContext() {
    return queryPlanDiagnosticsContext;
}

// Stores the query-plan diagnostics context and mirrors it into the
// CosmosDiagnostics object via BridgeInternal.
void setQueryPlanDiagnosticsContext(QueryInfo.QueryPlanDiagnosticsContext queryPlanDiagnosticsContext) {
    this.queryPlanDiagnosticsContext = queryPlanDiagnosticsContext;
    BridgeInternal.setQueryPlanDiagnosticsContext(cosmosDiagnostics, queryPlanDiagnosticsContext);
}

///////////////////////////////////////////////////////////////////////////////////////////
// the following helper/accessor only helps to access this class outside of this package.//
///////////////////////////////////////////////////////////////////////////////////////////
static {
    // Registers an accessor so other packages can read the package-private
    // "no changes" flag without widening its visibility.
    ImplementationBridgeHelpers.FeedResponseHelper.setFeedResponseAccessor(
        new ImplementationBridgeHelpers.FeedResponseHelper.FeedResponseAccessor() {
            @Override
            public <T> boolean getNoChanges(FeedResponse<T> feedResponse) {
                return feedResponse.getNoChanges();
            }
        });
}
}
/**
 * Copyright (c) 2016 eBay Software Foundation. All rights reserved.
 *
 * Licensed under the MIT license.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
package org.ebayopensource.winder;

/**
 * Exception signalling a failure while scheduling a Winder job.
 *
 * @author Sheldon Shao xshao@ebay.com on 10/16/16.
 * @version 1.0
 */
public class WinderScheduleException extends WinderException {

    /**
     * Creates the exception with a descriptive message only.
     *
     * @param message description of the scheduling failure
     */
    public WinderScheduleException(String message) {
        super(message);
    }

    /**
     * Creates the exception with a descriptive message and the underlying cause.
     *
     * @param message   description of the scheduling failure
     * @param rootCause the exception that triggered this one
     */
    public WinderScheduleException(String message, Throwable rootCause) {
        super(message, rootCause);
    }
}
package me.zuichu.staticdemo.base;

import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.v7.app.AppCompatActivity;

/**
 * Common base class for the app's activities; extend this to share lifecycle
 * behavior across screens. Created by office on 2018/4/13.
 */
public class BaseActivity extends AppCompatActivity {

    // Currently delegates straight to the framework; shared setup hooks go here later.
    @Override
    protected void onCreate(@Nullable Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
    }
}
package com.github.marcindabrowski.example.nbpcurrencyexchange.domain.service;

import java.math.BigDecimal;

import static java.math.RoundingMode.HALF_UP;

/**
 * Converts an account balance into another currency at a given exchange rate,
 * rounding the result to two decimal places (half-up).
 */
class CurrencyExchanger {

    /** Number of decimal places in the exchanged amount. */
    private static final int RESULT_PRECISION = 2;

    /**
     * Applies the exchange rate to a positive balance.
     *
     * @param accountBalance balance to convert
     * @param exchangeRate   rate to multiply the balance by
     * @return the converted amount rounded half-up to two decimals, or
     *         {@link BigDecimal#ZERO} when the balance is zero or negative
     */
    public BigDecimal getAccountBalanceAfterExchange(BigDecimal accountBalance, BigDecimal exchangeRate) {
        // Guard clause: non-positive balances are not exchanged.
        if (accountBalance.signum() <= 0) {
            return BigDecimal.ZERO;
        }
        BigDecimal exchanged = accountBalance.multiply(exchangeRate);
        return exchanged.setScale(RESULT_PRECISION, HALF_UP);
    }
}
package com.jetbrains.ther.psi.stubs;

import com.intellij.lang.ASTNode;
import com.intellij.psi.PsiElement;
import com.intellij.psi.stubs.*;
import com.intellij.util.io.StringRef;
import com.jetbrains.ther.psi.TheRAssignmentStatementImpl;
import com.jetbrains.ther.psi.api.TheRAssignmentStatement;
import com.jetbrains.ther.psi.api.TheRFunctionExpression;
import com.jetbrains.ther.psi.api.TheRPsiElement;
import org.jetbrains.annotations.NotNull;

import java.io.IOException;

/**
 * Stub element type for R assignment statements. Creates PSI/stub instances,
 * (de)serializes stubs to the stub index stream and registers names of
 * top-level function declarations in the name index.
 */
public class TheRAssignmentElementType extends TheRStubElementType<TheRAssignmentStub, TheRAssignmentStatement> {
    public TheRAssignmentElementType(@NotNull final String debugName) {
        super(debugName);
    }

    @Override
    public PsiElement createElement(@NotNull final ASTNode node) {
        return new TheRAssignmentStatementImpl(node);
    }

    @Override
    public TheRAssignmentStatement createPsi(@NotNull final TheRAssignmentStub stub) {
        return new TheRAssignmentStatementImpl(stub, this);
    }

    // Captures the assignment's name and whether the assigned value is a function
    // expression (i.e. this assignment declares a function).
    @Override
    public TheRAssignmentStub createStub(@NotNull TheRAssignmentStatement psi, StubElement parentStub) {
        final String name = psi.getName();
        final TheRPsiElement value = psi.getAssignedValue();
        return new TheRAssignmentStubImpl(name, parentStub, this, value instanceof TheRFunctionExpression);
    }

    // NOTE: field order here (name, then isFunctionDeclaration) must stay in
    // lockstep with deserialize() below.
    @Override
    public void serialize(@NotNull final TheRAssignmentStub stub, @NotNull final StubOutputStream dataStream) throws IOException {
        dataStream.writeName(stub.getName());
        dataStream.writeBoolean(stub.isFunctionDeclaration());
    }

    // Reads the fields back in exactly the order serialize() wrote them.
    @Override
    @NotNull
    public TheRAssignmentStub deserialize(@NotNull final StubInputStream dataStream, final StubElement parentStub) throws IOException {
        String name = StringRef.toString(dataStream.readName());
        final boolean isFunctionDefinition = dataStream.readBoolean();
        return new TheRAssignmentStubImpl(name, parentStub, this, isFunctionDefinition);
    }

    // Only named, file-level (top-level) function declarations are indexed,
    // so nested assignments don't pollute the name index.
    @Override
    public void indexStub(@NotNull final TheRAssignmentStub stub, @NotNull final IndexSink sink) {
        final String name = stub.getName();
        if (name != null && stub.getParentStub() instanceof PsiFileStub && stub.isFunctionDeclaration()) {
            sink.occurrence(TheRAssignmentNameIndex.KEY, name);
        }
    }
}
package com.hc.musclesensordemo.activity; import android.os.Bundle; import android.view.View; import android.view.ViewGroup; import android.widget.TextView; import com.hc.basiclibrary.ioc.OnClick; import com.hc.basiclibrary.ioc.ViewById; import com.hc.basiclibrary.titleBasic.DefaultNavigationBar; import com.hc.basiclibrary.viewBasic.BasActivity; import com.hc.musclesensordemo.R; import com.hc.musclesensordemo.activity.single.HoldBluetooth; import com.hc.musclesensordemo.customView.CustomButtonView; import com.hc.musclesensordemo.storage.Storage; import java.io.BufferedReader; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStreamReader; public class DebugActivity extends BasActivity implements View.OnClickListener{ private Storage mStorage; @ViewById(R.id.debug_development_mode) private CustomButtonView customButtonView; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_debug); initViev(); initTitle(); initData(); } @Override public void initAll() { } private void initData() { } private void initViev() { super.setContext(this); setButton(R.id.debug_read); if (mStorage == null) mStorage = new Storage(this); if(mStorage.getData(HoldBluetooth.DEVELOPMENT_MODE_KEY)){ customButtonView.staysOn(); }else { customButtonView.closed(); } } @Override public void onClick(View v) { TextView textView = findViewById(R.id.bug_log); textView.setText(load("errNewLog")); } @OnClick(R.id.debug_development_mode) private void switchDevelopment(View view){ mStorage.saveData(HoldBluetooth.DEVELOPMENT_MODE_KEY,!customButtonView.isChick()); customButtonView.toggle(); } private void initTitle(){ new DefaultNavigationBar .Builder(this, (ViewGroup) findViewById(R.id.debug_activity)) .setTitle("Bug日志") .setRightText("") .builer(); } public String load(String file){ FileInputStream in; BufferedReader reader = null; StringBuilder content = new StringBuilder(); try{ in = 
openFileInput(file); reader = new BufferedReader(new InputStreamReader(in)); String line ; while ((line = reader.readLine()) != null){ content.append(line); } }catch (IOException e){ e.printStackTrace(); }finally { if(reader != null){ try{ reader.close(); }catch (IOException e){ e.printStackTrace(); } } } if(!content.toString().isEmpty()) return content.toString(); else return "没有记录到异常"; } public void setButton(int view){ View textView = findViewById(view); textView.setOnClickListener(this); } }
package com.softeng.ticket_application.model;

/**
 * Plain data holder describing a stadium gate: its identity, display name,
 * total capacity and how many places are currently taken.
 *
 * @author Argiris Sideris
 */
public class SimpleGate {

    /** Unique identifier of the gate. */
    private int id;

    public int getId() {
        return id;
    }

    public void setId(final int id) {
        this.id = id;
    }

    /** Human-readable gate name. */
    private String name;

    public String getName() {
        return name;
    }

    public void setName(final String name) {
        this.name = name;
    }

    /** Maximum number of people the gate admits. */
    private int capacity;

    public int getCapacity() {
        return capacity;
    }

    public void setCapacity(final int capacity) {
        this.capacity = capacity;
    }

    /** Number of places currently occupied. */
    private int occupied;

    public int getOccupied() {
        return occupied;
    }

    public void setOccupied(final int occupied) {
        this.occupied = occupied;
    }

    /** No-arg constructor, kept for serialization frameworks. */
    public SimpleGate() {
    }
}
/*
 * Copyright 2016-2019 David Karnok
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package hu.akarnokd.rxjava3.consumers;

import static org.junit.Assert.*;

import java.io.IOException;
import java.util.*;

import org.junit.Test;

import hu.akarnokd.rxjava3.test.TestHelper;
import io.reactivex.rxjava3.core.*;
import io.reactivex.rxjava3.disposables.*;
import io.reactivex.rxjava3.exceptions.CompositeException;
import io.reactivex.rxjava3.functions.*;
import io.reactivex.rxjava3.observers.LambdaConsumerIntrospection;
import io.reactivex.rxjava3.plugins.RxJavaPlugins;
import io.reactivex.rxjava3.subjects.MaybeSubject;

/**
 * Tests for {@code MaybeConsumers.subscribeAutoDispose}: verifies that the
 * subscription is added to the composite, removed again on termination or
 * disposal, and that crashing callbacks are routed to the RxJava plugin
 * error handler. The test class itself acts as the success/error consumer
 * (via {@link #accept}) and the complete action (via {@link #run}),
 * recording every delivered signal in {@link #events}.
 */
public class MaybeConsumersTest implements Consumer<Object>, Action {

    // Tracks the auto-disposing subscription under test.
    final CompositeDisposable composite = new CompositeDisposable();

    // Manually-driven Maybe source.
    final MaybeSubject<Integer> processor = MaybeSubject.create();

    // Every signal (value, error or "OnComplete" marker) lands here.
    final List<Object> events = new ArrayList<>();

    @Override
    public void run() throws Exception {
        events.add("OnComplete");
    }

    @Override
    public void accept(Object t) throws Exception {
        events.add(t);
    }

    @Test
    public void utilityClass() {
        TestHelper.checkUtilityClass(MaybeConsumers.class);
    }

    // Single-consumer overload: success value is delivered and the
    // subscription is removed from the composite afterwards.
    @Test
    public void onSuccessNormal() {
        Disposable d = MaybeConsumers.subscribeAutoDispose(processor, composite, this);

        assertFalse(d.getClass().toString(), ((LambdaConsumerIntrospection)d).hasCustomOnError());

        assertTrue(composite.size() > 0);
        assertTrue(events.toString(), events.isEmpty());

        processor.onSuccess(1);

        assertEquals(0, composite.size());
        assertEquals(Arrays.<Object>asList(1), events);
    }

    // Two-consumer overload, happy path: success still flows to the value consumer.
    @Test
    public void onErrorNormal() {
        MaybeConsumers.subscribeAutoDispose(processor, composite, this, this);

        assertTrue(composite.size() > 0);
        assertTrue(events.toString(), events.isEmpty());

        processor.onSuccess(1);

        assertEquals(0, composite.size());
        assertEquals(Arrays.<Object>asList(1), events);
    }

    // Two-consumer overload: errors reach the error consumer and the
    // disposable reports a custom error handler.
    @Test
    public void onErrorError() {
        Disposable d = MaybeConsumers.subscribeAutoDispose(processor, composite, this, this);

        assertTrue(d.getClass().toString(), ((LambdaConsumerIntrospection)d).hasCustomOnError());

        assertTrue(composite.size() > 0);
        assertTrue(events.toString(), events.isEmpty());

        processor.onError(new IOException());

        assertTrue(events.toString(), events.get(0) instanceof IOException);
        assertEquals(0, composite.size());
    }

    // Three-argument overload: completion triggers the Action callback.
    @Test
    public void onCompleteNormal() {
        MaybeConsumers.subscribeAutoDispose(processor, composite, this, this, this);

        assertTrue(composite.size() > 0);
        assertTrue(events.toString(), events.isEmpty());

        processor.onComplete();

        assertEquals(0, composite.size());
        assertEquals(Arrays.<Object>asList("OnComplete"), events);
    }

    // Three-argument overload: errors still reach the error consumer.
    @Test
    public void onCompleteError() {
        MaybeConsumers.subscribeAutoDispose(processor, composite, this, this, this);

        assertTrue(composite.size() > 0);
        assertTrue(events.toString(), events.isEmpty());

        processor.onError(new IOException());

        assertTrue(events.toString(), events.get(0) instanceof IOException);
        assertEquals(0, composite.size());
    }

    // Manual disposal removes the subscription from the composite and the
    // source; a second dispose() is a no-op.
    @Test
    public void onCompleteDispose() {
        Disposable d = MaybeConsumers.subscribeAutoDispose(processor, composite, this, this, this);

        assertTrue(composite.size() > 0);
        assertTrue(events.toString(), events.isEmpty());

        assertFalse(d.isDisposed());

        d.dispose();
        d.dispose();

        assertTrue(d.isDisposed());

        assertEquals(0, composite.size());

        assertFalse(processor.hasObservers());
    }

    // A throwing success consumer must be reported as an undeliverable error.
    @Test
    public void onSuccessCrash() {
        List<Throwable> errors = TestHelper.trackPluginErrors();
        try {
            MaybeConsumers.subscribeAutoDispose(processor, composite, new Consumer<Object>() {
                @Override
                public void accept(Object t) throws Exception {
                    throw new IOException();
                }
            }, this, this);

            processor.onSuccess(1);

            assertTrue(events.toString(), events.isEmpty());

            TestHelper.assertUndeliverable(errors, 0, IOException.class);
        } finally {
            RxJavaPlugins.reset();
        }
    }

    // A throwing error consumer produces a CompositeException containing
    // both the original error and the crash.
    @Test
    public void onErrorCrash() {
        List<Throwable> errors = TestHelper.trackPluginErrors();
        try {
            MaybeConsumers.subscribeAutoDispose(processor, composite, this, new Consumer<Throwable>() {
                @Override
                public void accept(Throwable t) throws Exception {
                    throw new IOException(t);
                }
            }, this);

            processor.onError(new IllegalArgumentException());

            assertTrue(events.toString(), events.isEmpty());

            TestHelper.assertError(errors, 0, CompositeException.class);
            List<Throwable> inners = TestHelper.compositeList(errors.get(0));
            TestHelper.assertError(inners, 0, IllegalArgumentException.class);
            TestHelper.assertError(inners, 1, IOException.class);
        } finally {
            RxJavaPlugins.reset();
        }
    }

    // A throwing completion Action must be reported as an undeliverable error.
    @Test
    public void onCompleteCrash() {
        List<Throwable> errors = TestHelper.trackPluginErrors();
        try {
            MaybeConsumers.subscribeAutoDispose(processor, composite, this, this, new Action() {
                @Override
                public void run() throws Exception {
                    throw new IOException();
                }
            });

            processor.onComplete();

            assertTrue(events.toString(), events.isEmpty());

            TestHelper.assertUndeliverable(errors, 0, IOException.class);
        } finally {
            RxJavaPlugins.reset();
        }
    }

    // A protocol-violating source: only the first terminal event is consumed,
    // later signals go to the plugin error handler.
    @Test
    public void badSource() {
        List<Throwable> errors = TestHelper.trackPluginErrors();
        try {
            MaybeConsumers.subscribeAutoDispose(
                new Maybe<Integer>() {
                    @Override
                    protected void subscribeActual(MaybeObserver<? super Integer> observer) {
                        observer.onSubscribe(Disposable.empty());
                        observer.onComplete();
                        observer.onSubscribe(Disposable.empty());
                        observer.onSuccess(2);
                        observer.onComplete();
                        observer.onError(new IOException());
                    }
                }, composite, this, this, this
            );

            assertEquals(Arrays.<Object>asList("OnComplete"), events);

            TestHelper.assertUndeliverable(errors, 0, IOException.class);
        } finally {
            RxJavaPlugins.reset();
        }
    }
}
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.psi;

import com.intellij.pom.java.LanguageLevel;
import com.intellij.psi.javadoc.PsiDocComment;
import com.intellij.psi.javadoc.PsiDocTag;
import com.intellij.util.IncorrectOperationException;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

/**
 * Factory for creating Java PSI elements (classes, methods, types, expressions,
 * comments, modules, ...) from plain text. Each method parses its text argument
 * and throws {@link IncorrectOperationException} when the text is not valid for
 * the requested element kind; the optional {@code context} parameter supplies
 * the element used for resolving references inside the created element.
 */
public interface PsiJavaParserFacade {
  /**
   * Creates a JavaDoc tag from the specified text.
   *
   * @param text the text of the JavaDoc tag.
   * @return the created tag.
   * @throws IncorrectOperationException if the text of the tag is not valid.
   */
  @NotNull
  PsiDocTag createDocTagFromText(@NotNull @NonNls String text) throws IncorrectOperationException;

  /**
   * Creates a JavaDoc comment from the specified text.
   *
   * @param text the text of the JavaDoc comment.
   * @return the created comment.
   * @throws IncorrectOperationException if the text of the comment is not valid.
   */
  @NotNull
  PsiDocComment createDocCommentFromText(@NotNull @NonNls String text) throws IncorrectOperationException;

  /**
   * Creates a JavaDoc comment from the specified text.
   *
   * @param text the text of the JavaDoc comment.
   * @param context the PSI element used as context for resolving references inside this javadoc
   * @return the created comment.
   * @throws IncorrectOperationException if the text of the comment is not valid.
   */
  @NotNull
  PsiDocComment createDocCommentFromText(@NotNull @NonNls String text, @Nullable PsiElement context) throws IncorrectOperationException;

  /**
   * Creates a Java class with a dummy name from the specified body text (the text between the braces).
   *
   * @param text the body text of the class to create.
   * @param context the PSI element used as context for resolving references which cannot be resolved
   *                within the class.
   * @return the created class instance.
   * @throws IncorrectOperationException if the text is not a valid class body.
   */
  @NotNull
  PsiClass createClassFromText(@NotNull @NonNls String text, @Nullable PsiElement context) throws IncorrectOperationException;

  /**
   * Creates a Java field from the specified text.
   *
   * @param text the text of the field to create.
   * @param context the PSI element used as context for resolving references from the field.
   * @return the created field instance.
   * @throws IncorrectOperationException if the text is not a valid field body.
   */
  @NotNull
  PsiField createFieldFromText(@NotNull @NonNls String text, @Nullable PsiElement context) throws IncorrectOperationException;

  /**
   * Creates a Java method from the specified text with the specified language level.
   *
   * @param text the text of the method to create.
   * @param context the PSI element used as context for resolving references from the method.
   * @param languageLevel the language level used for creating the method.
   * @return the created method instance.
   * @throws IncorrectOperationException if the text is not a valid method body.
   */
  @NotNull
  PsiMethod createMethodFromText(@NotNull @NonNls String text, @Nullable PsiElement context, LanguageLevel languageLevel) throws IncorrectOperationException;

  /**
   * Creates a Java method from the specified text.
   *
   * @param text the text of the method to create.
   * @param context the PSI element used as context for resolving references from the method.
   * @return the created method instance.
   * @throws IncorrectOperationException if the text is not a valid method body.
   */
  @NotNull
  PsiMethod createMethodFromText(@NotNull @NonNls String text, @Nullable PsiElement context) throws IncorrectOperationException;

  /**
   * Creates a Java method parameter from the specified text.
   *
   * @param text the text of the parameter to create.
   * @param context the PSI element used as context for resolving references from the parameter.
   * @return the created parameter instance.
   * @throws IncorrectOperationException if the text is not a valid parameter body.
   */
  @NotNull
  PsiParameter createParameterFromText(@NotNull @NonNls String text, @Nullable PsiElement context) throws IncorrectOperationException;

  /**
   * Creates a Java record header from the specified text (excluding parentheses).
   *
   * @param text the text of the record header to create.
   * @param context the PSI element used as context for resolving references from the header.
   * @return the created record header instance.
   * @throws IncorrectOperationException if the text is not a valid record header text.
   */
  @NotNull
  PsiRecordHeader createRecordHeaderFromText(@NotNull @NonNls String text, @Nullable PsiElement context) throws IncorrectOperationException;

  /**
   * Creates a Java try-resource from the specified text.
   *
   * @param text the text of the resource to create.
   * @param context the PSI element used as context for resolving references from the resource.
   * @return the created resource instance.
   * @throws IncorrectOperationException if the text is not a valid resource definition.
   */
  @NotNull
  PsiResourceVariable createResourceFromText(@NotNull @NonNls String text, @Nullable PsiElement context) throws IncorrectOperationException;

  /**
   * Creates a Java type from the specified text.
   *
   * @param text the text of the type to create (for example, a primitive type keyword, an array
   *             declaration or the name of a class).
   * @param context the PSI element used as context for resolving the reference.
   * @return the created type instance.
   * @throws IncorrectOperationException if the text does not specify a valid type.
   */
  @NotNull
  PsiType createTypeFromText(@NotNull @NonNls String text, @Nullable PsiElement context) throws IncorrectOperationException;

  /**
   * Creates a Java type element from the specified text.
   *
   * @param text the text of the type to create (for example, a primitive type keyword, an array
   *             declaration or the name of a class).
   * @param context the PSI element used as context for resolving the reference.
   * @return the created type element.
   * @throws IncorrectOperationException if the text does not specify a valid type.
   */
  @NotNull
  PsiTypeElement createTypeElementFromText(@NotNull @NonNls String text, @Nullable PsiElement context) throws IncorrectOperationException;

  /**
   * Creates a Java code reference from the specified text.
   *
   * @param text the text of the code reference to create (for example, a qualified or
   *             unqualified class or package name).
   * @param context the PSI element used as context for resolving the reference.
   * @return the created reference element.
   * @throws IncorrectOperationException if the text does not specify a valid code reference.
   */
  @NotNull
  PsiJavaCodeReferenceElement createReferenceFromText(@NotNull @NonNls String text, @Nullable PsiElement context) throws IncorrectOperationException;

  /**
   * Creates a Java code block from the specified text.
   *
   * @param text the text of the code block to create.
   * @param context the PSI element used as context for resolving references from the block.
   * @return the created code block instance.
   * @throws IncorrectOperationException if the text does not specify a valid code block.
   */
  @NotNull
  PsiCodeBlock createCodeBlockFromText(@NotNull @NonNls String text, @Nullable PsiElement context) throws IncorrectOperationException;

  /**
   * Creates a Java statement from the specified text.
   *
   * @param text the text of the statement to create.
   * @param context the PSI element used as context for resolving references from the statement.
   * @return the created statement instance.
   * @throws IncorrectOperationException if the text does not specify a valid statement.
   */
  @NotNull
  PsiStatement createStatementFromText(@NotNull @NonNls String text, @Nullable PsiElement context) throws IncorrectOperationException;

  /**
   * Creates a Java expression from the specified text.
   *
   * @param text the text of the expression to create.
   * @param context the PSI element used as context for resolving references from the expression.
   * @return the created expression instance.
   * @throws IncorrectOperationException if the text does not specify a valid expression.
   */
  @NotNull
  PsiExpression createExpressionFromText(@NotNull @NonNls String text, @Nullable PsiElement context) throws IncorrectOperationException;

  /**
   * Creates a Java comment from the specified text.
   *
   * @param text the text of the comment to create.
   * @param context the PSI element used as context for resolving references from the comment.
   * @return the created comment instance.
   * @throws IncorrectOperationException if the text does not specify a valid comment.
   */
  @NotNull
  PsiComment createCommentFromText(@NotNull @NonNls String text, @Nullable PsiElement context) throws IncorrectOperationException;

  /**
   * Creates a type parameter from the specified text.
   *
   * @param text the text of the type parameter to create.
   * @param context the context for resolving references.
   * @return the created type parameter instance.
   * @throws IncorrectOperationException if the text does not specify a valid type parameter.
   */
  @NotNull
  PsiTypeParameter createTypeParameterFromText(@NotNull @NonNls String text, @Nullable PsiElement context) throws IncorrectOperationException;

  /**
   * Creates an annotation from the specified text.
   *
   * @param annotationText the text of the annotation to create.
   * @param context the context for resolving references from the annotation.
   * @return the created annotation instance.
   * @throws IncorrectOperationException if the text does not specify a valid annotation.
   */
  @NotNull
  PsiAnnotation createAnnotationFromText(@NotNull @NonNls String annotationText, @Nullable PsiElement context) throws IncorrectOperationException;

  /**
   * Creates an enum constant from the specified text.
   *
   * @param text the text of the enum constant to create.
   * @param context the PSI element used as context for resolving references from the constant.
   * @return the created enum constant instance.
   * @throws IncorrectOperationException if the text does not specify a valid enum constant.
   */
  @NotNull
  PsiEnumConstant createEnumConstantFromText(@NotNull @NonNls String text, @Nullable PsiElement context) throws IncorrectOperationException;

  /**
   * Creates a Java type from the specified text.
   *
   * @param text the text of the type to create (a primitive type keyword).
   * @return the created type instance.
   * @throws IncorrectOperationException if some of the parameters are not valid.
   */
  @NotNull
  PsiType createPrimitiveTypeFromText(@NotNull @NonNls String text) throws IncorrectOperationException;

  /** @deprecated use {@link #createModuleFromText(String, PsiElement)} */
  @Deprecated
  default PsiJavaModule createModuleFromText(@NotNull @NonNls String text) throws IncorrectOperationException {
    return createModuleFromText(text, null);
  }

  /**
   * Creates a Java module declaration from the specified text.
   */
  @NotNull
  PsiJavaModule createModuleFromText(@NotNull @NonNls String text, @Nullable PsiElement context) throws IncorrectOperationException;

  /**
   * Creates a Java module statement from the specified text.
   */
  @NotNull
  PsiStatement createModuleStatementFromText(@NotNull @NonNls String text, @Nullable PsiElement context) throws IncorrectOperationException;

  /**
   * Creates a Java module reference element from the specified text.
   */
  @NotNull
  PsiJavaModuleReferenceElement createModuleReferenceFromText(@NotNull @NonNls String text, @Nullable PsiElement context) throws IncorrectOperationException;
}
/*
 * MIT License
 *
 * Copyright (c) 2022 MASES s.r.l.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */

/**************************************************************************************
 * <auto-generated>
 *      This code was generated from a template using JCOReflector
 *
 *      Manual changes to this file may cause unexpected behavior in your application.
 *      Manual changes to this file will be overwritten if the code is regenerated.
 * </auto-generated>
 *************************************************************************************/

package system.runtime.serialization;

import org.mases.jcobridge.*;
import org.mases.jcobridge.netreflection.*;
import java.util.ArrayList;

// Import section
import system.reflection.MemberInfo;
import system.text.StringBuilder;
import system.xml.XmlNamespaceManager;

/**
 * The base .NET class managing System.Runtime.Serialization.XPathQueryGenerator, System.Private.DataContractSerialization, Version=4.1.5.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a.
 * <p>
 *
 * See: <a href="https://docs.microsoft.com/en-us/dotnet/api/System.Runtime.Serialization.XPathQueryGenerator" target="_top">https://docs.microsoft.com/en-us/dotnet/api/System.Runtime.Serialization.XPathQueryGenerator</a>
 */
public class XPathQueryGenerator extends NetObject  {
    /**
     * Fully assembly qualified name: System.Private.DataContractSerialization, Version=4.1.5.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a
     */
    public static final String assemblyFullName = "System.Private.DataContractSerialization, Version=4.1.5.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a";
    /**
     * Assembly name: System.Private.DataContractSerialization
     */
    public static final String assemblyShortName = "System.Private.DataContractSerialization";
    /**
     * Qualified class name: System.Runtime.Serialization.XPathQueryGenerator
     */
    public static final String className = "System.Runtime.Serialization.XPathQueryGenerator";
    // Bridge to the .NET runtime, resolved per assembly.
    static JCOBridge bridge = JCOBridgeInstance.getInstance(assemblyFullName);
    /**
     * The type managed from JCOBridge. See {@link JCType}
     */
    public static JCType classType = createType();
    static JCEnum enumInstance = null;
    // The wrapped .NET object instance; null until a constructor/setter assigns it.
    JCObject classInstance = null;

    // Resolves the .NET type through the bridge; returns null (after logging) on failure
    // rather than throwing, so static initialization of this class cannot fail.
    static JCType createType() {
        try {
            String classToCreate = className + ", "
                    + (JCOReflector.getUseFullAssemblyName() ? assemblyFullName : assemblyShortName);
            if (JCOReflector.getDebug())
                JCOReflector.writeLog("Creating %s", classToCreate);
            JCType typeCreated = bridge.GetType(classToCreate);
            if (JCOReflector.getDebug())
                JCOReflector.writeLog("Created: %s",
                        (typeCreated != null) ? typeCreated.toString() : "Returned null value");
            return typeCreated;
        } catch (JCException e) {
            JCOReflector.writeLog(e);
            return null;
        }
    }

    // Adds an assembly reference to the bridge, translating native bridge
    // exceptions into their Java-side equivalents.
    void addReference(String ref) throws Throwable {
        try {
            bridge.AddReference(ref);
        } catch (JCNativeException jcne) {
            throw translateException(jcne);
        }
    }

    /**
     * Internal constructor. Use with caution 
     */
    public XPathQueryGenerator(java.lang.Object instance) throws Throwable {
        super(instance);
        if (instance instanceof JCObject) {
            classInstance = (JCObject) instance;
        } else
            throw new Exception("Cannot manage object, it is not a JCObject");
    }

    public String getJCOAssemblyName() {
        return assemblyFullName;
    }

    public String getJCOClassName() {
        return className;
    }

    public String getJCOObjectName() {
        return className + ", "
                + (JCOReflector.getUseFullAssemblyName() ? assemblyFullName : assemblyShortName);
    }

    public java.lang.Object getJCOInstance() {
        return classInstance;
    }

    public void setJCOInstance(JCObject instance) {
        classInstance = instance;
        super.setJCOInstance(classInstance);
    }

    public JCType getJCOType() {
        return classType;
    }

    /**
     * Try to cast the {@link IJCOBridgeReflected} instance into {@link XPathQueryGenerator}, a cast assert is made to check if types are compatible.
     * @param from {@link IJCOBridgeReflected} instance to be casted
     * @return {@link XPathQueryGenerator} instance
     * @throws java.lang.Throwable in case of error during cast operation
     */
    public static XPathQueryGenerator cast(IJCOBridgeReflected from) throws Throwable {
        NetType.AssertCast(classType, from);
        return new XPathQueryGenerator(from.getJCOInstance());
    }

    // Constructors section

    public XPathQueryGenerator() throws Throwable {
    }

    // Methods section

    // Invokes the static .NET method CreateFromDataContractSerializer; the
    // declared .NET exception types mirror what the remote call may surface.
    public static java.lang.String CreateFromDataContractSerializer(NetType type, MemberInfo[] pathToMember, StringBuilder rootElementXpath, JCORefOut<XmlNamespaceManager> namespaces) throws Throwable, system.ArgumentException, system.ArgumentOutOfRangeException, system.ArgumentNullException, system.InvalidOperationException, system.PlatformNotSupportedException, system.IndexOutOfRangeException, system.NotSupportedException, system.resources.MissingManifestResourceException, system.ObjectDisposedException, system.FormatException, system.ArrayTypeMismatchException, system.OutOfMemoryException {
        if (classType == null)
            throw new UnsupportedOperationException("classType is null.");
        try {
            return (java.lang.String)classType.Invoke("CreateFromDataContractSerializer",
                    type == null ? null : type.getJCOInstance(),
                    toObjectFromArray(pathToMember),
                    rootElementXpath == null ? null : rootElementXpath.getJCOInstance(),
                    namespaces.getJCRefOut());
        } catch (JCNativeException jcne) {
            throw translateException(jcne);
        }
    }

    // Overload without the rootElementXpath parameter.
    public static java.lang.String CreateFromDataContractSerializer(NetType type, MemberInfo[] pathToMember, JCORefOut<XmlNamespaceManager> namespaces) throws Throwable, system.ArgumentException, system.ArgumentOutOfRangeException, system.IndexOutOfRangeException, system.NotSupportedException, system.ArgumentNullException, system.resources.MissingManifestResourceException, system.ObjectDisposedException, system.InvalidOperationException, system.globalization.CultureNotFoundException, system.PlatformNotSupportedException, system.FormatException, system.ArrayTypeMismatchException, system.OutOfMemoryException {
        if (classType == null)
            throw new UnsupportedOperationException("classType is null.");
        try {
            return (java.lang.String)classType.Invoke("CreateFromDataContractSerializer",
                    type == null ? null : type.getJCOInstance(),
                    toObjectFromArray(pathToMember),
                    namespaces.getJCRefOut());
        } catch (JCNativeException jcne) {
            throw translateException(jcne);
        }
    }

    // Properties section

    // Instance Events section

}
package org.eclipse.scout.healthcare.shared.order; import javax.annotation.Generated; import org.eclipse.scout.rt.shared.data.basic.table.AbstractTableRowData; /** * <b>NOTE:</b><br> * This class is auto generated by the Scout SDK. No manual modifications recommended. */ @Generated(value = "org.eclipse.scout.healthcare.client.order.DealsTablePage", comments = "This class is auto generated by the Scout SDK. No manual modifications recommended.") public class DealsTablePageData extends AbstractDealsTablePageData { private static final long serialVersionUID = 1L; @Override public DealsTableRowData addRow() { return (DealsTableRowData) super.addRow(); } @Override public DealsTableRowData addRow(int rowState) { return (DealsTableRowData) super.addRow(rowState); } @Override public DealsTableRowData createRow() { return new DealsTableRowData(); } @Override public Class<? extends AbstractTableRowData> getRowType() { return DealsTableRowData.class; } @Override public DealsTableRowData[] getRows() { return (DealsTableRowData[]) super.getRows(); } @Override public DealsTableRowData rowAt(int index) { return (DealsTableRowData) super.rowAt(index); } public void setRows(DealsTableRowData[] rows) { super.setRows(rows); } public static class DealsTableRowData extends AbstractDealsTableRowData { private static final long serialVersionUID = 1L; } }
package edu.virginia.vcgr.genii.client.cmd.tools;

import java.io.IOException;
import java.io.PrintWriter;

import edu.virginia.vcgr.genii.client.cmd.InvalidToolUsageException;
import edu.virginia.vcgr.genii.client.cmd.ReloadShellException;
import edu.virginia.vcgr.genii.client.cmd.ToolException;
import edu.virginia.vcgr.genii.client.comm.ClientUtils;
import edu.virginia.vcgr.genii.client.comm.SecurityUpdateResults;
import edu.virginia.vcgr.genii.client.context.ContextManager;
import edu.virginia.vcgr.genii.client.context.ICallingContext;
import edu.virginia.vcgr.genii.client.dialog.UserCancelException;
import edu.virginia.vcgr.genii.client.io.LoadFileResource;
import edu.virginia.vcgr.genii.client.rcreate.CreationException;
import edu.virginia.vcgr.genii.client.rns.RNSException;
import edu.virginia.vcgr.genii.client.rp.ResourcePropertyException;
import edu.virginia.vcgr.genii.client.security.axis.AuthZSecurityException;
import edu.virginia.vcgr.genii.security.TransientCredentials;
import edu.virginia.vcgr.genii.security.VerbosityLevel;
import edu.virginia.vcgr.genii.security.credentials.NuCredential;
import edu.virginia.vcgr.genii.security.credentials.X509Identity;
import edu.virginia.vcgr.genii.security.identity.IdentityType;
import edu.virginia.vcgr.genii.security.x509.KeyAndCertMaterial;

/**
 * Grid-shell tool that prints the client's current identity plus any additional
 * transient credentials held in the calling context.
 */
public class WhoamiTool extends BaseGridTool
{
	static final private String _DESCRIPTION = "config/tooldocs/description/dwhoami";
	static final private String _USAGE = "config/tooldocs/usage/uwhoami";
	static final private String _MANPAGE = "config/tooldocs/man/whoami";

	// How much certificate detail describe() should emit; OFF by default.
	private VerbosityLevel _verbosity = VerbosityLevel.OFF;

	// true if we want to show openssl one-line rdn format.
	private boolean _oneLine = false;

	/**
	 * Parses the --verbosity option into a {@link VerbosityLevel}.
	 *
	 * @param verbosityString name of a VerbosityLevel enum constant.
	 * @throws InvalidToolUsageException if the string is not a valid level name.
	 */
	@Option({ "verbosity" })
	public void setVerbosity(String verbosityString) throws InvalidToolUsageException
	{
		// Fix: Enum.valueOf never returns null -- it throws IllegalArgumentException
		// for an unknown name (and NullPointerException for null), so the original
		// "== null" check was dead code and bad input escaped as an unchecked
		// exception instead of the intended usage error.
		if (verbosityString == null)
			throw new InvalidToolUsageException();
		try {
			_verbosity = VerbosityLevel.valueOf(verbosityString);
		} catch (IllegalArgumentException e) {
			throw new InvalidToolUsageException();
		}
	}

	/** Enables the openssl one-line RDN output format. */
	@Option({ "oneline" })
	public void setOneline()
	{
		_oneLine = true;
	}

	public WhoamiTool()
	{
		super(new LoadFileResource(_DESCRIPTION), new LoadFileResource(_USAGE), false, ToolCategory.SECURITY);
		addManPage(new LoadFileResource(_MANPAGE));
	}

	@Override
	protected int runCommand() throws ReloadShellException, ToolException, UserCancelException, RNSException, AuthZSecurityException,
		IOException, ResourcePropertyException, CreationException
	{
		ICallingContext callingContext = ContextManager.getCurrentContext();
		return displayCredentials(callingContext, true, stdout, stderr);
	}

	/**
	 * Prints the client tool identity and any additional credentials to stdout.
	 *
	 * @param callingContext context holding the credentials; "No credentials" is printed if null.
	 * @param refreshCreds if true, stale credentials are renewed before display.
	 * @param stdout destination for the report.
	 * @param stderr currently unused, kept for interface stability.
	 * @return always 0 (tool success).
	 * @throws AuthZSecurityException if credential renewal fails.
	 */
	public int displayCredentials(ICallingContext callingContext, boolean refreshCreds, PrintWriter stdout, PrintWriter stderr)
		throws AuthZSecurityException
	{
		if (callingContext == null) {
			stdout.println("No credentials");
			return 0;
		}
		KeyAndCertMaterial clientKeyMaterial = null;
		if (refreshCreds) {
			// remove/renew stale creds/attributes.
			clientKeyMaterial =
				ClientUtils.checkAndRenewCredentials(callingContext, BaseGridTool.credsValidUntil(), new SecurityUpdateResults());
		} else {
			// just go with what we have already.
			// NOTE(review): assumes getActiveKeyAndCertMaterial() is non-null here;
			// a context with no active key material would NPE below -- confirm callers.
			clientKeyMaterial = callingContext.getActiveKeyAndCertMaterial();
		}
		TransientCredentials transientCredentials = TransientCredentials.getTransientCredentials(callingContext);
		stdout.format("Client Tool Identity: \n\t%s\n",
			(new X509Identity(clientKeyMaterial._clientCertChain, IdentityType.CONNECTION)).describe(_verbosity));
		if (_oneLine) {
			stdout.format("\t%s\n", X509Identity.getOpensslRdn(clientKeyMaterial._clientCertChain[0]));
		}
		stdout.format("\n");
		if (!transientCredentials.isEmpty()) {
			stdout.format("Additional Credentials: \n");
			for (NuCredential cred : transientCredentials.getCredentials()) {
				stdout.format("\t%s\n", cred.describe(_verbosity));
				if (_oneLine) {
					stdout.format("\t%s\n", X509Identity.getOpensslRdn(cred.getOriginalAsserter()[0]));
				}
			}
		}
		return 0;
	}

	@Override
	protected void verify() throws ToolException
	{
		// whoami takes no positional arguments.
		if (numArguments() != 0)
			throw new InvalidToolUsageException();
	}
}
package com.eloli.fakechat.core.config; public abstract class Migrater { public abstract void migrate(Configure from, Configure to); }
package strategyduckapp; public class RedheadDuck extends Duck { public RedheadDuck() { this.quackBehaviour = new Quack(); this.flyBehaviour = new FlyWithWings(); } @Override public void display() { System.out.println("Picture of a readhead duck"); } }
import static java.util.Collections.EMPTY_LIST; import static org.junit.Assert.assertNotNull; public class AnonymousClassJUnit4 { @org.junit.Test public void testNotNull() { assertNotNull(EMPTY_LIST); } }
package io.deephaven.db.v2.tuples.generated;

import io.deephaven.datastructures.util.SmartKey;
import io.deephaven.db.tables.utils.DBDateTime;
import io.deephaven.db.tables.utils.DBTimeUtils;
import io.deephaven.db.util.BooleanUtils;
import io.deephaven.db.util.tuples.generated.LongByteDoubleTuple;
import io.deephaven.db.v2.sources.ColumnSource;
import io.deephaven.db.v2.sources.WritableSource;
import io.deephaven.db.v2.sources.chunk.Attributes;
import io.deephaven.db.v2.sources.chunk.ByteChunk;
import io.deephaven.db.v2.sources.chunk.Chunk;
import io.deephaven.db.v2.sources.chunk.DoubleChunk;
import io.deephaven.db.v2.sources.chunk.LongChunk;
import io.deephaven.db.v2.sources.chunk.ObjectChunk;
import io.deephaven.db.v2.sources.chunk.WritableChunk;
import io.deephaven.db.v2.sources.chunk.WritableObjectChunk;
import io.deephaven.db.v2.tuples.AbstractTupleSource;
import io.deephaven.db.v2.tuples.ThreeColumnTupleSourceFactory;
import io.deephaven.db.v2.tuples.TupleSource;
import io.deephaven.util.type.TypeUtils;
import org.jetbrains.annotations.NotNull;

/**
 * <p>{@link TupleSource} that produces key column values from {@link ColumnSource} types Long, Byte, and Double.
 * <p>Generated by {@link io.deephaven.db.v2.tuples.TupleSourceCodeGenerator}.
 */
@SuppressWarnings({"unused", "WeakerAccess"})
public class ReinterpretedDateTimeReinterpretedBooleanDoubleColumnTupleSource extends AbstractTupleSource<LongByteDoubleTuple> {

    /** {@link ThreeColumnTupleSourceFactory} instance to create instances of {@link ReinterpretedDateTimeReinterpretedBooleanDoubleColumnTupleSource}. **/
    public static final ThreeColumnTupleSourceFactory<LongByteDoubleTuple, Long, Byte, Double> FACTORY = new Factory();

    private final ColumnSource<Long> columnSource1;
    private final ColumnSource<Byte> columnSource2;
    private final ColumnSource<Double> columnSource3;

    public ReinterpretedDateTimeReinterpretedBooleanDoubleColumnTupleSource(
            @NotNull final ColumnSource<Long> columnSource1,
            @NotNull final ColumnSource<Byte> columnSource2,
            @NotNull final ColumnSource<Double> columnSource3
    ) {
        super(columnSource1, columnSource2, columnSource3);
        this.columnSource1 = columnSource1;
        this.columnSource2 = columnSource2;
        this.columnSource3 = columnSource3;
    }

    // Builds a tuple from the current values at indexKey.
    @Override
    public final LongByteDoubleTuple createTuple(final long indexKey) {
        return new LongByteDoubleTuple(
                columnSource1.getLong(indexKey),
                columnSource2.getByte(indexKey),
                columnSource3.getDouble(indexKey)
        );
    }

    // Builds a tuple from the previous-cycle values at indexKey.
    @Override
    public final LongByteDoubleTuple createPreviousTuple(final long indexKey) {
        return new LongByteDoubleTuple(
                columnSource1.getPrevLong(indexKey),
                columnSource2.getPrevByte(indexKey),
                columnSource3.getPrevDouble(indexKey)
        );
    }

    // Builds a tuple from boxed external-type values:
    // DBDateTime -> nanos (long), Boolean -> byte, Double -> double.
    @Override
    public final LongByteDoubleTuple createTupleFromValues(@NotNull final Object... values) {
        return new LongByteDoubleTuple(
                DBTimeUtils.nanos((DBDateTime)values[0]),
                BooleanUtils.booleanAsByte((Boolean)values[1]),
                TypeUtils.unbox((Double)values[2])
        );
    }

    // Builds a tuple from already-reinterpreted boxed values (Long, Byte, Double).
    @Override
    public final LongByteDoubleTuple createTupleFromReinterpretedValues(@NotNull final Object... values) {
        return new LongByteDoubleTuple(
                TypeUtils.unbox((Long)values[0]),
                TypeUtils.unbox((Byte)values[1]),
                TypeUtils.unbox((Double)values[2])
        );
    }

    // Writes one tuple element (converted back to its external type) into a
    // WritableSource at destinationIndexKey.
    @SuppressWarnings("unchecked")
    @Override
    public final <ELEMENT_TYPE> void exportElement(@NotNull final LongByteDoubleTuple tuple, final int elementIndex, @NotNull final WritableSource<ELEMENT_TYPE> writableSource, final long destinationIndexKey) {
        if (elementIndex == 0) {
            writableSource.set(destinationIndexKey, (ELEMENT_TYPE) DBTimeUtils.nanosToTime(tuple.getFirstElement()));
            return;
        }
        if (elementIndex == 1) {
            writableSource.set(destinationIndexKey, (ELEMENT_TYPE) BooleanUtils.byteAsBoolean(tuple.getSecondElement()));
            return;
        }
        if (elementIndex == 2) {
            // Element 2 stays a primitive double; no external-type conversion needed.
            writableSource.set(destinationIndexKey, tuple.getThirdElement());
            return;
        }
        throw new IndexOutOfBoundsException("Invalid element index " + elementIndex + " for export");
    }

    // Converts the whole tuple to a SmartKey of external-type boxed values.
    @Override
    public final Object exportToExternalKey(@NotNull final LongByteDoubleTuple tuple) {
        return new SmartKey(
                DBTimeUtils.nanosToTime(tuple.getFirstElement()),
                BooleanUtils.byteAsBoolean(tuple.getSecondElement()),
                TypeUtils.box(tuple.getThirdElement())
        );
    }

    // Returns one element converted back to its external type.
    @Override
    public final Object exportElement(@NotNull final LongByteDoubleTuple tuple, int elementIndex) {
        if (elementIndex == 0) {
            return DBTimeUtils.nanosToTime(tuple.getFirstElement());
        }
        if (elementIndex == 1) {
            return BooleanUtils.byteAsBoolean(tuple.getSecondElement());
        }
        if (elementIndex == 2) {
            return TypeUtils.box(tuple.getThirdElement());
        }
        throw new IllegalArgumentException("Bad elementIndex for 3 element tuple: " + elementIndex);
    }

    // Returns one element as its reinterpreted (boxed primitive) type.
    @Override
    public final Object exportElementReinterpreted(@NotNull final LongByteDoubleTuple tuple, int elementIndex) {
        if (elementIndex == 0) {
            return TypeUtils.box(tuple.getFirstElement());
        }
        if (elementIndex == 1) {
            return TypeUtils.box(tuple.getSecondElement());
        }
        if (elementIndex == 2) {
            return TypeUtils.box(tuple.getThirdElement());
        }
        throw new IllegalArgumentException("Bad elementIndex for 3 element tuple: " + elementIndex);
    }

    @Override
    public Class<LongByteDoubleTuple> getNativeType() {
        return LongByteDoubleTuple.class;
    }

    // Converts three typed input chunks into one chunk of tuples.
    @Override
    protected void convertChunks(@NotNull WritableChunk<? super Attributes.Values> destination, int chunkSize, Chunk<Attributes.Values> [] chunks) {
        WritableObjectChunk<LongByteDoubleTuple, ? super Attributes.Values> destinationObjectChunk = destination.asWritableObjectChunk();
        LongChunk<Attributes.Values> chunk1 = chunks[0].asLongChunk();
        ByteChunk<Attributes.Values> chunk2 = chunks[1].asByteChunk();
        DoubleChunk<Attributes.Values> chunk3 = chunks[2].asDoubleChunk();
        for (int ii = 0; ii < chunkSize; ++ii) {
            destinationObjectChunk.set(ii, new LongByteDoubleTuple(chunk1.get(ii), chunk2.get(ii), chunk3.get(ii)));
        }
        destinationObjectChunk.setSize(chunkSize);
    }

    /** {@link ThreeColumnTupleSourceFactory} for instances of {@link ReinterpretedDateTimeReinterpretedBooleanDoubleColumnTupleSource}. **/
    private static final class Factory implements ThreeColumnTupleSourceFactory<LongByteDoubleTuple, Long, Byte, Double> {

        private Factory() {
        }

        @Override
        public TupleSource<LongByteDoubleTuple> create(
                @NotNull final ColumnSource<Long> columnSource1,
                @NotNull final ColumnSource<Byte> columnSource2,
                @NotNull final ColumnSource<Double> columnSource3
        ) {
            return new ReinterpretedDateTimeReinterpretedBooleanDoubleColumnTupleSource(
                    columnSource1,
                    columnSource2,
                    columnSource3
            );
        }
    }
}
package com.google.maps.android.data.geojson; import com.google.android.gms.maps.model.LatLng; import org.junit.Test; import org.junit.Assert; import java.util.ArrayList; public class GeoJsonLineStringTest { GeoJsonLineString ls; @Test public void testGetType() throws Exception { ArrayList<LatLng> coordinates = new ArrayList<LatLng>(); coordinates.add(new LatLng(0, 0)); coordinates.add(new LatLng(50, 50)); coordinates.add(new LatLng(100, 100)); ls = new GeoJsonLineString(coordinates); Assert.assertEquals("LineString", ls.getType()); } @Test public void testGetCoordinates() throws Exception { ArrayList<LatLng> coordinates = new ArrayList<LatLng>(); coordinates.add(new LatLng(0, 0)); coordinates.add(new LatLng(50, 50)); coordinates.add(new LatLng(100, 100)); ls = new GeoJsonLineString(coordinates); Assert.assertEquals(coordinates, ls.getCoordinates()); try { ls = new GeoJsonLineString(null); Assert.fail(); } catch (IllegalArgumentException e) { Assert.assertEquals("Coordinates cannot be null", e.getMessage()); } } @Test public void testGetAltitudes() throws Exception { ArrayList<LatLng> coordinates = new ArrayList<LatLng>(); coordinates.add(new LatLng(0, 0)); coordinates.add(new LatLng(50, 50)); coordinates.add(new LatLng(100, 100)); ArrayList<Double> altitudes = new ArrayList<Double>(); altitudes.add(new Double(100)); altitudes.add(new Double(200)); altitudes.add(new Double(300)); ls = new GeoJsonLineString(coordinates, altitudes); Assert.assertEquals(altitudes, ls.getAltitudes()); Assert.assertEquals(ls.getAltitudes().get(0), 100.0, 0); Assert.assertEquals(ls.getAltitudes().get(1), 200.0, 0); Assert.assertEquals(ls.getAltitudes().get(2), 300.0, 0); } }
// @(#)$Id: PreorderedCompareTo.java,v 1.5 2005/07/07 21:03:09 leavens Exp $ // Copyright (C) 2005 Iowa State University // // This file is part of the runtime library of the Java Modeling Language. // // This library is free software; you can redistribute it and/or // modify it under the terms of the GNU Lesser General Public License // as published by the Free Software Foundation; either version 2.1, // of the License, or (at your option) any later version. // // This library is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU // Lesser General Public License for more details. // // You should have received a copy of the GNU Lesser General Public License // along with JML; see the file LesserGPL.txt. If not, write to the Free // Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA // 02110-1301 USA. package org.jmlspecs.unfinished.resolve; /** Objects with a preorder for their compareTo operation. * * @version $Revision: 1.5 $ * @author Gary T. Leavens */ public interface PreorderedCompareTo extends ReflexiveCompareTo, TransitiveCompareTo { }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.commons.math4.distribution;

import static org.junit.Assert.assertEquals;

import java.util.ArrayList;
import java.util.List;

import org.apache.commons.statistics.distribution.ContinuousDistribution;
import org.apache.commons.math4.exception.DimensionMismatchException;
import org.apache.commons.math4.exception.MathArithmeticException;
import org.apache.commons.math4.exception.NotANumberException;
import org.apache.commons.math4.exception.NotFiniteNumberException;
import org.apache.commons.math4.exception.NotPositiveException;
import org.apache.commons.math4.util.FastMath;
import org.apache.commons.math4.util.Pair;
import org.apache.commons.rng.UniformRandomProvider;
import org.apache.commons.rng.simple.RandomSource;
import org.junit.Assert;
import org.junit.Test;

/**
 * Test class for {@link EnumeratedRealDistribution}.
 */
public class EnumeratedRealDistributionTest {

    /**
     * The distribution object used for testing.
     */
    private final EnumeratedRealDistribution testDistribution;

    /**
     * Creates the default distribution object used for testing.
     */
    public EnumeratedRealDistributionTest() {
        // Non-sorted singleton array with duplicates should be allowed.
        // Values with zero-probability do not extend the support.
        testDistribution = new EnumeratedRealDistribution(
                new double[]{3.0, -1.0, 3.0, 7.0, -2.0, 8.0},
                new double[]{0.2, 0.2, 0.3, 0.3, 0.0, 0.0});
    }

    /**
     * Tests if the {@link EnumeratedRealDistribution} constructor throws
     * exceptions for invalid data.
     */
    @Test
    public void testExceptions() {
        EnumeratedRealDistribution invalid = null;
        try {
            invalid = new EnumeratedRealDistribution(new double[]{1.0, 2.0}, new double[]{0.0});
            Assert.fail("Expected DimensionMismatchException");
        } catch (DimensionMismatchException e) {
            // expected
        }
        try {
            invalid = new EnumeratedRealDistribution(new double[]{1.0, 2.0}, new double[]{0.0, -1.0});
            Assert.fail("Expected NotPositiveException");
        } catch (NotPositiveException e) {
            // expected
        }
        try {
            invalid = new EnumeratedRealDistribution(new double[]{1.0, 2.0}, new double[]{0.0, 0.0});
            Assert.fail("Expected MathArithmeticException");
        } catch (MathArithmeticException e) {
            // expected
        }
        try {
            invalid = new EnumeratedRealDistribution(new double[]{1.0, 2.0}, new double[]{0.0, Double.NaN});
            Assert.fail("Expected NotANumberException");
        } catch (NotANumberException e) {
            // expected
        }
        try {
            invalid = new EnumeratedRealDistribution(new double[]{1.0, 2.0}, new double[]{0.0, Double.POSITIVE_INFINITY});
            Assert.fail("Expected NotFiniteNumberException");
        } catch (NotFiniteNumberException e) {
            // expected
        }
        Assert.assertNull("Expected non-initialized DiscreteRealDistribution", invalid);
    }

    /**
     * Tests if the distribution returns proper probability values.
     */
    @Test
    public void testProbability() {
        double[] points = new double[]{-2.0, -1.0, 0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0};
        double[] results = new double[]{0, 0.2, 0, 0, 0, 0.5, 0, 0, 0, 0.3, 0};
        for (int p = 0; p < points.length; p++) {
            double density = testDistribution.probability(points[p]);
            Assert.assertEquals(results[p], density, 0.0);
        }
    }

    /**
     * Tests if the distribution returns proper density values.
     */
    @Test
    public void testDensity() {
        double[] points = new double[]{-2.0, -1.0, 0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0};
        double[] results = new double[]{0, 0.2, 0, 0, 0, 0.5, 0, 0, 0, 0.3, 0};
        for (int p = 0; p < points.length; p++) {
            double density = testDistribution.density(points[p]);
            Assert.assertEquals(results[p], density, 0.0);
        }
    }

    /**
     * Tests if the distribution returns proper cumulative probability values.
     */
    @Test
    public void testCumulativeProbability() {
        double[] points = new double[]{-2.0, -1.0, 0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0};
        double[] results = new double[]{0, 0.2, 0.2, 0.2, 0.2, 0.7, 0.7, 0.7, 0.7, 1.0, 1.0};
        for (int p = 0; p < points.length; p++) {
            double probability = testDistribution.cumulativeProbability(points[p]);
            Assert.assertEquals(results[p], probability, 1e-10);
        }
    }

    /**
     * Tests if the distribution returns proper mean value.
     */
    @Test
    public void testGetNumericalMean() {
        Assert.assertEquals(3.4, testDistribution.getMean(), 1e-10);
    }

    /**
     * Tests if the distribution returns proper variance.
     */
    @Test
    public void testGetNumericalVariance() {
        Assert.assertEquals(7.84, testDistribution.getVariance(), 1e-10);
    }

    /**
     * Tests if the distribution returns proper lower bound.
     */
    @Test
    public void testGetSupportLowerBound() {
        Assert.assertEquals(-1, testDistribution.getSupportLowerBound(), 0);
    }

    /**
     * Tests if the distribution returns proper upper bound.
     */
    @Test
    public void testGetSupportUpperBound() {
        Assert.assertEquals(7, testDistribution.getSupportUpperBound(), 0);
    }

    /**
     * Tests if the distribution returns properly that the support is connected.
     */
    @Test
    public void testIsSupportConnected() {
        Assert.assertTrue(testDistribution.isSupportConnected());
    }

    /**
     * Tests sampling.
     */
    @Test
    public void testSample() {
        final int n = 1000000;
        final ContinuousDistribution.Sampler sampler =
            testDistribution.createSampler(RandomSource.create(RandomSource.WELL_1024_A, -123456789));
        final double[] samples = AbstractRealDistribution.sample(n, sampler);
        Assert.assertEquals(n, samples.length);
        double sum = 0;
        double sumOfSquares = 0;
        for (int i = 0; i < samples.length; i++) {
            sum += samples[i];
            sumOfSquares += samples[i] * samples[i];
        }
        Assert.assertEquals(testDistribution.getMean(),
                sum / n, 1e-2);
        Assert.assertEquals(testDistribution.getVariance(),
                sumOfSquares / n - FastMath.pow(sum / n, 2), 1e-2);
    }

    @Test
    public void testIssue942() {
        List<Pair<Object,Double>> list = new ArrayList<>();
        // Fix: Double.valueOf replaces the deprecated new Double(...) boxing ctor
        // (deprecated since Java 9); the values are identical.
        list.add(new Pair<Object, Double>(new Object() {}, Double.valueOf(0)));
        list.add(new Pair<Object, Double>(new Object() {}, Double.valueOf(1)));
        final UniformRandomProvider rng = RandomSource.create(RandomSource.WELL_512_A);
        Assert.assertEquals(1, new EnumeratedDistribution<>(list).createSampler(rng).sample(1).length);
    }

    @Test
    public void testIssue1065() {
        // Test Distribution for inverseCumulativeProbability
        //
        //         ^
        //         |
        // 1.000   +--------------------------------o===============
        //         |                              3 |
        //         |                                |
        //         |                           1o===
        // 0.750   +-------------------------> o==  .
        //         |                          3|    .  .
        //         |                         0 |    .  .
        // 0.5625  +---------------> o==o======     .  .
        //         |                 |        .     .  .  .
        //         |                 |        .     .  .  .
        //         |                5|        .     .  .  .
        //         |                 |        .     .  .  .
        //         |             o===         .     .  .  .
        //         |             |   .        .     .  .  .
        //         |            4|   .        .     .  .  .
        //         |             |   .        .     .  .  .
        // 0.000   +=============----+--+------+--+-+--------------->
        //                      14  18  21    28 31 33
        //
        // sum = 4+5+0+3+1+3 = 16

        EnumeratedRealDistribution distribution = new EnumeratedRealDistribution(
                new double[] { 14.0, 18.0, 21.0, 28.0, 31.0, 33.0 },
                new double[] { 4.0 / 16.0, 5.0 / 16.0, 0.0 / 16.0, 3.0 / 16.0, 1.0 / 16.0, 3.0 / 16.0 });

        assertEquals(14.0, distribution.inverseCumulativeProbability(0.0000), 0.0);
        assertEquals(14.0, distribution.inverseCumulativeProbability(0.2500), 0.0);
        assertEquals(33.0, distribution.inverseCumulativeProbability(1.0000), 0.0);
        assertEquals(18.0, distribution.inverseCumulativeProbability(0.5000), 0.0);
        assertEquals(18.0, distribution.inverseCumulativeProbability(0.5624), 0.0);
        assertEquals(28.0, distribution.inverseCumulativeProbability(0.5626), 0.0);
        assertEquals(31.0, distribution.inverseCumulativeProbability(0.7600), 0.0);
        assertEquals(18.0, distribution.inverseCumulativeProbability(0.5625), 0.0);
        assertEquals(28.0, distribution.inverseCumulativeProbability(0.7500), 0.0);
    }

    @Test
    public void testCreateFromDoubles() {
        final double[] data = new double[] {0, 1, 1, 2, 2, 2};
        EnumeratedRealDistribution distribution = new EnumeratedRealDistribution(data);
        assertEquals(0.5, distribution.probability(2), 0);
        assertEquals(0.5, distribution.cumulativeProbability(1), 0);
    }
}
/** * Copyright 2009 Humboldt-Universität zu Berlin. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * */ package org.corpus_tools.salt.common.impl.tests; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNull; import org.corpus_tools.salt.SaltFactory; import org.corpus_tools.salt.common.SMedialDS; import org.corpus_tools.salt.common.SMedialRelation; import org.corpus_tools.salt.common.SToken; import org.junit.Before; import org.junit.Test; public class SMediaRelationTest extends SSequentialRelationTest<SToken, SMedialDS, Double> { @Override protected SMedialRelation getFixture() { return ((SMedialRelation) super.getFixture()); } @Before public void setUp() { setFixture(SaltFactory.createSMedialRelation()); } @Override @Test public void testSetGetStart() { assertNull(getFixture().getStart()); getFixture().setStart(1.0); assertEquals(Double.valueOf(1.0), getFixture().getStart()); getFixture().setStart(2.0); assertEquals(Double.valueOf(2.0), getFixture().getStart()); } @Override @Test public void testSetGetEnd() { assertNull(getFixture().getEnd()); getFixture().setEnd(1.0); assertEquals(Double.valueOf(1.0), getFixture().getEnd()); getFixture().setEnd(2.0); assertEquals(Double.valueOf(2.0), getFixture().getEnd()); } } // SAudioDSRelationTest
/* * Copyright 2004-2012 the Seasar Foundation and the Others. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, * either express or implied. See the License for the specific language * governing permissions and limitations under the License. */ package jp.fieldnotes.hatunatu.util.convert; import java.sql.Time; import java.text.SimpleDateFormat; import java.util.Calendar; import java.util.Date; import java.util.Locale; import org.junit.After; import org.junit.Before; import org.junit.Ignore; import org.junit.Test; import static org.hamcrest.CoreMatchers.*; import static org.junit.Assert.*; import static jp.fieldnotes.hatunatu.util.convert.TimeConversionUtil.*; /** * @author higa */ public class TimeConversionUtilTest { Locale defaultLocale = Locale.getDefault(); /** * @throws Exception */ @Before public void setUp() throws Exception { Locale.setDefault(Locale.JAPANESE); } /** * @throws Exception */ @After public void tearDown() throws Exception { Locale.setDefault(defaultLocale); } /** * @throws Exception */ @Test public void testToDate_Null() throws Exception { assertThat(toDate(null), is(nullValue())); } /** * @throws Exception */ @Test public void testToDate_EmptyString() throws Exception { assertThat(toDate(""), is(nullValue())); } /** * @throws Exception */ @Test public void testToDate_ShortStyle() throws Exception { Date date = toDate("11:49"); assertThat( new SimpleDateFormat("HH:mm:ss").format(date), is("11:49:00")); } /** * @throws Exception */ @Test public void testToDate_MediumStyle() throws Exception { Date date = toDate("11:49:10"); assertThat( new SimpleDateFormat("HH:mm:ss").format(date), 
is("11:49:10")); } /** * @throws Exception */ @Test @Ignore public void testToDate_LongStyle() throws Exception { Date date = toDate("11:49:10 JST"); assertThat( new SimpleDateFormat("HH:mm:ss").format(date), is("11:49:10")); } /** * @throws Exception */ @Test @Ignore public void testToDate_FullStyle() throws Exception { Date date = toDate("11時49分10秒 JST"); assertThat( new SimpleDateFormat("HH:mm:ss").format(date), is("11:49:10")); } /** * @throws Exception */ @Test public void testToDate_PlainFormat() throws Exception { Date date = toDate("114910"); assertThat( new SimpleDateFormat("HH:mm:ss").format(date), is("11:49:10")); } /** * @throws Exception */ @Test public void testToDate_JdbcEscapeFormat() throws Exception { Date date = toDate("11:49:10"); assertThat( new SimpleDateFormat("HH:mm:ss").format(date), is("11:49:10")); } /** * @throws Exception */ @Test public void testToDate_SpecificLocale() throws Exception { Date date = toDate("11:49:10 AM", Locale.US); assertThat( new SimpleDateFormat("HH:mm:ss").format(date), is("11:49:10")); } /** * @throws Exception */ @Test public void testToDate_SpecificPattern() throws Exception { Date date = toDate("10::49::11", "ss::mm::HH"); assertThat( new SimpleDateFormat("HH:mm:ss").format(date), is("11:49:10")); } /** * @throws Exception */ @Test public void testToCalendar_Null() throws Exception { assertThat(toCalendar(null), is(nullValue())); } /** * @throws Exception */ @Test public void testToCalendar_EmptyString() throws Exception { assertThat(toCalendar(""), is(nullValue())); } /** * @throws Exception */ @Test public void testToCalendar_ShortStyle() throws Exception { Calendar calendar = toCalendar("11:49"); assertThat( new SimpleDateFormat("HH:mm:ss").format(calendar.getTime()), is("11:49:00")); } /** * @throws Exception */ @Test public void testToCalendar_MediumStyle() throws Exception { Calendar calendar = toCalendar("11:49:10"); assertThat( new SimpleDateFormat("HH:mm:ss").format(calendar.getTime()), 
is("11:49:10")); } /** * @throws Exception */ @Test @Ignore public void testToCalendar_LongStyle() throws Exception { Calendar calendar = toCalendar("11:49:10 JST"); assertThat( new SimpleDateFormat("HH:mm:ss").format(calendar.getTime()), is("11:49:10")); } /** * @throws Exception */ @Test @Ignore public void testToCalendar_FullStyle() throws Exception { Calendar calendar = toCalendar("11時49分10秒 JST"); assertThat( new SimpleDateFormat("HH:mm:ss").format(calendar.getTime()), is("11:49:10")); } /** * @throws Exception */ @Test public void testToCalendar_PlainFormat() throws Exception { Calendar calendar = toCalendar("114910"); assertThat( new SimpleDateFormat("HH:mm:ss").format(calendar.getTime()), is("11:49:10")); } /** * @throws Exception */ @Test public void testToCalendar_JdbcEscapeFormat() throws Exception { Calendar calendar = toCalendar("11:49:10"); assertThat( new SimpleDateFormat("HH:mm:ss").format(calendar.getTime()), is("11:49:10")); } /** * @throws Exception */ @Test public void testToCalendar_SpecificLocale() throws Exception { Calendar calendar = toCalendar("11:49:10 AM", Locale.US); assertThat( new SimpleDateFormat("HH:mm:ss").format(calendar.getTime()), is("11:49:10")); } /** * @throws Exception */ @Test public void testToCalendar_SpecificPattern() throws Exception { Calendar calendar = toCalendar("10::49::11", "ss::mm::HH"); assertThat( new SimpleDateFormat("HH:mm:ss").format(calendar.getTime()), is("11:49:10")); } /** * @throws Exception */ @Test public void testToSqlTime_Null() throws Exception { assertThat(toSqlTime(null), is(nullValue())); } /** * @throws Exception */ @Test public void testToTime_EmptyString() throws Exception { assertThat(toSqlTime(""), is(nullValue())); } /** * @throws Exception */ @Test public void testToTime_ShortStyle() throws Exception { Time time = toSqlTime("11:49"); assertThat( new SimpleDateFormat("HH:mm:ss").format(time), is("11:49:00")); } /** * @throws Exception */ @Test public void testToTime_MediumStyle() throws 
Exception { Time time = toSqlTime("11:49:10"); assertThat( new SimpleDateFormat("HH:mm:ss").format(time), is("11:49:10")); } /** * @throws Exception */ @Test @Ignore public void testToTime_LongStyle() throws Exception { Time time = toSqlTime("11:49:10 JST"); assertThat( new SimpleDateFormat("HH:mm:ss").format(time), is("11:49:10")); } /** * @throws Exception */ @Test @Ignore public void testToTime_FullStyle() throws Exception { Time time = toSqlTime("11時49分10秒 JST"); assertThat( new SimpleDateFormat("HH:mm:ss").format(time), is("11:49:10")); } /** * @throws Exception */ @Test public void testToTime_PlainFormat() throws Exception { Time time = toSqlTime("114910"); assertThat( new SimpleDateFormat("HH:mm:ss").format(time), is("11:49:10")); } /** * @throws Exception */ @Test public void testToTime_JdbcEscapeFormat() throws Exception { Time time = toSqlTime("11:49:10"); assertThat( new SimpleDateFormat("HH:mm:ss").format(time), is("11:49:10")); } /** * @throws Exception */ @Test public void testToTime_SpecificLocale() throws Exception { Time time = toSqlTime("11:49:10 AM", Locale.US); assertThat( new SimpleDateFormat("HH:mm:ss").format(time), is("11:49:10")); } /** * @throws Exception */ @Test public void testToTime_SpecificPattern() throws Exception { Time time = toSqlTime("10::49::11", "ss::mm::HH"); assertThat( new SimpleDateFormat("HH:mm:ss").format(time), is("11:49:10")); } /** * @throws Exception */ @Test public void testToPlainPattern() throws Exception { assertThat(toPlainPattern("H:m:s"), is("HHmmss")); } }