text
stringlengths
7
1.01M
package com.javarush.test.level13.lesson02.task06;

/* Bug in initializeIdAndName
1. Think about what is wrong in the program.
2. Move the implementation of initializeIdAndName into the User class.
3. initializeIdAndName in User must return type User.
4. Fix the program so it compiles and works.
*/

/**
 * Demonstrates a fluent initializer: {@code User.initializeIdAndName} returns
 * {@code this} so the result can be assigned directly to a {@link DBObject}.
 */
public class Solution {
    public static void main(String[] args) throws Exception {
        System.out.println(Matrix.NEO);
        System.out.println(Matrix.TRINITY);
    }

    /** Holder of shared, pre-initialized users. */
    static class Matrix {
        // final: these are constants and must not be reassignable by other code
        public static final DBObject NEO = new User().initializeIdAndName(1, "Neo");
        public static final DBObject TRINITY = new User().initializeIdAndName(2, "Trinity");
    }

    /** Contract for database objects that can be initialized with id and name. */
    interface DBObject {
        DBObject initializeIdAndName(long id, String name);
    }

    /** Simple user with id and name; initializer returns the covariant type User. */
    static class User implements DBObject {
        long id;
        String name;

        /**
         * Sets id and name and returns this instance (fluent style).
         *
         * @param id   the user id
         * @param name the user name
         * @return this user, for chaining / direct assignment
         */
        public User initializeIdAndName(long id, String name) {
            this.id = id;
            this.name = name;
            return this;
        }

        @Override
        public String toString() {
            return String.format("User has name %s, id = %d", name, id);
        }
    }
}
/*
 * Copyright (c) 2006-2011 Christian Plattner. All rights reserved.
 * Please refer to the LICENSE.txt for licensing details.
 */
package ch.ethz.ssh2.auth;

import java.io.IOException;
import java.io.InterruptedIOException;
import java.security.SecureRandom;
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;

import ch.ethz.ssh2.InteractiveCallback;
import ch.ethz.ssh2.PacketTypeException;
import ch.ethz.ssh2.crypto.PEMDecoder;
import ch.ethz.ssh2.packets.PacketServiceAccept;
import ch.ethz.ssh2.packets.PacketServiceRequest;
import ch.ethz.ssh2.packets.PacketUserauthBanner;
import ch.ethz.ssh2.packets.PacketUserauthFailure;
import ch.ethz.ssh2.packets.PacketUserauthInfoRequest;
import ch.ethz.ssh2.packets.PacketUserauthInfoResponse;
import ch.ethz.ssh2.packets.PacketUserauthRequestInteractive;
import ch.ethz.ssh2.packets.PacketUserauthRequestNone;
import ch.ethz.ssh2.packets.PacketUserauthRequestPassword;
import ch.ethz.ssh2.packets.PacketUserauthRequestPublicKey;
import ch.ethz.ssh2.packets.Packets;
import ch.ethz.ssh2.packets.TypesWriter;
import ch.ethz.ssh2.signature.DSAPrivateKey;
import ch.ethz.ssh2.signature.DSASHA1Verify;
import ch.ethz.ssh2.signature.DSASignature;
import ch.ethz.ssh2.signature.RSAPrivateKey;
import ch.ethz.ssh2.signature.RSASHA1Verify;
import ch.ethz.ssh2.signature.RSASignature;
import ch.ethz.ssh2.transport.ClientTransportManager;
import ch.ethz.ssh2.transport.MessageHandler;

/**
 * Client-side SSH user authentication (RFC 4252): drives the "ssh-userauth"
 * service over a {@link ClientTransportManager} and implements the "none",
 * "password", "publickey" and "keyboard-interactive" methods. Incoming
 * userauth packets arrive via {@link #handleMessage(byte[])} and are consumed
 * from a bounded queue by the authenticate* methods.
 *
 * @author Christian Plattner
 * @version $Id$
 */
public class AuthenticationManager implements MessageHandler {
    private ClientTransportManager tm;
    // Inbound userauth packets, produced by handleMessage().
    // NOTE(review): capacity is fixed at 5 and handleMessage() uses add();
    // if the queue ever fills, add() throws IllegalStateException - confirm
    // the transport layer cannot deliver more than 5 unconsumed packets.
    private final BlockingQueue<byte[]> packets = new ArrayBlockingQueue<byte[]>(5);
    private boolean connectionClosed = false;

    private String banner;

    // Authentication methods the server reported as still usable.
    private Set<String> remainingMethods = new HashSet<String>();
    private boolean isPartialSuccess = false;

    private boolean authenticated = false;
    private boolean initDone = false;

    public AuthenticationManager(ClientTransportManager tm) {
        this.tm = tm;
    }

    /**
     * Takes the next queued packet, blocking until one arrives.
     *
     * @throws IOException if the connection was already closed (rethrows the
     *         transport's close cause) or the wait is interrupted.
     */
    // NOTE(review): connectionClosed is only checked on entry; a close that
    // happens while blocked in take() will not wake this thread - verify the
    // transport unblocks waiters some other way.
    private byte[] deQueue() throws IOException {
        if(connectionClosed) {
            throw tm.getReasonClosedCause();
        }
        // Wait for packet
        try {
            return packets.take();
        }
        catch(InterruptedException e) {
            throw new InterruptedIOException(e.getMessage());
        }
    }

    /**
     * Returns the next non-banner userauth packet; SSH_MSG_USERAUTH_BANNER
     * packets are absorbed and their text stored in {@link #banner}.
     */
    byte[] getNextMessage() throws IOException {
        while(true) {
            byte[] message = deQueue();
            switch(message[0]) {
                case Packets.SSH_MSG_USERAUTH_BANNER:
                    // The server may send an SSH_MSG_USERAUTH_BANNER message at any
                    // time after this authentication protocol starts and before
                    // authentication is successful.
                    PacketUserauthBanner sb = new PacketUserauthBanner(message);
                    banner = sb.getBanner();
                    break;
                default:
                    return message;
            }
        }
    }

    /**
     * Returns the authentication methods the server will still accept for
     * {@code user} (triggers the initial "none" request if not yet done).
     */
    public Set<String> getRemainingMethods(String user) throws IOException {
        initialize(user);
        return remainingMethods;
    }

    /** Returns the last banner sent by the server, or null if none was sent. */
    public String getBanner() {
        return banner;
    }

    /** Returns whether the last failure was a partial success (RFC 4252, 5.1). */
    public boolean getPartialSuccess() {
        return isPartialSuccess;
    }

    /**
     * One-time setup: registers this handler, requests the "ssh-userauth"
     * service and probes with the "none" method to learn the remaining
     * methods. Returns true if "none" already authenticated the user.
     */
    private boolean initialize(String user) throws IOException {
        if(initDone == false) {
            tm.registerMessageHandler(this, 0, 255);

            PacketServiceRequest sr = new PacketServiceRequest("ssh-userauth");
            tm.sendMessage(sr.getPayload());

            // Parsing the reply validates that the server accepted the service.
            final PacketServiceAccept accept = new PacketServiceAccept(this.getNextMessage());

            PacketUserauthRequestNone auth = new PacketUserauthRequestNone("ssh-connection", user);
            tm.sendMessage(auth.getPayload());

            byte[] message = this.getNextMessage();

            initDone = true;

            switch(message[0]) {
                case Packets.SSH_MSG_USERAUTH_SUCCESS:
                    authenticated = true;
                    tm.removeMessageHandler(this);
                    return true;
                case Packets.SSH_MSG_USERAUTH_FAILURE:
                    PacketUserauthFailure puf = new PacketUserauthFailure(message);
                    remainingMethods = puf.getAuthThatCanContinue();
                    isPartialSuccess = puf.isPartialSuccess();
                    return false;
            }
            throw new PacketTypeException(message[0]);
        }
        return authenticated;
    }

    /**
     * Tries "publickey" authentication with every identity offered by the
     * agent proxy, in order, until one succeeds.
     */
    public boolean authenticatePublicKey(String user, AgentProxy proxy) throws IOException {
        initialize(user);

        boolean success;
        for(AgentIdentity identity : proxy.getIdentities()) {
            success = authenticatePublicKey(user, identity);
            if(success) {
                return true;
            }
        }
        return false;
    }

    /**
     * Performs one "publickey" attempt with an agent identity: builds the
     * to-be-signed blob (session id + userauth request fields), has the agent
     * sign it, and sends the signed request.
     */
    private boolean authenticatePublicKey(String user, AgentIdentity identity) throws IOException {
        if(!remainingMethods.contains("publickey")) {
            throw new IOException("Authentication method not supported");
        }

        byte[] pubKeyBlob = identity.getPublicKeyBlob();
        if(pubKeyBlob == null) {
            return false;
        }

        // Data to sign, per RFC 4252 section 7: session identifier followed
        // by the SSH_MSG_USERAUTH_REQUEST fields.
        TypesWriter tw = new TypesWriter();
        byte[] H = tm.getSessionIdentifier();

        tw.writeString(H, 0, H.length);
        tw.writeByte(Packets.SSH_MSG_USERAUTH_REQUEST);
        tw.writeString(user);
        tw.writeString("ssh-connection");
        tw.writeString("publickey");
        tw.writeBoolean(true);
        tw.writeString(identity.getAlgName());
        tw.writeString(pubKeyBlob, 0, pubKeyBlob.length);

        byte[] msg = tw.getBytes();
        byte[] response = identity.sign(msg);

        PacketUserauthRequestPublicKey ua = new PacketUserauthRequestPublicKey(
                "ssh-connection", user, identity.getAlgName(), pubKeyBlob, response);
        tm.sendMessage(ua.getPayload());

        byte[] message = getNextMessage();
        final int type = message[0];
        switch(type) {
            case Packets.SSH_MSG_USERAUTH_SUCCESS:
                authenticated = true;
                tm.removeMessageHandler(this);
                return true;
            case Packets.SSH_MSG_USERAUTH_FAILURE:
                PacketUserauthFailure puf = new PacketUserauthFailure(message);
                remainingMethods = puf.getAuthThatCanContinue();
                isPartialSuccess = puf.isPartialSuccess();
                return false;
        }
        throw new PacketTypeException(type);
    }

    /**
     * "publickey" authentication with a PEM-encoded DSA or RSA private key.
     *
     * @param PEMPrivateKey the PEM text of the key
     * @param password passphrase for the PEM key (may be null if unencrypted)
     * @param rnd randomness source used for DSA signing
     * @throws IOException on protocol errors, unsupported key types, or if the
     *         server does not offer "publickey"; the transport is closed first.
     */
    public boolean authenticatePublicKey(String user, char[] PEMPrivateKey, String password, SecureRandom rnd)
            throws IOException {
        try {
            initialize(user);

            if(!remainingMethods.contains("publickey")) {
                throw new IOException("Authentication method publickey not supported by the server at this stage.");
            }

            Object key = PEMDecoder.decode(PEMPrivateKey, password);

            if(key instanceof DSAPrivateKey) {
                DSAPrivateKey pk = (DSAPrivateKey) key;

                byte[] pk_enc = DSASHA1Verify.encodeSSHDSAPublicKey(pk.getPublicKey());

                // Blob to sign: session id + userauth request fields (RFC 4252, 7).
                TypesWriter tw = new TypesWriter();

                byte[] H = tm.getSessionIdentifier();

                tw.writeString(H, 0, H.length);
                tw.writeByte(Packets.SSH_MSG_USERAUTH_REQUEST);
                tw.writeString(user);
                tw.writeString("ssh-connection");
                tw.writeString("publickey");
                tw.writeBoolean(true);
                tw.writeString("ssh-dss");
                tw.writeString(pk_enc, 0, pk_enc.length);

                byte[] msg = tw.getBytes();

                DSASignature ds = DSASHA1Verify.generateSignature(msg, pk, rnd);

                byte[] ds_enc = DSASHA1Verify.encodeSSHDSASignature(ds);

                PacketUserauthRequestPublicKey ua = new PacketUserauthRequestPublicKey("ssh-connection", user,
                        "ssh-dss", pk_enc, ds_enc);
                tm.sendMessage(ua.getPayload());
            }
            else if(key instanceof RSAPrivateKey) {
                RSAPrivateKey pk = (RSAPrivateKey) key;

                byte[] pk_enc = RSASHA1Verify.encodeSSHRSAPublicKey(pk.getPublicKey());

                TypesWriter tw = new TypesWriter();
                {
                    // Same to-be-signed layout as the DSA branch, for "ssh-rsa".
                    byte[] H = tm.getSessionIdentifier();

                    tw.writeString(H, 0, H.length);
                    tw.writeByte(Packets.SSH_MSG_USERAUTH_REQUEST);
                    tw.writeString(user);
                    tw.writeString("ssh-connection");
                    tw.writeString("publickey");
                    tw.writeBoolean(true);
                    tw.writeString("ssh-rsa");
                    tw.writeString(pk_enc, 0, pk_enc.length);
                }

                byte[] msg = tw.getBytes();

                RSASignature ds = RSASHA1Verify.generateSignature(msg, pk);

                byte[] rsa_sig_enc = RSASHA1Verify.encodeSSHRSASignature(ds);

                PacketUserauthRequestPublicKey ua = new PacketUserauthRequestPublicKey("ssh-connection", user,
                        "ssh-rsa", pk_enc, rsa_sig_enc);

                tm.sendMessage(ua.getPayload());
            }
            else {
                throw new IOException("Unknown private key type returned by the PEM decoder.");
            }

            byte[] message = getNextMessage();
            final int type = message[0];
            switch(type) {
                case Packets.SSH_MSG_USERAUTH_SUCCESS:
                    authenticated = true;
                    tm.removeMessageHandler(this);
                    return true;
                case Packets.SSH_MSG_USERAUTH_FAILURE:
                    PacketUserauthFailure puf = new PacketUserauthFailure(message);
                    remainingMethods = puf.getAuthThatCanContinue();
                    isPartialSuccess = puf.isPartialSuccess();
                    return false;
            }
            throw new PacketTypeException(type);
        }
        catch(IOException e) {
            // Authentication errors are fatal for this connection.
            tm.close(e);
            throw e;
        }
    }

    /** "none" authentication: succeeds only if the server requires no auth. */
    public boolean authenticateNone(String user) throws IOException {
        try {
            initialize(user);
            return authenticated;
        }
        catch(IOException e) {
            tm.close(e);
            throw e;
        }
    }

    /**
     * "password" authentication.
     *
     * @throws IOException if the method is not offered or on protocol errors;
     *         the transport is closed before rethrowing.
     */
    public boolean authenticatePassword(String user, String pass) throws IOException {
        try {
            initialize(user);

            if(!remainingMethods.contains("password")) {
                throw new IOException("Authentication method not supported");
            }

            PacketUserauthRequestPassword ua = new PacketUserauthRequestPassword("ssh-connection", user, pass);
            tm.sendMessage(ua.getPayload());

            byte[] message = getNextMessage();
            final int type = message[0];
            switch(type) {
                case Packets.SSH_MSG_USERAUTH_SUCCESS:
                    authenticated = true;
                    tm.removeMessageHandler(this);
                    return true;
                case Packets.SSH_MSG_USERAUTH_FAILURE:
                    PacketUserauthFailure puf = new PacketUserauthFailure(message);
                    remainingMethods = puf.getAuthThatCanContinue();
                    isPartialSuccess = puf.isPartialSuccess();
                    return false;
            }
            throw new PacketTypeException(type);
        }
        catch(IOException e) {
            tm.close(e);
            throw e;
        }
    }

    /**
     * "keyboard-interactive" authentication (RFC 4256): loops answering
     * SSH_MSG_USERAUTH_INFO_REQUEST challenges via the callback until the
     * server reports success or failure.
     */
    public boolean authenticateInteractive(String user, String[] submethods, InteractiveCallback cb)
            throws IOException {
        try {
            initialize(user);

            if(!remainingMethods.contains("keyboard-interactive")) {
                throw new IOException(
                        "Authentication method keyboard-interactive not supported by the server at this stage.");
            }

            PacketUserauthRequestInteractive ua = new PacketUserauthRequestInteractive("ssh-connection", user,
                    submethods);

            tm.sendMessage(ua.getPayload());

            while(true) {
                byte[] message = getNextMessage();
                final int type = message[0];
                switch(type) {
                    case Packets.SSH_MSG_USERAUTH_SUCCESS:
                        authenticated = true;
                        tm.removeMessageHandler(this);
                        return true;
                    case Packets.SSH_MSG_USERAUTH_FAILURE:
                        PacketUserauthFailure puf = new PacketUserauthFailure(message);
                        remainingMethods = puf.getAuthThatCanContinue();
                        isPartialSuccess = puf.isPartialSuccess();
                        return false;
                    case Packets.SSH_MSG_USERAUTH_INFO_REQUEST:
                        PacketUserauthInfoRequest info = new PacketUserauthInfoRequest(message);
                        String[] responses;
                        try {
                            responses = cb.replyToChallenge(info.getName(), info.getInstruction(),
                                    info.getNumPrompts(), info.getPrompt(), info.getEcho());
                        }
                        catch(Exception e) {
                            throw new IOException("Exception in callback.", e);
                        }
                        PacketUserauthInfoResponse puir = new PacketUserauthInfoResponse(responses);
                        tm.sendMessage(puir.getPayload());
                        // next round of the challenge/response loop
                        continue;
                }
                throw new PacketTypeException(type);
            }
        }
        catch(IOException e) {
            tm.close(e);
            throw e;
        }
    }

    /** Transport callback: remember that the connection is gone. */
    @Override
    public void handleFailure(final IOException failure) {
        connectionClosed = true;
    }

    /** Transport callback: enqueue an inbound packet for the auth thread. */
    @Override
    public void handleMessage(byte[] message) throws IOException {
        packets.add(message);
    }
}
//https://www.hackerrank.com/challenges/taum-and-bday

import java.io.*;

/**
 * Taum and B'day: buy B black gifts (unit price X) and W white gifts (unit
 * price Y); a gift may be converted to the other color for Z. The minimum
 * cost per color is therefore min(X, Y+Z) and min(Y, X+Z).
 */
public class Solution {
    public static void main(String[] args) throws IOException {
        // StringBuilder (not StringBuffer): no shared state, so the
        // synchronized variant is pure overhead.
        StringBuilder sb = new StringBuilder();
        BufferedReader br = new BufferedReader(new InputStreamReader(System.in));

        // For each test case
        for (int t = Integer.parseInt(br.readLine()); t > 0; --t) {
            // Line 1: number of black and white gifts
            String[] line = br.readLine().split(" ");
            final int B = Integer.parseInt(line[0]);
            final int W = Integer.parseInt(line[1]);

            // Line 2: black price, white price, conversion cost
            line = br.readLine().split(" ");
            int X = Integer.parseInt(line[0]);
            int Y = Integer.parseInt(line[1]);
            final int Z = Integer.parseInt(line[2]);

            // Replace each unit price by the cheaper of "buy directly" vs
            // "buy the other color and convert".
            if (Y + Z < X) {
                X = Y + Z;
            } else if (X + Z < Y) {
                Y = X + Z;
            }

            // Promote to long before multiplying: B*X can exceed int range.
            long cost = (1L * B * X) + (1L * W * Y);

            sb.append(cost).append("\n");
        }
        System.out.print(sb);
    }
}
/*******************************************************************************
 * Copyright 2012 Manning Publications Co.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 ******************************************************************************/
package com.manning.cmis.theblend.android.albums;

import java.util.List;

import org.apache.chemistry.opencmis.client.api.Document;
import org.apache.chemistry.opencmis.client.api.Session;

import android.app.Activity;
import android.app.AlertDialog;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.os.Bundle;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.AdapterView;
import android.widget.AdapterView.OnItemClickListener;
import android.widget.AdapterView.OnItemLongClickListener;
import android.widget.ListView;
import android.widget.ProgressBar;
import android.widget.TextView;

import com.manning.cmis.theblend.android.R;
import com.manning.cmis.theblend.android.albums.actions.CreateAlbumTask;
import com.manning.cmis.theblend.android.constant.BundleConstant;
import com.manning.cmis.theblend.android.properties.PropertyActivity;

/**
 * AlbumsActivity is responsible to display the list of cmisbook:album present
 * in the CMIS Server for the Blend Application.
 *
 * @author Jean Marie Pascal
 *
 */
public class AlbumsActivity extends Activity {

  /** The OpenCMIS Server session. */
  private Session session;

  /** ProgressBar associated to the list. */
  private ProgressBar albumsProgressBar;

  /** The listview of the activity. */
  private ListView albumsListView;

  /** The empty view associated to the albums listview. */
  private View albumsEmptyView;

  /*
   * (non-Javadoc)
   *
   * @see android.app.Activity#onCreate(android.os.Bundle)
   */
  @Override
  public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.list);

    // Retrieves informations from Intent
    // (the session is handed over by the previous activity inside a nested Bundle)
    if (getIntent().getExtras() != null) {
      Bundle b = getIntent().getExtras().getBundle(BundleConstant.KEY_EXTRAS);
      session = (Session) b.getSerializable(BundleConstant.KEY_SESSION);
    }

    // Initiates UI Components
    albumsProgressBar = (ProgressBar) findViewById(R.id.progressbar);
    albumsListView = (ListView) findViewById(R.id.listView);
    albumsEmptyView = findViewById(R.id.empty);
    TextView evt = (TextView) findViewById(R.id.empty_text);
    evt.setText(R.string.no_albums);

    // Long click on item : displays the album's properties screen
    albumsListView.setOnItemLongClickListener(new OnItemLongClickListener() {
      public boolean onItemLongClick(AdapterView<?> parent, View view, int position, long id) {
        displayAlbumProperties((Document) parent.getItemAtPosition(position));
        return true;
      }
    });

    // Simple click on item : goes to Album Details activity
    albumsListView.setOnItemClickListener(new OnItemClickListener() {
      public void onItemClick(AdapterView<?> l, View v, int position, long id) {
        displayAlbumsDetails((Document) l.getItemAtPosition(position));
      }
    });

    // Display the progressbar until having the list of Albums from server.
    setListShown(false);

    // Start the asynctask to retrieve list of albums from the server
    listAlbums();
  }

  /**
   * Display albums details screen via Intent.
   *
   * @param album
   *          : the album (Document object)
   */
  public void displayAlbumsDetails(Document album) {
    Bundle b = new Bundle();
    b.putSerializable(BundleConstant.KEY_SESSION, session);
    b.putString(BundleConstant.KEY_ALBUM_ID, (String) album.getId());
    Intent i = new Intent(this, AlbumDetailsActivity.class);
    i.putExtra(BundleConstant.KEY_EXTRAS, b);
    startActivity(i);
  }

  /**
   * Display Properties details screen via Intent.
   *
   * @param album
   *          : the album (Document object)
   */
  public void displayAlbumProperties(Document album) {
    Bundle b = new Bundle();
    b.putSerializable(BundleConstant.KEY_SESSION, session);
    b.putString(BundleConstant.KEY_CMISOBJECT_ID, (String) album.getId());
    Intent i = new Intent(this, PropertyActivity.class);
    i.putExtra(BundleConstant.KEY_EXTRAS, b);
    startActivity(i);
  }

  /**
   * Start AsyncTask to retrieve albums list.
   */
  public void listAlbums() {
    new AlbumsTask(this, session).execute();
  }

  /**
   * Callback method for AlbumTask. Creates the listAdapter based on data
   * retrieved.
   *
   * @param albums
   *          the albums : List of albums from server.
   */
  public void listAlbums(List<Document> albums) {
    if (albums != null && !albums.isEmpty()) {
      albumsListView.setAdapter(new AlbumsAdapter(AlbumsActivity.this, albums));
    } else {
      // No data: show the "no albums" placeholder instead of the list.
      albumsListView.setEmptyView(albumsEmptyView);
    }
    setListShown(true);
  }

  /*
   * (non-Javadoc)
   *
   * @see android.app.Activity#onCreateOptionsMenu(android.view.Menu)
   */
  // NOTE(review): missing @Override annotation (cannot be added in a
  // comment-only pass); this does override Activity#onCreateOptionsMenu.
  public boolean onCreateOptionsMenu(Menu menu) {
    getMenuInflater().inflate(R.menu.albums, menu);
    return true;
  }

  /*
   * (non-Javadoc)
   *
   * @see android.app.Activity#onOptionsItemSelected(android.view.MenuItem)
   */
  @Override
  public boolean onOptionsItemSelected(MenuItem item) {
    // Actions available inside the activity.
    switch (item.getItemId()) {
      case R.id.menu_add:
        addAlbum();
        return true;
      default:
        return super.onOptionsItemSelected(item);
    }
  }

  /**
   * Display or not the listView. If not displayed, an indeterminate
   * progressbar is displayed.
   *
   * @param shown
   *          : true to display the listview. False otherwise
   *
   */
  protected void setListShown(Boolean shown) {
    if (shown) {
      albumsListView.setVisibility(View.VISIBLE);
      albumsProgressBar.setVisibility(View.GONE);
    } else {
      albumsEmptyView.setVisibility(View.GONE);
      albumsListView.setVisibility(View.GONE);
      albumsProgressBar.setVisibility(View.VISIBLE);
    }
  }

  /**
   * Actions : Create a cmisbook:album inside the repository. <br>
   * Displays a popup to get the information from the user.
   */
  public void addAlbum() {
    LayoutInflater inflater = (LayoutInflater) getSystemService(Context.LAYOUT_INFLATER_SERVICE);
    final View form = inflater.inflate(R.layout.create_album, null, false);
    new AlertDialog.Builder(this).setTitle(R.string.add_album_title).setView(form)
        .setPositiveButton(R.string.create_add_album, new DialogInterface.OnClickListener() {
          public void onClick(DialogInterface dialog, int whichButton) {
            // Read the title and target folder path typed by the user,
            // then create the album asynchronously.
            String title = ((TextView) form.findViewById(R.id.album_title)).getText().toString();
            String path = ((TextView) form.findViewById(R.id.parent_folder_path)).getText().toString();
            new CreateAlbumTask(AlbumsActivity.this, session, path, title).execute();
          }
        }).setNegativeButton(R.string.cancel, new DialogInterface.OnClickListener() {
          public void onClick(DialogInterface dialog, int whichButton) {
            dialog.dismiss();
          }
        }).show();
  }
}
package com.orelogo.relink; import android.app.NotificationManager; import android.app.PendingIntent; import android.content.BroadcastReceiver; import android.content.Context; import android.content.Intent; import android.database.Cursor; import android.support.annotation.Nullable; import android.support.v4.app.NotificationCompat; import android.support.v4.app.TaskStackBuilder; /** * Publish notification. */ public class NotificationPublisher extends BroadcastReceiver { private static final int NOTIFICATION_ID = 0; // notification id /** * Publish notification. * * @param context * @param intent */ @Override public void onReceive(Context context, Intent intent) { NotificationManager notificationManager = (NotificationManager) context.getSystemService(Context.NOTIFICATION_SERVICE); String reminder = remindersDue(context); // reminder message, or null if no reminders due if (reminder != null) { // build notification NotificationCompat.Builder notificationBuilder = new NotificationCompat.Builder(context) .setSmallIcon(R.drawable.ic_notification) .setContentTitle(context.getString(R.string.notification_title)) .setContentText(reminder); // set intent for when user clicks on notification Intent clickIntent = new Intent(context, MainActivity.class); TaskStackBuilder stackBuilder = TaskStackBuilder.create(context); stackBuilder.addNextIntent(clickIntent); PendingIntent pendingIntent = stackBuilder.getPendingIntent( 0, PendingIntent.FLAG_UPDATE_CURRENT); notificationBuilder.setContentIntent(pendingIntent); notificationBuilder.setAutoCancel(true); // cancel intent when user clicks // publish notification notificationManager.notify(NOTIFICATION_ID, notificationBuilder.build()); } else { notificationManager.cancel(NOTIFICATION_ID); // cancel notification if there is one } } /** * Builds reminder message for who to reconnect with. 
* * @param context * @return reminder message, or null */ @Nullable private String remindersDue(Context context) { long currentTime = System.currentTimeMillis(); DBAdapter db = new DBAdapter(context); db.open(); Cursor cursor = db.getRowsBefore(currentTime); // cursor with all rows db.close(); int dueCount = cursor.getCount(); if (dueCount > 0) { // at least one reminder is due String firstReconnect = cursor.getString(DBAdapter.COL_NAME_INDEX); String reminder = context.getString(R.string.reconnect_with, firstReconnect); if (dueCount > 1) { // more than one reminder is due reminder += context.getString(R.string.other, dueCount - 1); } if (dueCount > 2) { reminder += context.getString(R.string.plural_s); } cursor.close(); return reminder; } else { // empty cursor return null; } } }
package org.zstack.header.network.l2;

import java.util.*;

/**
 * Type-object for L2 network types. Every constructed instance registers
 * itself in a global registry; {@link #valueOf(String)} resolves a name back
 * to the registered instance.
 */
public class L2NetworkType {
    // Global registry. The map wrapper is synchronized, but iteration over
    // its views still requires manual locking (see getAllTypeNames).
    private static Map<String, L2NetworkType> types = Collections.synchronizedMap(new HashMap<String, L2NetworkType>());

    private final String typeName;
    // Whether this type is shown to API users in getAllTypeNames().
    private boolean exposed = true;

    /** Returns true if a type with the given name has been registered. */
    public static boolean hasType(String typeName) {
        return types.containsKey(typeName);
    }

    /**
     * Creates and registers a type under the given name.
     * Note: registration happens in the constructor ("this" escapes), which is
     * the pre-existing contract of this class.
     */
    public L2NetworkType(String typeName) {
        this.typeName = typeName;
        types.put(typeName, this);
    }

    /** Creates and registers a type, also setting its exposed flag. */
    public L2NetworkType(String typeName, boolean exposed) {
        this(typeName);
        this.exposed = exposed;
    }

    public boolean isExposed() {
        return exposed;
    }

    public void setExposed(boolean exposed) {
        this.exposed = exposed;
    }

    /**
     * Resolves a registered type by name.
     *
     * @throws IllegalArgumentException if no type with that name was registered
     */
    public static L2NetworkType valueOf(String typeName) {
        L2NetworkType type = types.get(typeName);
        if (type == null) {
            throw new IllegalArgumentException("L2NetworkType type: " + typeName + " was not registered by any L2NetworkFactory");
        }
        return type;
    }

    @Override
    public String toString() {
        return typeName;
    }

    @Override
    public boolean equals(Object t) {
        // instanceof also rejects null, so no separate null check is needed.
        if (!(t instanceof L2NetworkType)) {
            return false;
        }
        L2NetworkType type = (L2NetworkType) t;
        return type.toString().equals(typeName);
    }

    @Override
    public int hashCode() {
        return typeName.hashCode();
    }

    /** Returns the names of all registered types whose exposed flag is set. */
    public static Set<String> getAllTypeNames() {
        HashSet<String> exposedTypes = new HashSet<String>();
        // Collections.synchronizedMap mandates synchronizing on the map while
        // iterating its views; without this, a concurrent registration could
        // throw ConcurrentModificationException.
        synchronized (types) {
            for (L2NetworkType type : types.values()) {
                if (type.isExposed()) {
                    exposedTypes.add(type.toString());
                }
            }
        }
        return exposedTypes;
    }
}
/*
 * Copyright 2019 Maksim Zheravin
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.cygnus.exchange.tests.util;

import com.google.common.collect.Sets;
import io.cygnus.exchange.core.common.CoreSymbolSpecification;
import io.cygnus.exchange.core.common.enums.SymbolType;

import java.util.Set;

/**
 * Shared constants for exchange-core tests: symbol ids, user ids, currency
 * codes (mostly ISO-4217 numeric codes plus ad-hoc crypto ids), currency
 * sets, and ready-made {@link CoreSymbolSpecification} fixtures.
 *
 * NOTE(review): the constant prefix is consistently misspelled "CURRENECY"
 * (should be CURRENCY); renaming would break every caller, so it is kept.
 */
public final class TestConstants {

    // Symbol ids used by the fixture specifications below.
    public static final int SYMBOL_MARGIN = 5991;
    public static final int SYMBOL_EXCHANGE = 9269;
    public static final int SYMBOL_EXCHANGE_FEE = 9340;

    // Test user ids.
    public static final long UID_1 = 1440001;
    public static final long UID_2 = 1440002;
    public static final long UID_3 = 1440003;
    public static final long UID_4 = 1440004;

    // Auto-generated test symbols start from this id.
    public static final int SYMBOL_AUTOGENERATED_RANGE_START = 40000;

    // Fiat currencies: ISO-4217 numeric codes.
    public static final int CURRENECY_AUD = 36;
    public static final int CURRENECY_BRL = 986;
    public static final int CURRENECY_CAD = 124;
    public static final int CURRENECY_CHF = 756;
    public static final int CURRENECY_CNY = 156;
    public static final int CURRENECY_CZK = 203;
    public static final int CURRENECY_DKK = 208;
    public static final int CURRENECY_EUR = 978;
    public static final int CURRENECY_GBP = 826;
    public static final int CURRENECY_HKD = 344;
    public static final int CURRENECY_JPY = 392;
    public static final int CURRENECY_KRW = 410;
    public static final int CURRENECY_MXN = 484;
    public static final int CURRENECY_MYR = 458;
    public static final int CURRENECY_NOK = 578;
    public static final int CURRENECY_NZD = 554;
    public static final int CURRENECY_PLN = 985;
    public static final int CURRENECY_RUB = 643;
    public static final int CURRENECY_SEK = 752;
    public static final int CURRENECY_SGD = 702;
    public static final int CURRENECY_THB = 764;
    public static final int CURRENECY_TRY = 949;
    public static final int CURRENECY_UAH = 980;
    public static final int CURRENECY_USD = 840;
    public static final int CURRENECY_VND = 704;
    public static final int CURRENECY_XAG = 961;
    public static final int CURRENECY_XAU = 959;
    public static final int CURRENECY_ZAR = 710;

    // Crypto currencies: project-local ids; comments give the base unit.
    public static final int CURRENECY_XBT = 3762; // satoshi, 1E-8
    public static final int CURRENECY_ETH = 3928; // szabo, 1E-6
    public static final int CURRENECY_LTC = 4141; // litoshi, 1E-8
    public static final int CURRENECY_XDG = 4142;
    public static final int CURRENECY_GRC = 4143;
    public static final int CURRENECY_XPM = 4144;
    public static final int CURRENECY_XRP = 4145;
    public static final int CURRENECY_DASH = 4146;
    public static final int CURRENECY_XMR = 4147;
    public static final int CURRENECY_XLM = 4148;
    public static final int CURRENECY_ETC = 4149;
    public static final int CURRENECY_ZEC = 4150;

    // Currency groups used when generating test symbols.
    public static final Set<Integer> CURRENCIES_FUTURES = Sets.newHashSet(
            CURRENECY_USD,
            CURRENECY_EUR);

    public static final Set<Integer> CURRENCIES_EXCHANGE = Sets.newHashSet(
            CURRENECY_ETH,
            CURRENECY_XBT);

    public static final Set<Integer> ALL_CURRENCIES = Sets.newHashSet(
            CURRENECY_AUD, CURRENECY_BRL, CURRENECY_CAD, CURRENECY_CHF, CURRENECY_CNY,
            CURRENECY_CZK, CURRENECY_DKK, CURRENECY_EUR, CURRENECY_GBP, CURRENECY_HKD,
            CURRENECY_JPY, CURRENECY_KRW, CURRENECY_MXN, CURRENECY_MYR, CURRENECY_NOK,
            CURRENECY_NZD, CURRENECY_PLN, CURRENECY_RUB, CURRENECY_SEK, CURRENECY_SGD,
            CURRENECY_THB, CURRENECY_TRY, CURRENECY_UAH, CURRENECY_USD, CURRENECY_VND,
            CURRENECY_XAG, CURRENECY_XAU, CURRENECY_ZAR,
            CURRENECY_XBT, CURRENECY_ETH, CURRENECY_LTC, CURRENECY_XDG, CURRENECY_GRC,
            CURRENECY_XPM, CURRENECY_XRP, CURRENECY_DASH, CURRENECY_XMR, CURRENECY_XLM,
            CURRENECY_ETC, CURRENECY_ZEC);

    // Margin (futures) symbol fixture without fees.
    public static final CoreSymbolSpecification SYMBOLSPEC_EUR_USD = CoreSymbolSpecification.builder()
            .symbolId(SYMBOL_MARGIN)
            .type(SymbolType.FUTURES_CONTRACT)
            .baseCurrency(CURRENECY_EUR)
            .quoteCurrency(CURRENECY_USD)
            .baseScaleK(1)
            .quoteScaleK(1)
            .marginBuy(2200)
            .marginSell(3210)
            .takerFee(0)
            .makerFee(0)
            .build();

    // Margin symbol fixture with fees.
    // NOTE(review): this reuses symbolId(SYMBOL_MARGIN), the same id as
    // SYMBOLSPEC_EUR_USD above - confirm this is intentional (the two specs
    // are presumably never registered in the same core instance).
    public static final CoreSymbolSpecification SYMBOLSPECFEE_USD_JPY = CoreSymbolSpecification.builder()
            .symbolId(SYMBOL_MARGIN)
            .type(SymbolType.FUTURES_CONTRACT)
            .baseCurrency(CURRENECY_USD)
            .quoteCurrency(CURRENECY_JPY)
            .baseScaleK(1_000_00) // 1K USD "micro" lot
            .quoteScaleK(10) // 10 JPY step
            .marginBuy(5_000) // effective leverage ~21
            .marginSell(6_000) // effective leverage ~18
            .takerFee(3)
            .makerFee(2)
            .build();

    // Exchange-pair symbol fixture without fees.
    public static final CoreSymbolSpecification SYMBOLSPEC_ETH_XBT = CoreSymbolSpecification.builder()
            .symbolId(SYMBOL_EXCHANGE)
            .type(SymbolType.CURRENCY_EXCHANGE_PAIR)
            .baseCurrency(CURRENECY_ETH) // base = szabo
            .quoteCurrency(CURRENECY_XBT) // quote = satoshi
            .baseScaleK(100_000) // 1 lot = 100K szabo (0.1 ETH)
            .quoteScaleK(10) // 1 step = 10 satoshi
            .takerFee(0)
            .makerFee(0)
            .build();

    // symbol with fees
    public static final CoreSymbolSpecification SYMBOLSPECFEE_XBT_LTC = CoreSymbolSpecification.builder()
            .symbolId(SYMBOL_EXCHANGE_FEE)
            .type(SymbolType.CURRENCY_EXCHANGE_PAIR)
            .baseCurrency(CURRENECY_XBT) // base = satoshi
            .quoteCurrency(CURRENECY_LTC) // quote = litoshi
            .baseScaleK(1_000_000) // 1 lot = 1M satoshi (0.01 BTC)
            .quoteScaleK(10_000) // 1 step = 10K litoshi
            .takerFee(1900) // taker fee 1900 litoshi per 1 lot
            .makerFee(700) // maker fee 700 litoshi per 1 lot
            .build();

    /**
     * Maps a currency symbol ("USD", "XBT", "ETH") to its numeric id.
     *
     * @throws RuntimeException for any other currency string
     */
    public static int getCurrency(String currency) {
        switch (currency) {
            case "USD":
                return CURRENECY_USD;
            case "XBT":
                return CURRENECY_XBT;
            case "ETH":
                return CURRENECY_ETH;
        }
        throw new RuntimeException("Unknown currency [" + currency + "]");
    }
}
package com.devonfw.module.basic.common.api.query;

/**
 * Enum defining available syntaxes for a match pattern in a LIKE-clause. While databases typically require {@link #SQL}
 * syntax, human user expect {@link #GLOB} syntax in search forms. Therefore this enum also supports
 * {@link #convert(String, LikePatternSyntax, boolean) conversion} from one syntax to another.
 *
 * @since 3.0.0
 */
public enum LikePatternSyntax {

  /**
   * Glob syntax that is typically expected by end-users and supported by typical search forms. It uses asterisk ('*')
   * for {@link #getAny() any wildcard} and question-mark ('?') for {@link #getSingle() single wildcard}.
   */
  GLOB('*', '?'),

  /**
   * SQL syntax that is typically required by databases. It uses percent ('%') for {@link #getAny() any wildcard} and
   * underscore ('_') for {@link #getSingle() single wildcard}.
   */
  SQL('%', '_');

  /** The escape character. */
  public static final char ESCAPE = '\\';

  private final char any;

  private final char single;

  private LikePatternSyntax(char any, char single) {

    this.any = any;
    this.single = single;
  }

  /**
   * @return the wildcard character that matches any string including the {@link String#isEmpty() empty} string.
   */
  public char getAny() {

    return this.any;
  }

  /**
   * @return the wildcard character that matches exactly one single character.
   */
  public char getSingle() {

    return this.single;
  }

  /**
   * @param pattern the LIKE pattern in the given {@link LikePatternSyntax}.
   * @param syntax the {@link LikePatternSyntax} of the given {@code pattern}.
   * @return the given {@code pattern} converted to this {@link LikePatternSyntax}.
   */
  public String convert(String pattern, LikePatternSyntax syntax) {

    return convert(pattern, syntax, false);
  }

  /**
   * @param pattern the LIKE pattern in the given {@link LikePatternSyntax}.
   * @param syntax the {@link LikePatternSyntax} of the given {@code pattern}.
   * @param matchSubstring - {@code true} if the given {@code pattern} shall also match substrings, {@code false}
   *        otherwise.
   * @return the given {@code pattern} converted to this {@link LikePatternSyntax}.
   */
  public String convert(String pattern, LikePatternSyntax syntax, boolean matchSubstring) {

    if ((pattern == null) || pattern.isEmpty()) {
      if (matchSubstring) {
        return Character.toString(this.any);
      } else {
        return pattern;
      }
    }
    if (this == syntax) {
      // Same syntax: at most wrap with leading/trailing any-wildcards.
      String result = pattern;
      if (matchSubstring) {
        if (pattern.charAt(0) != this.any) {
          result = this.any + result;
        }
        int lastIndex = pattern.length() - 1;
        if ((pattern.charAt(lastIndex) != this.any)
            || ((lastIndex > 0) && (pattern.charAt(lastIndex - 1) == ESCAPE))) {
          result = result + this.any;
        }
      }
      return result;
    }
    int length = pattern.length();
    StringBuilder sb = new StringBuilder(length + 8);
    boolean lastWildcardAny = false;
    for (int i = 0; i < length; i++) {
      lastWildcardAny = false;
      char c = pattern.charAt(i);
      boolean escape = false;
      if (c == syntax.any) {
        c = this.any;
        lastWildcardAny = true;
      } else if (c == syntax.single) {
        c = this.single;
      } else if ((c == this.any) || (c == this.single) || (c == ESCAPE)) {
        // literal char that is a wildcard (or the escape char) in the target
        // syntax and therefore has to be escaped
        escape = true;
      }
      // Bugfix: emit the substring-prefix wildcard BEFORE any escape
      // sequence. The previous code appended it between ESCAPE and the
      // escaped character (producing e.g. "%\%%c" instead of "%\%c"),
      // corrupting the escape and duplicating the prefix.
      if (matchSubstring && (i == 0) && !lastWildcardAny) {
        sb.append(this.any);
      }
      if (escape) {
        sb.append(ESCAPE);
      }
      sb.append(c);
    }
    if (matchSubstring && !lastWildcardAny) {
      sb.append(this.any);
    }
    return sb.toString();
  }

  /**
   * @param pattern the string value that may be a pattern.
   * @return the {@link LikePatternSyntax} for the given {@code pattern} or {@code null} if the given {@code pattern}
   *         does not contain any wildcards.
   */
  public static LikePatternSyntax autoDetect(String pattern) {

    if ((pattern == null) || pattern.isEmpty()) {
      return null;
    }
    for (LikePatternSyntax syntax : values()) {
      // Bugfix: use >= 0 so a wildcard at index 0 (e.g. "*foo" or "%foo")
      // is detected as well; String.indexOf returns -1 only when absent.
      if (pattern.indexOf(syntax.any) >= 0) {
        return syntax;
      } else if (pattern.indexOf(syntax.single) >= 0) {
        return syntax;
      }
    }
    return null;
  }
}
package com.hashir.test; public class SampleClass { public int add(int num1, int num2) { return num1 + num2; } }
package app.andrey_voroshkov.chorus_laptimer;

import android.hardware.usb.UsbDeviceConnection;
import android.hardware.usb.UsbManager;
import android.os.Bundle;
import android.os.Handler;
import android.os.Looper;
import android.os.Message;
import android.text.TextUtils;
import android.util.Log;

import com.hoho.android.usbserial.driver.UsbSerialDriver;
import com.hoho.android.usbserial.driver.UsbSerialPort;
import com.hoho.android.usbserial.driver.UsbSerialProber;

import java.io.IOException;
import java.nio.charset.Charset;
import java.util.List;
import java.util.concurrent.CountDownLatch;

import static android.content.ContentValues.TAG;

/**
 * Created by Andrey_Voroshkov on 10/15/2017.
 *
 * USB-serial implementation of {@link Connection}. Opens the first available usb-serial
 * driver at 115200/8N1, reads newline-delimited text chunks on a background
 * {@link ListenerThread}, writes via a Looper-backed {@link SenderThread}, and forwards all
 * connection events to the registered {@link ConnectionListener} on the thread that created
 * this object (via {@code mActivityHandler}).
 */
public class USBService implements Connection {
    // Bundle keys used to ferry event type/payload through Handler messages.
    final static String KEY_MSG_TYPE = "msg";
    final static String KEY_MSG_DATA = "data";

    // Internal message type codes (arbitrary values, only used within this class).
    final static int MSG_ON_CONNECT = 7771;
    final static int MSG_ON_DISCONNECT = 7772;
    final static int MSG_ON_RECEIVE = 7773;
    final static int MSG_ON_CONNECTION_FAIL = 7774;

    // Timeouts (milliseconds) passed to UsbSerialPort.write()/read().
    final static int SEND_TIMEOUT = 100;
    final static int READ_TIMEOUT = 100;

    ConnectionListener mConnectionListener = null;
    // Handler bound to the constructing thread; all listener callbacks are dispatched through it.
    Handler mActivityHandler = null;
    UsbManager mUsbManager = null;
    ListenerThread mListenerThread = null;
    SenderThread mSenderThread = null;
    // volatile: written by connect()/disconnect(), read by the sender and listener threads.
    volatile UsbSerialPort mPort = null;

    USBService(UsbManager usbManager) {
        mActivityHandler = new HandlerExtension();
        mUsbManager = usbManager;
    }

    /**
     * Handler that unpacks the internal message bundle and dispatches the matching
     * ConnectionListener callback. Runs on the thread that constructed USBService.
     */
    private class HandlerExtension extends Handler {
        @Override
        public void handleMessage(Message message) {
            if (mConnectionListener == null) return;
            Bundle msgBundle = message.getData();
            int msgType = msgBundle.getInt(KEY_MSG_TYPE);
            String data = msgBundle.getString(KEY_MSG_DATA);
            switch (msgType) {
                case MSG_ON_CONNECT:
                    mConnectionListener.onConnected(data);
                    break;
                case MSG_ON_CONNECTION_FAIL:
                    mConnectionListener.onConnectionFailed(data);
                    break;
                case MSG_ON_DISCONNECT:
                    mConnectionListener.onDisconnected();
                    break;
                case MSG_ON_RECEIVE:
                    mConnectionListener.onDataReceived(data);
                    break;
            }
        }
    }

    /** Packs an event type and payload string into a Message for mActivityHandler. */
    private Message composeMessage(int type, String data) {
        Bundle msgBundle = new Bundle();
        msgBundle.putInt(KEY_MSG_TYPE, type);
        msgBundle.putString(KEY_MSG_DATA, data);
        Message msg = new Message();
        msg.setData(msgBundle);
        return msg;
    }

    public void setConnectionListener(ConnectionListener listener) {
        mConnectionListener = listener;
    }

    /**
     * Opens the first available usb-serial port and starts the listener/sender threads.
     * Failures are reported asynchronously via MSG_ON_CONNECTION_FAIL; success via MSG_ON_CONNECT.
     * No-op if a port is already open.
     */
    @Override
    public void connect() {
        if (mPort != null) return;
        if (mUsbManager == null) {
            mActivityHandler.sendMessage(composeMessage(MSG_ON_CONNECTION_FAIL, "UsbManager not found"));
            return;
        }
        List<UsbSerialDriver> availableDrivers = UsbSerialProber.getDefaultProber().findAllDrivers(mUsbManager);
        if (availableDrivers.isEmpty()) {
            mActivityHandler.sendMessage(composeMessage(MSG_ON_CONNECTION_FAIL, "No available USB Device Drivers found"));
            return;
        }
        // Open a connection to the first available driver.
        UsbSerialDriver driver = availableDrivers.get(0);
        UsbDeviceConnection connection = mUsbManager.openDevice(driver.getDevice());
        if (connection == null) {
            // You probably need to call UsbManager.requestPermission(driver.getDevice(), ..)
            mActivityHandler.sendMessage(composeMessage(MSG_ON_CONNECTION_FAIL, "Cannot open USB data port"));
            return;
        }
        // Read some data! Most have just one port (port 0).
        mPort = driver.getPorts().get(0);
        try {
            mPort.open(connection);
            mPort.setParameters(115200, 8, UsbSerialPort.STOPBITS_1, UsbSerialPort.PARITY_NONE);
        } catch (IOException e) {
            Log.e(TAG, "Error setting up device: " + e.getMessage(), e);
            try {
                mPort.close();
            } catch (IOException e2) {
                // Ignore.
            }
            mPort = null;
            mActivityHandler.sendMessage(composeMessage(MSG_ON_CONNECT_FAIL_PLACEHOLDER, e.toString()));
            return;
        }
        // create a latch to make sure that sending thread is started before we send anything to USB
        // (count is 0 when the sender thread already exists, so await() returns immediately)
        CountDownLatch senderInitializedSignal = new CountDownLatch(mSenderThread == null ? 1 : 0);
        if (mListenerThread == null) {
            mListenerThread = new ListenerThread();
            mListenerThread.start();
        }
        if (mSenderThread == null) {
            mSenderThread = new SenderThread(senderInitializedSignal);
            mSenderThread.start();
        }
        try {
            senderInitializedSignal.await();
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
        String deviceName;
        try {
            deviceName = mPort.getDriver().getClass().getSimpleName();
        } catch (Exception e) {
            deviceName = "";
        }
        mActivityHandler.sendMessage(composeMessage(MSG_ON_CONNECT, deviceName));
    }

    /**
     * Stops the listener thread, closes the port, and notifies the listener.
     * The sender thread is intentionally left running (see comment below).
     */
    @Override
    public void disconnect() {
        // don't stop the sender thread as it uses a Looper and cannot be easily stopped (to the best of my current knowledge)
        // but stop the listener thread
        if (mListenerThread != null) {
            mListenerThread.interrupt();
            try {
                mListenerThread.join();
                mListenerThread = null;
            } catch (InterruptedException e) {
                // do nothing ?
            }
        }
        if (mPort != null) {
            try {
                mPort.close();
            } catch (Exception e) {
                // TODO: handle exception here ?
            } finally {
                mPort = null;
            }
        }
        mActivityHandler.sendMessage(composeMessage(MSG_ON_DISCONNECT, "disconnect"));
    }

    /** Queues data for asynchronous transmission; silently dropped if not connected. */
    public void send(String data) {
        if (mPort == null) return;
        if (mSenderThread == null) return;
        mSenderThread.send(data);
    }

    /**
     * Looper thread that serializes all port writes. The CountDownLatch guarantees the
     * Handler exists before send() can be called (see connect()).
     */
    private class SenderThread extends Thread {
        CountDownLatch mInitializedSignal;

        SenderThread(CountDownLatch initializedSignal) {
            super();
            mInitializedSignal = initializedSignal;
        }

        private Handler mSendHandler;

        public void run() {
            // prepare handler to process send commands via messages to SenderThread
            Looper.prepare();
            mSendHandler = new Handler();
            mInitializedSignal.countDown();
            Looper.loop();
        }

        public void send(final String data) {
            if (mPort == null) return;
            // TODO: check that there are no situations when we try sending to closed port
            mSendHandler.post(new Runnable() {
                @Override
                public void run() {
                    try {
                        mPort.write(data.getBytes(), SEND_TIMEOUT);
                    } catch (Exception e) {
                        disconnect();
                        //TODO: implement some handling here!
                    }
                }
            });
        }
    }

    /**
     * Background reader: polls the port, reassembles newline-delimited chunks across reads
     * (keeping any trailing partial line in mLastIncompleteChunk), and forwards each complete
     * line via MSG_ON_RECEIVE.
     */
    private class ListenerThread extends Thread {
        byte[] mReceiveArray = new byte[4098];
        String mLastIncompleteChunk = "";

        private void parseAndCallback(String str) {
            if (mConnectionListener == null || str.length() == 0) return;
            char lastChar = str.charAt(str.length() - 1);
            boolean isLastChunkIncomplete = lastChar != '\n';
            String[] chunks = TextUtils.split(str, "\n");
            int lastChunkIndex = chunks.length - 1;
            // prepend the partial line left over from the previous read
            if (!mLastIncompleteChunk.isEmpty()) {
                chunks[0] = mLastIncompleteChunk + chunks[0];
            }
            if (isLastChunkIncomplete) {
                mLastIncompleteChunk = chunks[lastChunkIndex];
                chunks[lastChunkIndex] = "";
            } else {
                mLastIncompleteChunk = "";
            }
            for (String chunk : chunks) {
                if (chunk.isEmpty()) continue;
                mActivityHandler.sendMessage(composeMessage(MSG_ON_RECEIVE, chunk));
            }
        }

        public void run() {
            while (!isInterrupted()) {
                // NOTE(review): this busy-spins at 100% CPU while mPort is null — consider a short sleep.
                if (mPort == null) continue;
                try {
                    int len = 0;
                    len = mPort.read(mReceiveArray, READ_TIMEOUT);
                    if (len > 0) {
                        Charset charset = Charset.forName("ASCII");
                        String result = new String(mReceiveArray, 0, len, charset);
                        parseAndCallback(result);
                    }
                } catch (Exception e) {
                    disconnect();
                    //TODO: implement some handling here!
                    break;
                }
            }
        }
    }
}
/*
 * Copyright 2017, Oath Inc.
 * Licensed under the Apache License, Version 2.0
 * See LICENSE file in project root for terms.
 */
package com.yahoo.elide.standalone;

import static com.yahoo.elide.standalone.config.ElideResourceConfig.ELIDE_STANDALONE_SETTINGS_ATTR;

import com.codahale.metrics.servlet.InstrumentedFilter;
import com.codahale.metrics.servlets.AdminServlet;
import com.codahale.metrics.servlets.HealthCheckServlet;
import com.codahale.metrics.servlets.MetricsServlet;
import com.yahoo.elide.resources.DefaultOpaqueUserFunction;
import com.yahoo.elide.security.checks.Check;
import com.yahoo.elide.standalone.config.ElideResourceConfig;
import com.yahoo.elide.standalone.config.ElideStandaloneSettings;

import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.servlet.FilterHolder;
import org.eclipse.jetty.servlet.ServletContextHandler;
import org.eclipse.jetty.servlet.ServletHolder;
import org.glassfish.jersey.servlet.ServletContainer;

import lombok.extern.slf4j.Slf4j;

import java.util.EnumSet;
import java.util.Map;

import javax.servlet.DispatcherType;
import javax.ws.rs.core.SecurityContext;

/**
 * Standalone wrapper that runs Elide inside an embedded Jetty server. Servlets for
 * JSON-API, GraphQL and Dropwizard-metrics monitoring are registered according to the
 * supplied {@link ElideStandaloneSettings}.
 */
@Slf4j
public class ElideStandalone {
    private final ElideStandaloneSettings elideStandaloneSettings;

    // Created lazily in start(); stop() assumes start() was called first.
    private Server jettyServer;

    /**
     * Constructor
     *
     * @param elideStandaloneSettings Elide standalone configuration settings.
     */
    public ElideStandalone(ElideStandaloneSettings elideStandaloneSettings) {
        this.elideStandaloneSettings = elideStandaloneSettings;
    }

    /**
     * Constructor
     *
     * @param checkMappings Check mappings to use for service.
     */
    public ElideStandalone(Map<String, Class<? extends Check>> checkMappings) {
        // Default user extraction: the servlet-container principal.
        this(checkMappings, SecurityContext::getUserPrincipal);
    }

    /**
     * Constructor
     *
     * @param checkMappings Check mappings to use for service.
     * @param userExtractionFn User extraction function to use for service.
     */
    public ElideStandalone(Map<String, Class<? extends Check>> checkMappings,
                           DefaultOpaqueUserFunction userExtractionFn) {
        // Wraps the two values in an anonymous settings object using defaults for everything else.
        this(new ElideStandaloneSettings() {
            @Override
            public Map<String, Class<? extends Check>> getCheckMappings() {
                return checkMappings;
            }

            @Override
            public DefaultOpaqueUserFunction getUserExtractionFunction() {
                return userExtractionFn;
            }
        });
    }

    /**
     * Start the Elide service.
     *
     * This method blocks until the server exits.
     */
    public void start() throws Exception {
        start(true);
    }

    /**
     * Start the Elide service.
     *
     * @param block - Whether or not to wait for the server to shutdown.
     */
    public void start(boolean block) throws Exception {
        ServletContextHandler context = new ServletContextHandler();
        context.setContextPath("/");

        log.info("Starting jetty server on port: {}", elideStandaloneSettings.getPort());
        jettyServer = new Server(elideStandaloneSettings.getPort());
        jettyServer.setHandler(context);

        // Settings are handed to ElideResourceConfig through a servlet-context attribute.
        context.setAttribute(ELIDE_STANDALONE_SETTINGS_ATTR, elideStandaloneSettings);

        // Optional JSON-API endpoint.
        if (elideStandaloneSettings.enableJSONAPI()) {
            ServletHolder jerseyServlet = context.addServlet(ServletContainer.class,
                    elideStandaloneSettings.getJsonApiPathSpec());
            jerseyServlet.setInitOrder(0);
            jerseyServlet.setInitParameter("jersey.config.server.provider.packages", "com.yahoo.elide.resources");
            jerseyServlet.setInitParameter("javax.ws.rs.Application", ElideResourceConfig.class.getCanonicalName());
        }

        // Optional GraphQL endpoint.
        if (elideStandaloneSettings.enableGraphQL()) {
            ServletHolder jerseyServlet = context.addServlet(ServletContainer.class,
                    elideStandaloneSettings.getGraphQLApiPathSepc());
            jerseyServlet.setInitOrder(0);
            jerseyServlet.setInitParameter("jersey.config.server.provider.packages", "com.yahoo.elide.graphql");
            jerseyServlet.setInitParameter("javax.ws.rs.Application", ElideResourceConfig.class.getCanonicalName());
        }

        // Optional Dropwizard metrics/health-check monitoring at /stats/*.
        if (elideStandaloneSettings.enableServiceMonitoring()) {
            FilterHolder instrumentedFilterHolder = new FilterHolder(InstrumentedFilter.class);
            instrumentedFilterHolder.setName("instrumentedFilter");
            instrumentedFilterHolder.setAsyncSupported(true);
            context.addFilter(instrumentedFilterHolder, "/*", EnumSet.of(DispatcherType.REQUEST));

            context.setAttribute(HealthCheckServlet.HEALTH_CHECK_REGISTRY,
                    ElideResourceConfig.getHealthCheckRegistry());
            context.setAttribute(InstrumentedFilter.REGISTRY_ATTRIBUTE, ElideResourceConfig.getMetricRegistry());

            context.setAttribute(MetricsServlet.METRICS_REGISTRY, ElideResourceConfig.getMetricRegistry());
            context.addServlet(AdminServlet.class, "/stats/*");
        }

        // Allow the settings object to register additional servlets/filters.
        elideStandaloneSettings.updateServletContextHandler(context);

        try {
            jettyServer.start();
            log.info("Jetty started!");
            if (block) {
                // join() blocks the calling thread until the server shuts down.
                jettyServer.join();
            }
        } catch (Exception e) {
            log.error("Unexpected exception caught: {}", e.getMessage(), e);
            throw e;
        } finally {
            if (block) {
                jettyServer.destroy();
            }
        }
    }

    /**
     * Stop the Elide service.
     */
    public void stop() throws Exception {
        jettyServer.stop();
        jettyServer.destroy();
    }
}
package yet.ui.util;

import android.os.Build;
import android.os.Bundle;
import android.telephony.TelephonyManager;

import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Locale;

import yet.util.MyDate;
import yet.util.log.xlog;

/**
 * Debug helpers that dump reflection, Bundle, Build and telephony information
 * to the {@code xlog} logger.
 */
public class Dumps {

	/**
	 * Logs every public method of a class, sorted by name.
	 * (Translated from Chinese: "print all methods of a class")
	 *
	 * @param cls the class whose public methods will be logged as "name(params) --> returnType"
	 */
	public static void classMethod(Class<?> cls) {
		ArrayList<String> all = new ArrayList<String>(64);
		for (Method m : cls.getMethods()) {
			StringBuilder sb = new StringBuilder(128);
			sb.append(m.getName());
			sb.append("(");
			boolean first = true;
			for (Class<?> p : m.getParameterTypes()) {
				if (first) {
					first = false;
				} else {
					sb.append(",");
				}
				sb.append(p.getSimpleName());
			}
			sb.append(") --> ");
			sb.append(m.getReturnType().getSimpleName());
			if (Modifier.isStatic(m.getModifiers())) {
				sb.append(" [static]");
			}
			all.add(sb.toString());
		}
		Collections.sort(all);
		for (String m : all) {
			xlog.INSTANCE.d(m);
		}
	}

	/** Logs all keys/values of a Bundle, recursing into nested Bundles. */
	public static void bundle(Bundle b) {
		dumpBundle("", b);
	}

	// Recursive worker for bundle(); prefix carries the current indentation.
	private static void dumpBundle(String prefix, Bundle b) {
		if (b != null) {
			for (String key : b.keySet()) {
				Object value = b.get(key);
				if (value instanceof Bundle) {
					xlog.INSTANCE.d(prefix, key);
					dumpBundle(prefix + " ", (Bundle) value);
				} else {
					xlog.INSTANCE.d(prefix, key, value, value == null ? " null " : value.getClass().getSimpleName());
				}
			}
		}
	}

	/** Logs the android.os.Build constants (deprecated fields like CPU_ABI included intentionally). */
	@SuppressWarnings("deprecation")
	public static void build() {
		xlog.INSTANCE.d("BOARD", Build.BOARD);
		xlog.INSTANCE.d("BOOTLOADER", Build.BOOTLOADER);
		xlog.INSTANCE.d("BRAND", Build.BRAND);
		xlog.INSTANCE.d("CPU_ABI", Build.CPU_ABI);
		xlog.INSTANCE.d("CPU_ABI2", Build.CPU_ABI2);
		xlog.INSTANCE.d("DEVICE", Build.DEVICE);
		xlog.INSTANCE.d("DISPLAY", Build.DISPLAY);
		xlog.INSTANCE.d("FINGERPRINT", Build.FINGERPRINT);
		xlog.INSTANCE.d("HARDWARE", Build.HARDWARE);
		xlog.INSTANCE.d("HOST", Build.HOST);
		xlog.INSTANCE.d("ID", Build.ID);
		xlog.INSTANCE.d("MANUFACTURER", Build.MANUFACTURER);
		xlog.INSTANCE.d("MODEL", Build.MODEL);
		xlog.INSTANCE.d("PRODUCT", Build.PRODUCT);
		xlog.INSTANCE.d("RADIO", Build.RADIO);
		xlog.INSTANCE.d("SERIAL", Build.SERIAL);
		xlog.INSTANCE.d("TAGS", Build.TAGS);
		xlog.INSTANCE.d("TIME", new MyDate(Build.TIME, Locale.getDefault()).formatDate());
		xlog.INSTANCE.d("TYPE", Build.TYPE);
		xlog.INSTANCE.d("UNKNOWN", Build.UNKNOWN);
		xlog.INSTANCE.d("USER", Build.USER);
		xlog.INSTANCE.d("getRadioVersion", Build.getRadioVersion());
	}

	/** Logs the state of a TelephonyManager (numbers, operator, SIM and device identifiers). */
	public static void telMgr(TelephonyManager tm) {
		xlog.INSTANCE.d("Dump TelephonyManager");
		xlog.INSTANCE.d("line 1 number:", tm.getLine1Number());
		xlog.INSTANCE.d("country iso: ", tm.getNetworkCountryIso());
		xlog.INSTANCE.d("getNetworkOperator", tm.getNetworkOperator());
		xlog.INSTANCE.d("getNetworkOperatorName", tm.getNetworkOperatorName());
		xlog.INSTANCE.d("getNetworkType", tm.getNetworkType());
		xlog.INSTANCE.d("getPhoneType", tm.getPhoneType());
		xlog.INSTANCE.d("getSimCountryIso", tm.getSimCountryIso());
		xlog.INSTANCE.d("getSimOperator", tm.getSimOperator());
		xlog.INSTANCE.d("getSimOperatorName ", tm.getSimOperatorName());
		xlog.INSTANCE.d("getSimSerialNumber ", tm.getSimSerialNumber());
		xlog.INSTANCE.d("getSimState ", tm.getSimState());
		xlog.INSTANCE.d("getSubscriberId ", tm.getSubscriberId());
		xlog.INSTANCE.d("getDeviceId ", tm.getDeviceId());
		xlog.INSTANCE.d("getDeviceSoftwareVersion ", tm.getDeviceSoftwareVersion());
	}
}
package com.evolvus.sandstorm; /** * * @author EVOLVUS\shrimank * */ public class InvalidFileException extends Exception { /** * */ private static final long serialVersionUID = -2776450355349830013L; public InvalidFileException() { super(); } public InvalidFileException(String message) { super(message); } public InvalidFileException(Throwable cause) { super(cause); } public InvalidFileException(String message, Throwable cause) { super(message, cause); } public InvalidFileException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) { super(message, cause, enableSuppression, writableStackTrace); } }
package com.vincent.app.kafka.producer.service; import com.google.gson.Gson; import com.google.gson.GsonBuilder; import com.vincent.app.kafka.producer.constants.KafkaTopicConstant; import com.vincent.app.kafka.producer.model.Message; import lombok.extern.slf4j.Slf4j; import org.springframework.kafka.core.KafkaTemplate; import org.springframework.stereotype.Component; import javax.annotation.Resource; import java.util.Date; import java.util.concurrent.atomic.AtomicLong; /** * kafka消息生产 */ @Component @Slf4j public class KafkaProducer { @Resource private KafkaTemplate<String, String> kafkaTemplate; private Gson gson = new GsonBuilder().create(); AtomicLong atomicLong = new AtomicLong(0L); public void send() { String msg = "中国崛起:"+atomicLong.incrementAndGet(); send(msg); } public void send(final String msg) { Message message = new Message(); message.setId(System.currentTimeMillis()); message.setMsg(msg); message.setSendTime(new Date()); log.info("+++++++++++++++++++++send message = {}", gson.toJson(message)); kafkaTemplate.send(KafkaTopicConstant.testTopicName, message.getId().toString() , gson.toJson(message)); } }
package org.opencds.cqf.cql.elm.execution; import org.cqframework.cql.elm.execution.ValueSetRef; import org.cqframework.cql.elm.execution.ValueSetDef; import org.cqframework.cql.elm.execution.CodeSystemRef; import org.cqframework.cql.elm.execution.CodeSystemDef; import org.opencds.cqf.cql.execution.Context; import org.opencds.cqf.cql.runtime.Code; import org.opencds.cqf.cql.runtime.Concept; import org.opencds.cqf.cql.terminology.CodeSystemInfo; import org.opencds.cqf.cql.terminology.TerminologyProvider; import org.opencds.cqf.cql.terminology.ValueSetInfo; import java.util.List; import java.util.ArrayList; /* in(code String, valueset ValueSetRef) Boolean in(code Code, valueset ValueSetRef) Boolean in(concept Concept, valueset ValueSetRef) Boolean The in (Valueset) operators determine whether or not a given code is in a particular valueset. Note that these operators can only be invoked by referencing a defined valueset. For the String overload, if the given valueset contains a code with an equivalent code element, the result is true. For the Code overload, if the given valueset contains an equivalent code, the result is true. For the Concept overload, if the given valueset contains a code equivalent to any code in the given concept, the result is true. If the code argument is null, the result is null. 
*/ /** * Created by Chris Schuler on 7/13/2016 */ public class InValueSetEvaluator extends org.cqframework.cql.elm.execution.InValueSet { public Object inValueSet(Context context, Object code, Object valueset) { if (code == null) { return null; } // Resolve ValueSetRef & CodeSystemRef -- Account for multiple codesystems represented within a valueset ValueSetDef vsd = resolveVSR(context, (ValueSetRef)valueset); List<CodeSystemDef> codeSystemDefs = new ArrayList<>(); for (CodeSystemRef csr : vsd.getCodeSystem()) { codeSystemDefs.add(resolveCSR(context, csr)); } List<CodeSystemInfo> codeSystemInfos = new ArrayList<>(); if (codeSystemDefs.size() > 0) { for (CodeSystemDef csd : codeSystemDefs) { codeSystemInfos.add(new CodeSystemInfo().withId(csd.getId()).withVersion(csd.getVersion())); } } // TODO: find better solution than this -- temporary solution else { codeSystemInfos.add(new CodeSystemInfo().withId(null).withVersion(null)); } List<ValueSetInfo> valueSetInfos = new ArrayList<>(); for (CodeSystemInfo csi : codeSystemInfos) { valueSetInfos.add(new ValueSetInfo().withId(vsd.getId()).withVersion(vsd.getVersion()).withCodeSystem(csi)); } TerminologyProvider provider = context.resolveTerminologyProvider(); // perform operation if (code instanceof String) { for (ValueSetInfo vsi : valueSetInfos) { if (provider.in(new Code().withCode((String)code), vsi)) { return true; } } return false; } else if (code instanceof Code) { for (ValueSetInfo vsi : valueSetInfos) { if (provider.in((Code)code, vsi)) { return true; } } return false; } else if (code instanceof Concept) { for (ValueSetInfo vsi : valueSetInfos) { for (Code codes : ((Concept)code).getCodes()) { if (codes == null) return null; if (provider.in(codes, vsi)) return true; } return false; } } throw new IllegalArgumentException(String.format("Cannot InValueSet Code arguments of type '%s'.", code.getClass().getName())); } public ValueSetDef resolveVSR(Context context, ValueSetRef valueset) { return 
context.resolveValueSetRef(valueset.getLibraryName(), valueset.getName()); } public CodeSystemDef resolveCSR(Context context, CodeSystemRef codesystem) { return context.resolveCodeSystemRef(codesystem.getLibraryName(), codesystem.getName()); } @Override public Object evaluate(Context context) { Object code = getCode().evaluate(context); Object valueset = getValueset(); return context.logTrace(this.getClass(), inValueSet(context, code, valueset), code, valueset); } }
package com.maijia.domain.executor;

import rx.Scheduler;

/**
 * Created by XiaoKong on 2017/6/14.
 * Description: supplies the RxJava {@link Scheduler} used for observing results.
 * (Translated from Chinese: "operates on the UI thread")
 */
public interface PostExecutionThread {

    /**
     * @return the Scheduler on which results should be delivered — per the original
     *         comment, a UI-thread scheduler.
     */
    Scheduler getScheduler();
}
package io.deepstream;

import com.google.j2objc.annotations.ObjectiveCName;

import java.util.EnumSet;

/**
 * Provides all the different events that may occur. Most are related to errors for finer debugging and logging
 */
public enum Event {
    /**
     * To indicate a connection has not been authenticated longer than expected
     */
    UNAUTHENTICATED_CONNECTION_TIMEOUT,
    /**
     * To indicate a connection error was encountered
     */
    CONNECTION_ERROR,
    /**
     * To indicate the connection state was changed
     */
    CONNECTION_STATE_CHANGED,
    /**
     * To indicate a subscription ack has timed out
     */
    ACK_TIMEOUT,
    /**
     * Indicates the credentials provided were incorrect
     */
    INVALID_AUTH_DATA,
    /**
     * To indicate a record read from deepstream has timed out
     */
    RESPONSE_TIMEOUT,
    /**
     * To indicate a record read from deepstream has timed out while attempting to read from cache
     */
    CACHE_RETRIEVAL_TIMEOUT,
    /**
     * To indicate a record read from deepstream has timed out while attempting to read from storage
     */
    STORAGE_RETRIEVAL_TIMEOUT,
    /**
     * To indicate a record delete timed out
     */
    DELETE_TIMEOUT,
    /**
     * To indicate the client has received a message that it didn't expect
     */
    UNSOLICITED_MESSAGE,
    /**
     * To indicate the client has received a message that can't be parsed
     */
    MESSAGE_PARSE_ERROR,
    /**
     * To indicate the client has received a record update that conflicts with the one
     * in cache
     */
    VERSION_EXISTS,
    /**
     * To indicate the client has attempted to perform an operation before authenticating
     */
    NOT_AUTHENTICATED,
    /**
     * To indicate the client has attempted to add a listen pattern twice
     */
    LISTENER_EXISTS,
    /**
     * To indicate the client has attempted to remove a pattern that doesn't exist
     */
    NOT_LISTENING,
    /**
     * To indicate the client has attempted to login with incorrect credentials too many times
     */
    TOO_MANY_AUTH_ATTEMPTS,
    /**
     * To indicate the client has attempted to perform an action on a connection that has been closed
     */
    IS_CLOSED,
    /**
     * To indicate the client has attempted to get a snapshot of a record that doesn't exist
     */
    RECORD_NOT_FOUND,
    /**
     * To indicate the client has attempted to perform an action they are not permissioned to
     */
    MESSAGE_DENIED,
    /**
     * To indicate multi subscriptions
     */
    MULTIPLE_SUBSCRIPTIONS;

    /**
     * Looks up the Event whose name matches the given wire string.
     *
     * @param event the event name received from the server (may be null)
     * @return the matching Event, or null if no constant has that name
     */
    @ObjectiveCName("getEvent:")
    static Event getEvent(String event) {
        // Iterate the cached values() array instead of allocating an EnumSet on every call.
        for (Event s : values()) {
            if (s.name().equals(event)) {
                return s;
            }
        }
        return null;
    }
}
package io.fabric8.launcher.service.openshift.impl; import io.fabric8.kubernetes.api.Controller; import io.fabric8.kubernetes.client.KubernetesClientException; import io.fabric8.openshift.api.model.Project; import io.fabric8.openshift.client.OpenShiftClient; /** * This controller has fixes for starter clusters * * eg. Starter cluster does not immediately returns that a project was created in * https://console.starter-us-east-1.openshift.com/apis/project.openshift.io/v1/projects for example, so the checkNamespace method * must act differently * * @author <a href="mailto:ggastald@redhat.com">George Gastaldi</a> */ class OpenShiftController extends Controller { OpenShiftController(OpenShiftClient client) { super(client); } @Override public boolean checkNamespace(String namespaceName) { OpenShiftClient client = getOpenShiftClientOrNull(); try { Project project = client.projects().withName(namespaceName).get(); return project != null; } catch (KubernetesClientException ignored) { return false; } } }
// Copyright 2019 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.api.ads.admanager.jaxws.v201905; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlType; /** * * The action used for refreshing the master playlists of {@link LiveStreamEvent} objects. * * <p>This action will only get applied to live streams with a refresh type of {@link * RefreshType#MANUAL}. * * * <p>Java class for RefreshLiveStreamEventMasterPlaylists complex type. * * <p>The following schema fragment specifies the expected content contained within this class. * * <pre> * &lt;complexType name="RefreshLiveStreamEventMasterPlaylists"> * &lt;complexContent> * &lt;extension base="{https://www.google.com/apis/ads/publisher/v201905}LiveStreamEventAction"> * &lt;sequence> * &lt;/sequence> * &lt;/extension> * &lt;/complexContent> * &lt;/complexType> * </pre> * * */ @XmlAccessorType(XmlAccessType.FIELD) @XmlType(name = "RefreshLiveStreamEventMasterPlaylists") public class RefreshLiveStreamEventMasterPlaylists extends LiveStreamEventAction { }
/*
 * Copyright (C) 2016 android@19code.com
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.chzz.library.demo.view;

import org.chzz.library.demo.model.AppBean;

import java.util.List;

/**
 * Create by h4de5ing 2016/5/24 024
 *
 * View contract for the app-list screen (MVP-style): the presenter drives loading
 * indicators and pushes the fetched app list through this interface.
 */
public interface IAppView {

    /** Show a loading indicator while data is being fetched. */
    void showLoading();

    /** Hide the loading indicator once fetching has finished. */
    void hideLoading();

    /**
     * Deliver the fetched data to the view.
     *
     * @param list the apps to render
     */
    void referData(List<AppBean> list);
}
package com.github.charlemaznable.core.net.ohclient; import com.github.charlemaznable.core.net.common.DefaultErrorMappingDisabled; import com.github.charlemaznable.core.net.common.HttpStatus; import com.github.charlemaznable.core.net.common.Mapping; import com.github.charlemaznable.core.net.common.StatusError; import com.github.charlemaznable.core.net.common.StatusErrorMapping; import com.github.charlemaznable.core.net.common.StatusSeriesErrorMapping; import com.github.charlemaznable.core.net.ohclient.OhFactory.OhLoader; import lombok.SneakyThrows; import lombok.val; import okhttp3.mockwebserver.Dispatcher; import okhttp3.mockwebserver.MockResponse; import okhttp3.mockwebserver.MockWebServer; import okhttp3.mockwebserver.RecordedRequest; import org.junit.jupiter.api.Test; import static com.github.charlemaznable.core.context.FactoryContext.ReflectFactory.reflectFactory; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertThrows; public class OhResponseMappingTest { private static OhLoader ohLoader = OhFactory.ohLoader(reflectFactory()); @SneakyThrows @Test public void testOhResponseMapping() { try (val mockWebServer = new MockWebServer()) { mockWebServer.setDispatcher(new Dispatcher() { @Override public MockResponse dispatch(RecordedRequest request) { switch (request.getPath()) { case "/sampleNotFound": return new MockResponse() .setResponseCode(HttpStatus.NOT_FOUND.value()) .setBody(HttpStatus.NOT_FOUND.getReasonPhrase()); case "/sampleClientError": return new MockResponse() .setResponseCode(HttpStatus.FORBIDDEN.value()) .setBody(HttpStatus.FORBIDDEN.getReasonPhrase()); case "/sampleMappingNotFound": return new MockResponse() .setResponseCode(HttpStatus.NOT_FOUND.value()) .setBody(HttpStatus.NOT_FOUND.getReasonPhrase()); case "/sampleMappingClientError": return new MockResponse() .setResponseCode(HttpStatus.FORBIDDEN.value()) .setBody(HttpStatus.FORBIDDEN.getReasonPhrase()); case "/sampleServerError": 
return new MockResponse() .setResponseCode(HttpStatus.INTERNAL_SERVER_ERROR.value()) .setBody(HttpStatus.INTERNAL_SERVER_ERROR.getReasonPhrase()); default: return new MockResponse().setBody("OK"); } } }); mockWebServer.start(41180); val httpClient = ohLoader.getClient(MappingHttpClient.class); assertThrows(NotFoundException.class, httpClient::sampleNotFound); assertThrows(ClientErrorException.class, httpClient::sampleClientError); assertThrows(NotFoundException2.class, httpClient::sampleMappingNotFound); assertThrows(ClientErrorException2.class, httpClient::sampleMappingClientError); assertThrows(StatusError.class, httpClient::sampleServerError); val defaultHttpClient = ohLoader.getClient(DefaultMappingHttpClient.class); try { defaultHttpClient.sampleNotFound(); } catch (Exception e) { assertEquals(StatusError.class, e.getClass()); StatusError er = (StatusError) e; assertEquals(HttpStatus.NOT_FOUND.value(), er.getStatusCode()); assertEquals(HttpStatus.NOT_FOUND.getReasonPhrase(), er.getMessage()); } try { defaultHttpClient.sampleClientError(); } catch (Exception e) { assertEquals(StatusError.class, e.getClass()); StatusError er = (StatusError) e; assertEquals(HttpStatus.FORBIDDEN.value(), er.getStatusCode()); assertEquals(HttpStatus.FORBIDDEN.getReasonPhrase(), er.getMessage()); } try { defaultHttpClient.sampleMappingNotFound(); } catch (Exception e) { assertEquals(StatusError.class, e.getClass()); StatusError er = (StatusError) e; assertEquals(HttpStatus.NOT_FOUND.value(), er.getStatusCode()); assertEquals(HttpStatus.NOT_FOUND.getReasonPhrase(), er.getMessage()); } try { defaultHttpClient.sampleMappingClientError(); } catch (Exception e) { assertEquals(StatusError.class, e.getClass()); StatusError er = (StatusError) e; assertEquals(HttpStatus.FORBIDDEN.value(), er.getStatusCode()); assertEquals(HttpStatus.FORBIDDEN.getReasonPhrase(), er.getMessage()); } try { defaultHttpClient.sampleServerError(); } catch (Exception e) { assertEquals(StatusError.class, 
e.getClass()); StatusError er = (StatusError) e; assertEquals(HttpStatus.INTERNAL_SERVER_ERROR.value(), er.getStatusCode()); assertEquals(HttpStatus.INTERNAL_SERVER_ERROR.getReasonPhrase(), er.getMessage()); } val disabledHttpClient = ohLoader.getClient(DisabledMappingHttpClient.class); assertEquals(HttpStatus.NOT_FOUND.getReasonPhrase(), disabledHttpClient.sampleNotFound()); assertEquals(HttpStatus.FORBIDDEN.getReasonPhrase(), disabledHttpClient.sampleClientError()); assertEquals(HttpStatus.NOT_FOUND.getReasonPhrase(), disabledHttpClient.sampleMappingNotFound()); assertEquals(HttpStatus.FORBIDDEN.getReasonPhrase(), disabledHttpClient.sampleMappingClientError()); assertEquals(HttpStatus.INTERNAL_SERVER_ERROR.getReasonPhrase(), disabledHttpClient.sampleServerError()); } } @StatusErrorMapping(status = HttpStatus.NOT_FOUND, exception = NotFoundException.class) @StatusSeriesErrorMapping(statusSeries = HttpStatus.Series.CLIENT_ERROR, exception = ClientErrorException.class) @Mapping("${root}:41180") @OhClient public interface MappingHttpClient { String sampleNotFound(); String sampleClientError(); @StatusErrorMapping(status = HttpStatus.NOT_FOUND, exception = NotFoundException2.class) @StatusSeriesErrorMapping(statusSeries = HttpStatus.Series.CLIENT_ERROR, exception = ClientErrorException2.class) String sampleMappingNotFound(); @StatusErrorMapping(status = HttpStatus.NOT_FOUND, exception = NotFoundException2.class) @StatusSeriesErrorMapping(statusSeries = HttpStatus.Series.CLIENT_ERROR, exception = ClientErrorException2.class) String sampleMappingClientError(); String sampleServerError(); } @Mapping("${root}:41180") @OhClient public interface DefaultMappingHttpClient { void sampleNotFound(); void sampleClientError(); void sampleMappingNotFound(); void sampleMappingClientError(); void sampleServerError(); } @DefaultErrorMappingDisabled @Mapping("${root}:41180") @OhClient public interface DisabledMappingHttpClient { String sampleNotFound(); String sampleClientError(); 
String sampleMappingNotFound(); String sampleMappingClientError(); String sampleServerError(); } public static class NotFoundException extends StatusError { private static final long serialVersionUID = -6500698707558354057L; public NotFoundException(int statusCode, String message) { super(statusCode, message); } } public static class ClientErrorException extends StatusError { private static final long serialVersionUID = -3870950937253448454L; public ClientErrorException(int statusCode, String message) { super(statusCode, message); } } public static class NotFoundException2 extends StatusError { private static final long serialVersionUID = 8138254149072329848L; public NotFoundException2(int statusCode, String message) { super(statusCode, message); } } public static class ClientErrorException2 extends StatusError { private static final long serialVersionUID = -7855725166604686605L; public ClientErrorException2(int statusCode, String message) { super(statusCode, message); } } }
/* * Copyright (c) 2019-2021 GeyserMC. http://geysermc.org * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
* * @author GeyserMC * @link https://github.com/GeyserMC/Geyser */ package org.geysermc.connector.network.translators.java.entity; import com.github.steveice10.mc.protocol.packet.ingame.server.entity.ServerEntityHeadLookPacket; import org.geysermc.connector.entity.Entity; import org.geysermc.connector.network.session.RorySession; import org.geysermc.connector.network.translators.PacketTranslator; import org.geysermc.connector.network.translators.Translator; @Translator(packet = ServerEntityHeadLookPacket.class) public class JavaEntityHeadLookTranslator extends PacketTranslator<ServerEntityHeadLookPacket> { @Override public void translate(ServerEntityHeadLookPacket packet, RorySession session) { Entity entity = session.getEntityCache().getEntityByJavaId(packet.getEntityId()); if (packet.getEntityId() == session.getPlayerEntity().getEntityId()) { entity = session.getPlayerEntity(); } if (entity == null) return; entity.updateHeadLookRotation(session, packet.getHeadYaw()); } }
/* * Copyright 2000-2016 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.jetbrains.python.module; import com.intellij.openapi.module.Module; import com.intellij.openapi.module.ModuleConfigurationEditor; import com.intellij.openapi.module.ModuleType; import com.intellij.openapi.roots.ui.configuration.DefaultModuleConfigurationEditorFactory; import com.intellij.openapi.roots.ui.configuration.ModuleConfigurationEditorProvider; import com.intellij.openapi.roots.ui.configuration.ModuleConfigurationState; import org.jetbrains.jps.model.java.JavaSourceRootType; import java.util.ArrayList; import java.util.List; /** * @author yole */ public class PythonModuleConfigurationEditorProvider implements ModuleConfigurationEditorProvider { public ModuleConfigurationEditor[] createEditors(final ModuleConfigurationState state) { final Module module = state.getRootModel().getModule(); if (!(ModuleType.get(module) instanceof PythonModuleType)) return ModuleConfigurationEditor.EMPTY; final DefaultModuleConfigurationEditorFactory editorFactory = DefaultModuleConfigurationEditorFactory.getInstance(); final List<ModuleConfigurationEditor> editors = new ArrayList<>(); editors.add(new PyContentEntriesEditor(module, state, false, JavaSourceRootType.SOURCE)); editors.add(editorFactory.createClasspathEditor(state)); return editors.toArray(new ModuleConfigurationEditor[editors.size()]); } }
/**
 * Copyright (C) 2002-2014 Fabrizio Giustina, the Displaytag team
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
package org.displaytag.util;

import java.util.ArrayList;
import java.util.List;
import java.util.Locale;

import org.apache.commons.lang3.StringUtils;
import org.displaytag.properties.MediaTypeEnum;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * This class provides services for configuring and determining the list of media types an instance of
 * <code>SupportsMedia</code> supports. (Background: ColumnTag, FooterTag and CaptionTag can be configured to support
 * a set of media types. This class factors the logic for setting and determining the media instances these objects
 * support)
 * @author Jorge L. Barroso
 * @version $Revision$ ($Author$)
 */
public final class MediaUtil
{

    /**
     * logger.
     */
    private static final Logger log = LoggerFactory.getLogger(MediaUtil.class);

    /**
     * Don't instantiate MediaUtil.
     */
    private MediaUtil()
    {
    }

    /**
     * Defines a type of object that can support a list of media types.
     * @author Jorge L. Barroso
     * @version $Revision$ ($Author$)
     */
    public interface SupportsMedia
    {

        /**
         * Configure the list of media types this object will support.
         * @param media The list of media types this object will support.
         */
        void setSupportedMedia(List<MediaTypeEnum> media);

        /**
         * Obtain the list of media types this object supports.
         * @return The list of media types this object supports.
         */
        List<MediaTypeEnum> getSupportedMedia();
    }

    /**
     * Configures the media supported by an object that implements <code>SupportsMedia</code>. (Background: factored
     * from ColumnTag)
     * @param mediaSupporter The <code>SupportsMedia</code> instance being configured to support a list of media.
     * @param media The media being configured on the given <code>SupportsMedia</code> instance; a blank value or any
     * value containing "all" means "support every media type" (stored as <code>null</code>).
     */
    public static void setMedia(SupportsMedia mediaSupporter, String media)
    {
        if (mediaSupporter == null)
        {
            return;
        }

        // Locale.ROOT keeps the case-folding locale-independent (e.g. the
        // Turkish dotless-i would otherwise break the "all" check).
        if (StringUtils.isBlank(media) || media.toLowerCase(Locale.ROOT).contains("all"))
        {
            // null supported-media list means "available for every media type".
            mediaSupporter.setSupportedMedia(null);
            return;
        }

        List<MediaTypeEnum> supportedMedia = new ArrayList<>();
        for (String value : StringUtils.split(media))
        {
            if (StringUtils.isBlank(value))
            {
                continue;
            }
            MediaTypeEnum type = MediaTypeEnum.fromName(value.toLowerCase(Locale.ROOT));
            if (type == null)
            {
                // Unknown names are skipped (with a warning), not treated as errors.
                log.warn("Unrecognized value for attribute \"media\" value=\"{}\"", value);
            }
            else
            {
                supportedMedia.add(type);
            }
        }
        mediaSupporter.setSupportedMedia(supportedMedia);
    }

    /**
     * Is this media supporter configured for the media type? (Background: Factored from ColumnTag)
     * @param mediaSupporter An object that supports various media.
     * @param mediaType The currentMedia type
     * @return true if the media supporter should be displayed for this request
     */
    public static boolean availableForMedia(SupportsMedia mediaSupporter, MediaTypeEnum mediaType)
    {
        if (mediaSupporter == null)
        {
            return false;
        }

        List<MediaTypeEnum> supportedMedia = mediaSupporter.getSupportedMedia();
        // A null list means the supporter was configured for "all" media.
        if (supportedMedia == null)
        {
            return true;
        }
        return supportedMedia.contains(mediaType);
    }
}
package com.lhjz.portal.repository; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.context.SpringBootTest; import org.springframework.test.context.testng.AbstractTestNGSpringContextTests; import org.testng.annotations.Test; import com.lhjz.portal.Application; @SpringBootTest(classes = Application.class) public class ChatRepositoryTest extends AbstractTestNGSpringContextTests { @Autowired ChatRepository chatRepository; @Test public void countAllNew() { System.out.println(chatRepository.count()); } @Test public void queryMaxAndMinId() { Object[] mm = (Object[]) chatRepository.queryMaxAndMinId(); System.out.println(mm[0]); System.out.println(mm[1]); } }
package test.pl.pawz.zelbet.tests.Shearing; import org.junit.Test; import pl.pawz.zelbet.ULS.ShearingBendRods; import static org.junit.Assert.assertEquals; public class ShearingP10 { private double fCk = 30; private double fYk = 500; private double b = 0.30f; private double a1 = 0.05f; private double a2 = 0.05f; private double h = 0.6f; private double cotTheta = 2f; private double aSl = 0.001f; private double nS1 = 2; private double nS2 = 2; private double fiS1 = 6 * Math.pow(10, -3); private double fiS2 = 12 * Math.pow(10, -3); double nEd = 0.25f; private static final double DELTA = 1e-3; @Test public void value() { double vEd = 0.0624f; double vEdRed = 0.052f; ShearingBendRods res = new ShearingBendRods(h,a1,b,fCk,nEd,aSl,nS1,nS2,fiS1,fiS2,fYk,vEdRed,vEd,0.55,cotTheta,45); double valueVRdC = res.getVRdC(); System.out.println(valueVRdC); double valueVRdMax = res.getVRdMax(); System.out.println(valueVRdMax); assertEquals(0.123798 ,valueVRdC, 1e-5); assertEquals(0.806482 ,valueVRdMax, 1e-5); } @Test public void test147() { double vEd = 0.147072f; double vEdRed = 0.12256f; ShearingBendRods res = new ShearingBendRods(h,a1,b,fCk,nEd,aSl,nS1,nS2,fiS1,fiS2,fYk,vEdRed,vEd,0.55,cotTheta,45); double result1 = res.resultShearingStirrups(); System.out.println(result1); assertEquals(0.215, result1, DELTA); } @Test public void test195() { double vEd = 0.1956f; double vEdRed = 0.163f; ShearingBendRods res = new ShearingBendRods(h,a1,b,fCk,nEd,aSl,nS1,nS2,fiS1,fiS2,fYk,vEdRed,vEd,0.55,cotTheta,45); double result1 = res.resultShearingStirrups(); System.out.println(result1); assertEquals(0.215, result1, DELTA); } @Test public void test211() { double vEd = 0.2112f; double vEdRed = 0.176f; ShearingBendRods res = new ShearingBendRods(h,a1,b,fCk,nEd,aSl,nS1,nS2,fiS1,fiS2,fYk,vEdRed,vEd,0.55,cotTheta,45); double result1 = res.resultShearingStirrups(); System.out.println(result1); assertEquals(0.215, result1, DELTA); } @Test public void test242() { double vEd = 0.24249f; double vEdRed = 
0.202f; ShearingBendRods res = new ShearingBendRods(h,a1,b,fCk,nEd,aSl,nS1,nS2,fiS1,fiS2,fYk,vEdRed,vEd,0.55,cotTheta,45); double result1 = res.resultShearingStirrups(); System.out.println(result1); assertEquals(0.215, result1, DELTA); } @Test public void test258() { double vEd = 0.258f; double vEdRed = 0.215f; ShearingBendRods res = new ShearingBendRods(h,a1,b,fCk,nEd,aSl,nS1,nS2,fiS1,fiS2,fYk,vEdRed,vEd,0.55,cotTheta,45); double result1 = res.resultShearingStirrups(); System.out.println(result1); assertEquals(0.215, result1, DELTA); } @Test public void test273() { double vEd = 0.2736f; double vEdRed = 0.228f; ShearingBendRods res = new ShearingBendRods(h,a1,b,fCk,nEd,aSl,nS1,nS2,fiS1,fiS2,fYk,vEdRed,vEd,0.55,cotTheta,45); double result1 = res.resultShearingStirrups(); System.out.println(result1); assertEquals(0.214, result1, DELTA); } @Test public void test304() { double vEd = 0.3048f; double vEdRed = 0.254f; ShearingBendRods res = new ShearingBendRods(h,a1,b,fCk,nEd,aSl,nS1,nS2,fiS1,fiS2,fYk,vEdRed,vEd,0.55,cotTheta,45); double result1 = res.resultShearingStirrups(); System.out.println(result1); assertEquals(0.192, result1, DELTA); } @Test public void test351() { double vEd = 0.3516f; double vEdRed = 0.293f; ShearingBendRods res = new ShearingBendRods(h,a1,b,fCk,nEd,aSl,nS1,nS2,fiS1,fiS2,fYk,vEdRed,vEd,0.55,cotTheta,45); double result1 = res.resultShearingStirrups(); System.out.println(result1); assertEquals(0.166, result1, DELTA); } @Test public void test398() { double vEd = 0.3984f; double vEdRed = 0.332f; ShearingBendRods res = new ShearingBendRods(h,a1,b,fCk,nEd,aSl,nS1,nS2,fiS1,fiS2,fYk,vEdRed,vEd,0.55,cotTheta,45); double result1 = res.resultShearingStirrups(); System.out.println(result1); assertEquals(0.147, result1, DELTA); } @Test public void test445() { double vEd = 0.4452f; double vEdRed = 0.371f; ShearingBendRods res = new ShearingBendRods(h,a1,b,fCk,nEd,aSl,nS1,nS2,fiS1,fiS2,fYk,vEdRed,vEd,0.55,cotTheta,45); double result1 = 
res.resultShearingStirrups(); System.out.println(result1); assertEquals(0.131, result1, DELTA); } @Test public void test507() { double vEd = 0.5076f; double vEdRed = 0.423f; ShearingBendRods res = new ShearingBendRods(h,a1,b,fCk,nEd,aSl,nS1,nS2,fiS1,fiS2,fYk,vEdRed,vEd,0.55,cotTheta,45); double result1 = res.resultShearingStirrups(); System.out.println(result1); assertEquals(0.103, result1, DELTA); } @Test public void test554() { double vEd = 0.5544f; double vEdRed = 0.462f; ShearingBendRods res = new ShearingBendRods(h,a1,b,fCk,nEd,aSl,nS1,nS2,fiS1,fiS2,fYk,vEdRed,vEd,0.55,cotTheta,45); double result1 = res.resultShearingStirrups(); System.out.println(result1); assertEquals(0.089, result1, DELTA); } @Test public void test601() { double vEd = 0.6012f; double vEdRed = 0.501f; ShearingBendRods res = new ShearingBendRods(h,a1,b,fCk,nEd,aSl,nS1,nS2,fiS1,fiS2,fYk,vEdRed,vEd,0.55,cotTheta,45); double result1 = res.resultShearingStirrups(); System.out.println(result1); assertEquals(0.078, result1, DELTA); } @Test public void test663() { double vEd = 0.6636f; double vEdRed = 0.553f; ShearingBendRods res = new ShearingBendRods(h,a1,b,fCk,nEd,aSl,nS1,nS2,fiS1,fiS2,fYk,vEdRed,vEd,0.55,cotTheta,45); double result1 = res.resultShearingStirrups(); System.out.println(result1); assertEquals(0.067, result1, DELTA); } @Test public void test694() { double vEd = 0.6948f; double vEdRed = 0.579f; ShearingBendRods res = new ShearingBendRods(h,a1,b,fCk,nEd,aSl,nS1,nS2,fiS1,fiS2,fYk,vEdRed,vEd,0.55,cotTheta,45); double result1 = res.resultShearingStirrups(); System.out.println(result1); assertEquals(0.062, result1, DELTA); } @Test public void test757() { double vEd = 0.7572f; double vEdRed = 0.631f; ShearingBendRods res = new ShearingBendRods(h,a1,b,fCk,nEd,aSl,nS1,nS2,fiS1,fiS2,fYk,vEdRed,vEd,0.55,cotTheta,45); double result1 = res.resultShearingStirrups(); System.out.println(result1); assertEquals(0.055, result1, DELTA); } @Test public void test788() { double vEd = 0.7884f; double vEdRed 
= 0.657f; ShearingBendRods res = new ShearingBendRods(h,a1,b,fCk,nEd,aSl,nS1,nS2,fiS1,fiS2,fYk,vEdRed,vEd,0.55,cotTheta,45); double result1 = res.resultShearingStirrups(); System.out.println(result1); assertEquals(0.052, result1, DELTA); } @Test public void test804() { double vEd = 0.804f; double vEdRed = 0.670f; ShearingBendRods res = new ShearingBendRods(h,a1,b,fCk,nEd,aSl,nS1,nS2,fiS1,fiS2,fYk,vEdRed,vEd,0.55,cotTheta,45); double result1 = res.resultShearingStirrups(); System.out.println(result1); assertEquals(0.050, result1, DELTA); } @Test public void test807() { double vEd = 0.807f; double vEdRed = 0.6725f; ShearingBendRods res = new ShearingBendRods(h,a1,b,fCk,nEd,aSl,nS1,nS2,fiS1,fiS2,fYk,vEdRed,vEd,0.55,cotTheta,45); double result1 = res.resultShearingStirrups(); System.out.println(result1); assertEquals(0.0, result1, DELTA); } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.shardingsphere.orchestration.core.metadatacenter; import org.apache.shardingsphere.orchestration.center.CenterRepository; import org.apache.shardingsphere.orchestration.core.metadatacenter.yaml.RuleSchemaMetaDataYamlSwapper; import org.apache.shardingsphere.orchestration.core.metadatacenter.yaml.YamlRuleSchemaMetaData; import org.apache.shardingsphere.underlying.common.metadata.schema.RuleSchemaMetaData; import org.apache.shardingsphere.underlying.common.yaml.engine.YamlEngine; /** * Read/Write meta data from center repository. */ public final class MetaDataCenter { private final MetaDataCenterNode node; private final CenterRepository repository; public MetaDataCenter(final String name, final CenterRepository centerRepository) { this.node = new MetaDataCenterNode(name); this.repository = centerRepository; } /** * Persist rule schema meta data to center repository. 
* * @param schemaName schema name * @param ruleSchemaMetaData rule schema meta data of the schema */ public void persistMetaDataCenterNode(final String schemaName, final RuleSchemaMetaData ruleSchemaMetaData) { repository.persist(node.getMetaDataCenterNodeFullPath(schemaName), YamlEngine.marshal(new RuleSchemaMetaDataYamlSwapper().swap(ruleSchemaMetaData))); } /** * Load rule schema meta data from center repository. * * @param schemaName schema name * @return rule schema meta data of the schema */ public RuleSchemaMetaData loadRuleSchemaMetaData(final String schemaName) { return new RuleSchemaMetaDataYamlSwapper().swap(YamlEngine.unmarshal(repository.get(node.getMetaDataCenterNodeFullPath(schemaName)), YamlRuleSchemaMetaData.class)); } }
/**
 * OLAT - Online Learning and Training<br>
 * http://www.olat.org
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); <br>
 * you may not use this file except in compliance with the License.<br>
 * You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing,<br>
 * software distributed under the License is distributed on an "AS IS" BASIS, <br>
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <br>
 * See the License for the specific language governing permissions and <br>
 * limitations under the License.
 * <p>
 * Copyright (c) since 2004 at Multimedia- & E-Learning Services (MELS),<br>
 * University of Zurich, Switzerland.
 * <hr>
 * <a href="http://www.openolat.org">
 * OpenOLAT - Online Learning and Training</a><br>
 * This file has been modified by the OpenOLAT community. Changes are licensed
 * under the Apache 2.0 license as the original file.
 * <p>
 */
package org.olat.restapi;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;

import java.io.File;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.util.Locale;

import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.UriBuilder;

import org.apache.http.HttpResponse;
import org.apache.http.client.methods.HttpDelete;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPut;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.olat.core.commons.persistence.DB;
import org.olat.core.id.Identity;
import org.olat.core.util.vfs.VFSContainer;
import org.olat.core.util.vfs.VFSItem;
import org.olat.course.CourseFactory;
import org.olat.course.ICourse;
import org.olat.course.nodes.BCCourseNode;
import org.olat.course.nodes.CourseNode;
import org.olat.course.nodes.CourseNodeConfiguration;
import org.olat.course.nodes.CourseNodeFactory;
import org.olat.repository.RepositoryEntry;
import org.olat.repository.RepositoryEntryStatusEnum;
import org.olat.restapi.support.vo.FolderVO;
import org.olat.restapi.support.vo.FolderVOes;
import org.olat.test.JunitTestHelper;
import org.olat.test.JunitTestHelper.IdentityWithLogin;
import org.olat.test.OlatRestTestCase;
import org.springframework.beans.factory.annotation.Autowired;

/**
 * REST integration tests for the folder (BC) course-node resource under
 * {@code /repo/courses/{courseId}/elements/folder}: reading folder metadata,
 * uploading files (including non-ASCII filenames), creating nested folders
 * (including the too-deep 406 case) and deleting folders. Each test deploys a
 * fresh course with a single folder node via {@link #deployCourse()}.
 */
public class CoursesFoldersTest extends OlatRestTestCase {

	// REST connection per test; opened in setUp, shut down in tearDown.
	private RestConnection conn;

	@Autowired
	private DB dbInstance;

	@Before
	public void setUp() throws Exception {
		conn = new RestConnection();
		dbInstance.intermediateCommit();
	}

	@After
	public void tearDown() throws Exception {
		try {
			if(conn != null) {
				conn.shutdown();
			}
		} catch (Exception e) {
			e.printStackTrace();
			throw e;
		}
	}

	/** Administrator can read a folder node's metadata (200 + FolderVO). */
	@Test
	public void testGetFolderInfo() throws IOException, URISyntaxException {
		assertTrue(conn.login("administrator", "openolat"));

		CourseWithBC courseWithBc = deployCourse();
		URI uri = UriBuilder.fromUri(getNodeURI(courseWithBc)).build();
		HttpGet get = conn.createGet(uri, MediaType.APPLICATION_JSON, true);
		HttpResponse response = conn.execute(get);
		assertEquals(200, response.getStatusLine().getStatusCode());
		FolderVO folder = conn.parse(response, FolderVO.class);
		assertNotNull(folder);
	}

	/**
	 * Check that the permission check are not to restrictive
	 * @throws IOException
	 * @throws URISyntaxException
	 */
	@Test
	public void testGetFolderInfoByUser() throws IOException, URISyntaxException {
		// Plain user (not owner/admin) must still be able to read the node info.
		IdentityWithLogin user = JunitTestHelper.createAndPersistRndUser("rest-user-bc");
		assertTrue(conn.login(user));

		CourseWithBC courseWithBc = deployCourse();
		URI uri = UriBuilder.fromUri(getNodeURI(courseWithBc)).build();
		HttpGet get = conn.createGet(uri, MediaType.APPLICATION_JSON, true);
		HttpResponse response = conn.execute(get);
		assertEquals(200, response.getStatusLine().getStatusCode());
		FolderVO folder = conn.parse(response, FolderVO.class);
		assertNotNull(folder);
	}

	/** Listing the course's folder nodes returns exactly the one deployed node. */
	@Test
	public void testGetFoldersInfo() throws IOException, URISyntaxException {
		assertTrue(conn.login("administrator", "openolat"));

		CourseWithBC courseWithBc = deployCourse();
		URI uri = UriBuilder.fromUri(getNodesURI(courseWithBc)).build();
		HttpGet get = conn.createGet(uri, MediaType.APPLICATION_JSON, true);
		HttpResponse response = conn.execute(get);
		assertEquals(200, response.getStatusLine().getStatusCode());
		FolderVOes folders = conn.parse(response, FolderVOes.class);
		assertNotNull(folders);
		assertEquals(1, folders.getTotalCount());
		assertNotNull(folders.getFolders());
		assertEquals(1, folders.getFolders().length);
	}

	/** PUT of a multipart file lands in the node's VFS container. */
	@Test
	public void testUploadFile() throws IOException, URISyntaxException {
		assertTrue(conn.login("administrator", "openolat"));

		CourseWithBC courseWithBc = deployCourse();
		URI uri = UriBuilder.fromUri(getNodeURI(courseWithBc)).path("files").build();

		//create single page
		URL fileUrl = CoursesFoldersTest.class.getResource("singlepage.html");
		assertNotNull(fileUrl);
		File file = new File(fileUrl.toURI());

		HttpPut method = conn.createPut(uri, MediaType.APPLICATION_JSON, true);
		conn.addMultipart(method, file.getName(), file);
		HttpResponse response = conn.execute(method);
		assertEquals(200, response.getStatusLine().getStatusCode());

		// Verify server-side: the uploaded file is resolvable in the node folder.
		VFSContainer folder = BCCourseNode.getNodeFolderContainer((BCCourseNode)courseWithBc.bcNode, courseWithBc.course.getCourseEnvironment());
		VFSItem item = folder.resolve(file.getName());
		assertNotNull(item);
	}

	/** Upload with a non-ASCII filename (umlaut) must keep the name intact. */
	@Test
	public void testUploadFile_withSpecialCharacter() throws IOException, URISyntaxException {
		assertTrue(conn.login("administrator", "openolat"));

		CourseWithBC courseWithBc = deployCourse();
		URI uri = UriBuilder.fromUri(getNodeURI(courseWithBc)).path("files").build();

		//create single page
		URL fileUrl = CoursesFoldersTest.class.getResource("singlepage.html");
		assertNotNull(fileUrl);
		File file = new File(fileUrl.toURI());
		String filename = "SingleP\u00E4ge.html";

		HttpPut method = conn.createPut(uri, MediaType.APPLICATION_JSON, true);
		conn.addMultipart(method, filename, file);
		HttpResponse response = conn.execute(method);
		assertEquals(200, response.getStatusLine().getStatusCode());

		VFSContainer folder = BCCourseNode.getNodeFolderContainer((BCCourseNode)courseWithBc.bcNode, courseWithBc.course.getCourseEnvironment());
		VFSItem item = folder.resolve(filename);
		assertNotNull(item);
		assertEquals(filename, item.getName());
	}

	/** PUT on a path segment creates a folder in the node container. */
	@Test
	public void testCreateFolder() throws IOException, URISyntaxException {
		assertTrue(conn.login("administrator", "openolat"));

		CourseWithBC courseWithBc = deployCourse();
		URI uri = UriBuilder.fromUri(getNodeURI(courseWithBc)).path("files").path("RootFolder").build();
		HttpPut method = conn.createPut(uri, MediaType.APPLICATION_JSON, true);
		HttpResponse response = conn.execute(method);
		assertEquals(200, response.getStatusLine().getStatusCode());

		VFSContainer folder = BCCourseNode.getNodeFolderContainer((BCCourseNode)courseWithBc.bcNode, courseWithBc.course.getCourseEnvironment());
		VFSItem item = folder.resolve("RootFolder");
		assertNotNull(item);
		assertTrue(item instanceof VFSContainer);
	}

	/** Nested path segments create intermediate folders in one request. */
	@Test
	public void testCreateFolders() throws IOException, URISyntaxException {
		assertTrue(conn.login("administrator", "openolat"));

		CourseWithBC courseWithBc = deployCourse();
		URI uri = UriBuilder.fromUri(getNodeURI(courseWithBc)).path("files").path("NewFolder1").path("NewFolder2").build();
		HttpPut method = conn.createPut(uri, MediaType.APPLICATION_JSON, true);
		HttpResponse response = conn.execute(method);
		assertEquals(200, response.getStatusLine().getStatusCode());

		VFSContainer folder = BCCourseNode.getNodeFolderContainer((BCCourseNode)courseWithBc.bcNode, courseWithBc.course.getCourseEnvironment());
		VFSItem item = folder.resolve("NewFolder1");
		assertNotNull(item);
		assertTrue(item instanceof VFSContainer);

		VFSContainer newFolder1 = (VFSContainer)item;
		VFSItem item2 = newFolder1.resolve("NewFolder2");
		assertNotNull(item2);
		assertTrue(item2 instanceof VFSContainer);
	}

	/** Excessively deep folder paths are rejected with 406 (depth limit guard). */
	@Test
	public void testCreateFolders_tooMany() throws IOException, URISyntaxException {
		assertTrue(conn.login("administrator", "openolat"));

		CourseWithBC courseWithBc = deployCourse();
		URI uri = UriBuilder.fromUri(getNodeURI(courseWithBc)).path("files").path("RootFolder")
				.path("Folder").path("Folder").path("Folder").path("Folder").path("Folder")
				.path("Folder").path("Folder").path("Folder").path("Folder").path("Folder")
				.path("Folder").path("Folder").path("Folder").path("Folder").path("Folder")
				.path("Folder").path("Folder").path("Folder").path("Folder").path("Folder").build();
		HttpPut method = conn.createPut(uri, MediaType.APPLICATION_JSON, true);
		HttpResponse response = conn.execute(method);
		assertEquals(406, response.getStatusLine().getStatusCode());
	}

	/** Folder creation with a non-ASCII segment (uml) resolves correctly. */
	@Test
	public void testCreateFolders_withSpecialCharacters() throws IOException, URISyntaxException {
		assertTrue(conn.login("administrator", "openolat"));

		CourseWithBC courseWithBc = deployCourse();
		URI uri = UriBuilder.fromUri(getNodeURI(courseWithBc)).path("files").path("RootFolder")
				.path("F\u00FClder").build();
		HttpPut method = conn.createPut(uri, MediaType.APPLICATION_JSON, true);
		HttpResponse response = conn.execute(method);
		assertEquals(200, response.getStatusLine().getStatusCode());

		VFSContainer folder = BCCourseNode.getNodeFolderContainer((BCCourseNode)courseWithBc.bcNode, courseWithBc.course.getCourseEnvironment());
		VFSItem item = folder.resolve("RootFolder/F\u00FClder");
		assertNotNull(item);
		assertTrue(item instanceof VFSContainer);
	}

	/** DELETE removes an existing folder; afterwards it no longer resolves. */
	@Test
	public void deleteFolder() throws IOException, URISyntaxException {
		CourseWithBC courseWithBc = deployCourse();

		//add some folders
		// Seed the folder to delete directly through the VFS if not present.
		VFSContainer folder = BCCourseNode.getNodeFolderContainer((BCCourseNode)courseWithBc.bcNode, courseWithBc.course.getCourseEnvironment());
		VFSItem item = folder.resolve("FolderToDelete");
		if(item == null) {
			folder.createChildContainer("FolderToDelete");
		}

		assertTrue(conn.login("administrator", "openolat"));
		URI uri = UriBuilder.fromUri(getNodeURI(courseWithBc)).path("files").path("FolderToDelete").build();
		HttpDelete method = conn.createDelete(uri, MediaType.APPLICATION_JSON);
		HttpResponse response = conn.execute(method);
		assertEquals(200, response.getStatusLine().getStatusCode());

		VFSItem deletedItem = folder.resolve("FolderToDelete");
		assertNull(deletedItem);
	}

	// URI of the single folder node of the deployed course.
	private URI getNodeURI(CourseWithBC courseWithBc) {
		return UriBuilder.fromUri(getContextURI()).path("repo").path("courses").path(courseWithBc.course.getResourceableId().toString())
			.path("elements").path("folder").path(courseWithBc.bcNode.getIdent()).build();
	}

	// URI listing all folder nodes of the deployed course.
	private URI getNodesURI(CourseWithBC courseWithBc) {
		return UriBuilder.fromUri(getContextURI()).path("repo").path("courses").path(courseWithBc.course.getResourceableId().toString())
			.path("elements").path("folder").build();
	}

	/**
	 * Deploys a basic course as administrator, adds one folder (bc) node under
	 * the root, publishes it, and returns both handles for the tests.
	 */
	private CourseWithBC deployCourse() {
		Identity admin = JunitTestHelper.findIdentityByLogin("administrator");
		RepositoryEntry courseEntry = JunitTestHelper.deployBasicCourse(admin);
		ICourse course = CourseFactory.loadCourse(courseEntry);
		dbInstance.intermediateCommit();

		//create a folder
		CourseNode rootNode = course.getRunStructure().getRootNode();
		CourseNodeConfiguration newNodeConfig = CourseNodeFactory.getInstance().getCourseNodeConfiguration("bc");
		CourseNode bcNode = newNodeConfig.getInstance(rootNode);
		bcNode.setShortTitle("Folder");
		bcNode.setLearningObjectives("Folder objectives");
		bcNode.setNoAccessExplanation("You don't have access");
		course.getEditorTreeModel().addCourseNode(bcNode, rootNode);

		CourseFactory.publishCourse(course, RepositoryEntryStatusEnum.published, true, false, admin, Locale.ENGLISH);

		return new CourseWithBC(course, bcNode);
	}

	/** Simple pair of a deployed course and its folder (bc) node. */
	private static class CourseWithBC {
		private final ICourse course;
		private final CourseNode bcNode;

		public CourseWithBC(ICourse course, CourseNode bcNode) {
			this.course = course;
			this.bcNode = bcNode;
		}
	}
}
package DataStructure.tree.binaryTree; import DataStructure.tree.Tree; import DataStructure.tree.binaryTree.binaryTreeRealize.BinaryTreeImpl; /** * @author liujun * @date 2020-02-18 18:44 * @description 根据二叉树 创建(二叉树序列化) * 按层: * 1、按层遍历二叉树 输出数组 * S形: * 1、S形遍历二叉树 输出数组 * 按序: * 1、先序遍历二叉树 输出数组 * 2、中序遍历二叉树 输出数组 * 3、后序遍历二叉树 输出数组 */ public interface BinaryTree2Array extends Tree { //按层遍历二叉树 输出数组 public int[] Binary2ArrayLevel(BinaryTreeImpl root); //S形遍历二叉树 输出数组 public int[] Binary2ArrayS(BinaryTreeImpl root); //先序遍历二叉树 输出数组 public int[] Binary2ArrayPreOrder(BinaryTreeImpl root); //先序遍历二叉树 输出数组(特殊符号) public int[] Binary2ArrayPreOrder_Symbol(BinaryTreeImpl root); //中序遍历二叉树 输出数组 public int[] Binary2ArrayInOrder(BinaryTreeImpl root); //中序遍历二叉树 输出数组(特殊符号) public int[] Binary2ArrayInOrder_Symbol(BinaryTreeImpl root); //后序遍历二叉树 输出数组 public int[] Binary2ArrayPostOrder(BinaryTreeImpl root); //后序遍历二叉树 输出数组(特殊符号) public int[] Binary2ArrayPostOrder_Symbol(BinaryTreeImpl root); default boolean check(BinaryTreeImpl root) { return root == null; } }
package io.indexr.util;

import java.util.function.Supplier;

// Lazy initialization via a self-replacing supplier; pattern from
// http://stackoverflow.com/a/29141814
public class Lazily {

    /** Non-instantiable utility class. */
    private Lazily() {
    }

    /**
     * A supplier whose {@link #init()} expression is expected to overwrite the
     * field holding it with a constant {@link Lazily#value(Object)} supplier on
     * first evaluation, so subsequent {@link #get()} calls skip the expensive
     * initialization. Not thread-safe by itself: concurrent first calls may run
     * the initializer more than once.
     */
    @FunctionalInterface
    public interface Lazy<T> extends Supplier<T> {
        Supplier<T> init();

        @Override
        default T get() {
            return init().get();
        }
    }

    /**
     * Narrows a {@link Lazy} lambda to a plain {@link Supplier} so it can be
     * stored in a {@code Supplier} field.
     *
     * @param lazy the self-replacing initializer
     * @return the same supplier, unchanged
     */
    public static <U> Supplier<U> lazily(Lazy<U> lazy) {
        return lazy;
    }

    /**
     * Wraps an already-computed value in a constant supplier.
     *
     * @param value the value to return from every {@code get()}
     */
    public static <T> Supplier<T> value(T value) {
        return () -> value;
    }

    /** Usage example.

     private static class Baz {
     }

     private static Baz createBaz() {
     return new Baz();
     }

     private Supplier<Baz> fieldBaz = Lazily.lazily(() -> fieldBaz = Lazily.value(createBaz()));
     */
}
/**Project name : Melite-Meu (MovieTicketingSystem) Roll no: 0147-BSCS-2019 (Section C) Description: THIS CLASS DEALS WITH THE CASH PAYMENT SYSTEM used classes : Customer, LoginServer, Movie, Hall, InputOutputClass **/ package PaymentPackage; import MainPackage.Customer; import MainPackage.LoginServer; import MoviePackage.Movie; import PrimitiveClassesPackage.Hall; import UtilityPackage.InputOutputClass; public class CashPayment extends Payment { /**THIS IS A SINGLE IMPLEMENTATION FOR THE MOVIE ONLY**/ public boolean pay(Customer customer, Movie movie) { if (!(customer.getCashBalance() - movie.getTicketPrice() >= 0)) { return false; } var newBalance = customer.getCashBalance() - movie.getTicketPrice(); customer.setCashBalance(newBalance); /**UPDATING OUR USER**/ try { InputOutputClass.writeUser(LoginServer.usersList); } catch (Exception e) { e.printStackTrace(); } return true; } /***PrimitiveClassesPackage.Hall implementation*/ public boolean pay(Customer customer, Hall hall) { if (!(customer.getCashBalance() - hall.getHallCharges() >= 0)) { return false; } var newBalance = customer.getCashBalance() - hall.getHallCharges(); customer.setCashBalance(newBalance); /**UPDATING OUR USER**/ try { InputOutputClass.writeUser(LoginServer.usersList); } catch (Exception e) { e.printStackTrace(); } return true; } /***NEW HALL IMPLEMENATION**/ public boolean pay(Double hallPrice) { System.out.println("New placeholder tick price: " + hallPrice); /* if (!(MainPackage.LoginServer.currentUser.getCashBalance() - ticket.getHall().getHallPrice() >= 0)) { return false; }*/ var newBalance = LoginServer.currentUser.getCashBalance() - hallPrice; LoginServer.currentUser.setCashBalance(newBalance); /**UPDATING OUR USER**/ try { InputOutputClass.writeUser(LoginServer.usersList); } catch (Exception e) { e.printStackTrace(); } return true; } /***Food implementation**/ public boolean pay (Customer customer, double price) { if (!(customer.getCashBalance() - price >= 0)) { return false; } var 
newBalance = customer.getCashBalance() - price; customer.setCashBalance(newBalance); /**UPDATING OUR USER**/ try { InputOutputClass.writeUser(LoginServer.usersList); } catch (Exception e) { e.printStackTrace(); } return true; } /* public Double getAmount () { return amount; }*/ }
/*
 * Copyright 2019 Red Hat, Inc. and/or its affiliates.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *       http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.kie.workbench.common.stunner.bpmn.client.marshall.converters.tostunner.sequenceflows;

import java.util.Map;

import org.kie.workbench.common.stunner.bpmn.client.marshall.MarshallingMessageKeys;
import org.kie.workbench.common.stunner.bpmn.client.marshall.converters.Result;
import org.kie.workbench.common.stunner.bpmn.client.marshall.converters.TypedFactoryManager;
import org.kie.workbench.common.stunner.bpmn.client.marshall.converters.tostunner.BpmnEdge;
import org.kie.workbench.common.stunner.bpmn.client.marshall.converters.tostunner.BpmnNode;
import org.kie.workbench.common.stunner.bpmn.client.marshall.converters.tostunner.EdgeConverter;
import org.kie.workbench.common.stunner.bpmn.client.marshall.converters.tostunner.properties.PropertyReaderFactory;
import org.kie.workbench.common.stunner.bpmn.client.marshall.converters.tostunner.properties.SequenceFlowPropertyReader;
import org.kie.workbench.common.stunner.bpmn.definition.SequenceFlow;
import org.kie.workbench.common.stunner.bpmn.definition.property.common.ConditionExpression;
import org.kie.workbench.common.stunner.bpmn.definition.property.connectors.Priority;
import org.kie.workbench.common.stunner.bpmn.definition.property.connectors.SequenceFlowExecutionSet;
import org.kie.workbench.common.stunner.bpmn.definition.property.general.BPMNGeneralSet;
import org.kie.workbench.common.stunner.bpmn.definition.property.general.Documentation;
import org.kie.workbench.common.stunner.bpmn.definition.property.general.Name;
import org.kie.workbench.common.stunner.core.graph.Edge;
import org.kie.workbench.common.stunner.core.graph.Node;
import org.kie.workbench.common.stunner.core.graph.content.view.View;

/**
 * Converts an Eclipse BPMN2 {@code SequenceFlow} element into a Stunner edge,
 * copying its general properties (name, documentation) and execution set
 * (priority, condition expression) from the property reader.
 */
public class SequenceFlowConverter implements EdgeConverter<org.eclipse.bpmn2.SequenceFlow> {

    // Both collaborators are injected once and never reassigned.
    private final PropertyReaderFactory propertyReaderFactory;
    private final TypedFactoryManager factoryManager;

    public SequenceFlowConverter(TypedFactoryManager factoryManager, PropertyReaderFactory propertyReaderFactory) {
        this.factoryManager = factoryManager;
        this.propertyReaderFactory = propertyReaderFactory;
    }

    @Override
    public Result<BpmnEdge> convertEdge(org.eclipse.bpmn2.SequenceFlow seq, Map<String, BpmnNode> nodes) {
        // Create a fresh Stunner edge carrying a SequenceFlow definition.
        Edge<View<SequenceFlow>, Node> edge = factoryManager.newEdge(seq.getId(), SequenceFlow.class);
        SequenceFlow definition = edge.getContent().getDefinition();

        SequenceFlowPropertyReader p = propertyReaderFactory.of(seq);
        definition.setGeneral(new BPMNGeneralSet(
                new Name(p.getName()),
                new Documentation(p.getDocumentation())
        ));
        definition.setExecutionSet(new SequenceFlowExecutionSet(
                new Priority(p.getPriority()),
                new ConditionExpression(p.getConditionExpression())
        ));

        // result(...) resolves the source/target nodes and reports a marshalling
        // message when the flow has to be ignored (e.g. missing endpoints).
        return result(nodes, edge, p, "Sequence Flow ignored from " + p.getSourceId() + " to " + p.getTargetId(), MarshallingMessageKeys.sequenceFlowIgnored);
    }
}
/*
 * Copyright 2012-2022 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.boot.actuate.autoconfigure.health;

import java.util.Collection;

import org.springframework.boot.actuate.autoconfigure.endpoint.condition.ConditionalOnAvailableEndpoint;
import org.springframework.boot.actuate.autoconfigure.endpoint.expose.EndpointExposure;
import org.springframework.boot.actuate.endpoint.web.EndpointMapping;
import org.springframework.boot.actuate.endpoint.web.ExposableWebEndpoint;
import org.springframework.boot.actuate.endpoint.web.WebEndpointsSupplier;
import org.springframework.boot.actuate.endpoint.web.WebServerNamespace;
import org.springframework.boot.actuate.endpoint.web.reactive.AdditionalHealthEndpointPathsWebFluxHandlerMapping;
import org.springframework.boot.actuate.health.HealthEndpoint;
import org.springframework.boot.actuate.health.HealthEndpointGroups;
import org.springframework.boot.actuate.health.ReactiveHealthContributorRegistry;
import org.springframework.boot.actuate.health.ReactiveHealthEndpointWebExtension;
import org.springframework.boot.autoconfigure.condition.ConditionalOnBean;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.boot.autoconfigure.condition.ConditionalOnWebApplication;
import org.springframework.boot.autoconfigure.condition.ConditionalOnWebApplication.Type;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

/**
 * Configuration for {@link HealthEndpoint} reactive web extensions.
 *
 * @author Phillip Webb
 * @author Madhura Bhave
 * @see HealthEndpointAutoConfiguration
 */
@Configuration(proxyBeanMethods = false)
@ConditionalOnWebApplication(type = Type.REACTIVE)
@ConditionalOnAvailableEndpoint(endpoint = HealthEndpoint.class,
		exposure = { EndpointExposure.WEB, EndpointExposure.CLOUD_FOUNDRY })
class HealthEndpointReactiveWebExtensionConfiguration {

	@Bean
	@ConditionalOnMissingBean
	@ConditionalOnBean(HealthEndpoint.class)
	ReactiveHealthEndpointWebExtension reactiveHealthEndpointWebExtension(
			ReactiveHealthContributorRegistry reactiveHealthContributorRegistry, HealthEndpointGroups groups,
			HealthEndpointProperties properties) {
		return new ReactiveHealthEndpointWebExtension(reactiveHealthContributorRegistry, groups,
				properties.getLogging().getSlowIndicatorThreshold());
	}

	@Configuration(proxyBeanMethods = false)
	@ConditionalOnAvailableEndpoint(endpoint = HealthEndpoint.class, exposure = EndpointExposure.WEB)
	static class WebFluxAdditionalHealthEndpointPathsConfiguration {

		@Bean
		AdditionalHealthEndpointPathsWebFluxHandlerMapping healthEndpointWebFluxHandlerMapping(
				WebEndpointsSupplier webEndpointsSupplier, HealthEndpointGroups groups) {
			Collection<ExposableWebEndpoint> webEndpoints = webEndpointsSupplier.getEndpoints();
			// Fail with a descriptive error instead of a bare NoSuchElementException
			// (Optional.get) when the health endpoint is unexpectedly missing.
			ExposableWebEndpoint health = webEndpoints.stream()
					.filter((endpoint) -> endpoint.getEndpointId().equals(HealthEndpoint.ID))
					.findFirst()
					.orElseThrow(() -> new IllegalStateException(
							"No endpoint with id '" + HealthEndpoint.ID + "' found"));
			return new AdditionalHealthEndpointPathsWebFluxHandlerMapping(new EndpointMapping(""), health,
					groups.getAllWithAdditionalPath(WebServerNamespace.SERVER));
		}

	}

}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.karaf.bundle.command;

import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.apache.felix.utils.manifest.Clause;
import org.apache.felix.utils.manifest.Parser;
import org.apache.felix.utils.version.VersionRange;
import org.apache.felix.utils.version.VersionTable;
import org.apache.karaf.bundle.command.bundletree.Node;
import org.apache.karaf.bundle.command.bundletree.Tree;
import org.apache.karaf.shell.api.action.Command;
import org.apache.karaf.shell.api.action.Option;
import org.apache.karaf.shell.api.action.lifecycle.Service;
import org.osgi.framework.Bundle;
import org.osgi.framework.Constants;
import org.osgi.framework.wiring.BundleCapability;
import org.osgi.framework.wiring.BundleRevision;
import org.osgi.framework.wiring.BundleRevisions;
import org.osgi.framework.wiring.BundleWire;
import org.osgi.framework.wiring.BundleWiring;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import static java.lang.String.format;

/**
 * Command for showing the full tree of bundles that have been used to resolve
 * a given bundle.
 */
@Command(scope = "bundle", name = "tree-show", description = "Shows the tree of bundles based on the wiring information.")
@Service
public class ShowBundleTree extends BundleCommand {

    private static final Logger LOGGER = LoggerFactory.getLogger(ShowBundleTree.class);

    @Option(name = "-v", aliases = { "--version" }, description = "Show bundle versions")
    private boolean versions;

    // Dependency tree of the bundle currently being processed.
    private Tree<Bundle> tree;

    @Override
    protected Object doExecute(Bundle bundle) throws Exception {
        long start = System.currentTimeMillis();
        // let's do the real work here
        printHeader(bundle);
        tree = new Tree<>(bundle);
        createTree(bundle);
        printTree(tree);
        printDuplicatePackages(tree);
        LOGGER.debug(format("Dependency tree calculated in %d ms",
                            System.currentTimeMillis() - start));
        return null;
    }

    /**
     * Return a String representation of a bundle state
     */
    private String getState(Bundle bundle) {
        switch (bundle.getState()) {
            case Bundle.UNINSTALLED:
                return "UNINSTALLED";
            case Bundle.INSTALLED:
                return "INSTALLED";
            case Bundle.RESOLVED:
                return "RESOLVED";
            case Bundle.STARTING:
                return "STARTING";
            case Bundle.STOPPING:
                return "STOPPING";
            case Bundle.ACTIVE:
                return "ACTIVE";
            default:
                return "UNKNOWN";
        }
    }

    /*
     * Print the header
     */
    private void printHeader(Bundle bundle) {
        System.out.printf("Bundle %s [%s] is currently %s%n",
                          bundle.getSymbolicName(),
                          bundle.getBundleId(),
                          getState(bundle));
    }

    /*
     * Print the dependency tree
     */
    private void printTree(Tree<Bundle> tree) {
        System.out.printf("%n");
        tree.write(System.out, node -> {
            if (versions) {
                return String.format("%s / [%s] [%s]",
                                     node.getValue().getSymbolicName(),
                                     node.getValue().getVersion().toString(),
                                     node.getValue().getBundleId());
            } else {
                return String.format("%s [%s]",
                                     node.getValue().getSymbolicName(),
                                     node.getValue().getBundleId());
            }
        });
    }

    /*
     * Check for bundles in the tree exporting the same package
     * as a possible cause for 'Unresolved constraint...' on a uses-conflict
     */
    private void printDuplicatePackages(Tree<Bundle> tree) {
        Set<Bundle> bundles = tree.flatten();
        // package name -> set of bundles providing a wire for that package
        Map<String, Set<Bundle>> exports = new HashMap<>();

        for (Bundle bundle : bundles) {
            for (BundleRevision revision : bundle.adapt(BundleRevisions.class).getRevisions()) {
                BundleWiring wiring = revision.getWiring();
                if (wiring != null) {
                    List<BundleWire> wires = wiring.getProvidedWires(BundleRevision.PACKAGE_NAMESPACE);
                    if (wires != null) {
                        for (BundleWire wire : wires) {
                            String name = wire.getCapability().getAttributes().get(BundleRevision.PACKAGE_NAMESPACE).toString();
                            exports.computeIfAbsent(name, k -> new HashSet<>()).add(bundle);
                        }
                    }
                }
            }
        }

        // iterate entries directly instead of keySet() + repeated get()
        for (Map.Entry<String, Set<Bundle>> export : exports.entrySet()) {
            if (export.getValue().size() > 1) {
                System.out.printf("%n");
                System.out.printf("WARNING: multiple bundles are exporting package %s%n", export.getKey());
                for (Bundle bundle : export.getValue()) {
                    System.out.printf("- %s%n", bundle);
                }
            }
        }
    }

    /*
     * Creates the bundle tree
     */
    protected void createTree(Bundle bundle) {
        if (bundle.getState() >= Bundle.RESOLVED) {
            // resolved bundle: use the actual wiring information
            createNode(tree);
        } else {
            // unresolved bundle: approximate the tree from the manifest imports
            createNodesForImports(tree, bundle);
            System.out.print("\nWarning: the below tree is a rough approximation of a possible resolution");
        }
    }

    /*
     * Creates nodes for the imports of the bundle (instead of reporting wiring information
     */
    private void createNodesForImports(Node<Bundle> node, Bundle bundle) {
        Clause[] imports = Parser.parseHeader(bundle.getHeaders().get("Import-Package"));
        Clause[] exports = Parser.parseHeader(bundle.getHeaders().get("Export-Package"));
        for (Clause i : imports) {
            // skip packages the bundle also exports itself (substitutable imports)
            boolean exported = false;
            for (Clause e : exports) {
                if (e.getName().equals(i.getName())) {
                    exported = true;
                    break;
                }
            }
            if (!exported) {
                createNodeForImport(node, bundle, i);
            }
        }
    }

    /*
     * Create a child node for a given import (by finding a matching export in the currently installed bundles)
     */
    private void createNodeForImport(Node<Bundle> node, Bundle bundle, Clause i) {
        VersionRange range = VersionRange.parseVersionRange(i.getAttribute(Constants.VERSION_ATTRIBUTE));
        boolean foundMatch = false;
        for (Bundle b : bundleContext.getBundles()) {
            BundleWiring wiring = b.adapt(BundleWiring.class);
            if (wiring != null) {
                List<BundleCapability> caps = wiring.getCapabilities(BundleRevision.PACKAGE_NAMESPACE);
                if (caps != null) {
                    for (BundleCapability cap : caps) {
                        String n = getAttribute(cap, BundleRevision.PACKAGE_NAMESPACE);
                        String v = getAttribute(cap, Constants.VERSION_ATTRIBUTE);
                        if (i.getName().equals(n) && range.contains(VersionTable.getVersion(v))) {
                            boolean existing = tree.flatten().contains(b);
                            System.out.printf("- import %s: resolved using %s%n", i, b);
                            foundMatch = true;
                            if (!node.hasChild(b)) {
                                Node<Bundle> child = node.addChild(b);
                                if (!existing) {
                                    createNode(child);
                                }
                            }
                        }
                    }
                }
            }
        }
        if (!foundMatch) {
            System.out.printf("- import %s: WARNING - unable to find matching export%n", i);
        }
    }

    // Null-safe lookup of a capability attribute as a String.
    private String getAttribute(BundleCapability capability, String name) {
        Object o = capability.getAttributes().get(name);
        return o != null ? o.toString() : null;
    }

    /*
     * Creates a node in the bundle tree
     */
    private void createNode(Node<Bundle> node) {
        Bundle bundle = node.getValue();

        Collection<Bundle> exporters = new HashSet<>(bundleService.getWiredBundles(bundle).values());

        for (Bundle exporter : exporters) {
            if (node.hasAncestor(exporter)) {
                LOGGER.debug(format("Skipping %s (already exists in the current branch)", exporter));
            } else {
                boolean existing = tree.flatten().contains(exporter);
                LOGGER.debug(format("Adding %s as a dependency for %s", exporter, bundle));
                Node<Bundle> child = node.addChild(exporter);
                if (existing) {
                    LOGGER.debug(format("Skipping children of %s (already exists in another branch)", exporter));
                } else {
                    createNode(child);
                }
            }
        }
    }
}
/*
 * DBeaver - Universal Database Manager
 * Copyright (C) 2010-2019 Serge Rider (serge@jkiss.org)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jkiss.dbeaver.registry;

import org.jkiss.dbeaver.model.DBConstants;
import org.jkiss.dbeaver.model.impl.preferences.SimplePreferenceStore;
import org.jkiss.dbeaver.runtime.DBWorkbench;
import org.jkiss.utils.CommonUtils;

import java.io.IOException;
import java.util.Map;

/**
 * Preference store scoped to a single data source. Falls back to the
 * platform-wide preference store for anything not overridden here, and seeds
 * its defaults from the driver's connection property overrides.
 */
public class DataSourcePreferenceStore extends SimplePreferenceStore {

    private final DataSourceDescriptor dataSourceDescriptor;

    DataSourcePreferenceStore(DataSourceDescriptor dataSourceDescriptor) {
        super(DBWorkbench.getPlatform().getPreferenceStore());
        this.dataSourceDescriptor = dataSourceDescriptor;

        // Copy driver-level defaults whose keys carry the driver-prop prefix,
        // stripping the prefix before storing them as plain default properties.
        final String prefix = DBConstants.DEFAULT_DRIVER_PROP_PREFIX;
        Map<Object, Object> driverDefaults = dataSourceDescriptor.getDriver().getDefaultConnectionProperties();
        driverDefaults.forEach((key, value) -> {
            String name = CommonUtils.toString(key);
            if (name.startsWith(prefix)) {
                getDefaultProperties().put(
                    name.substring(prefix.length()),
                    CommonUtils.toString(value));
            }
        });
    }

    /** The data source this store belongs to. */
    public DataSourceDescriptor getDataSourceDescriptor() {
        return dataSourceDescriptor;
    }

    /** Saving this store means flushing the owning registry's configuration. */
    @Override
    public void save() throws IOException {
        dataSourceDescriptor.getRegistry().flushConfig();
    }
}
package com.evolveum.midpoint.schrodinger.page.configuration;

import com.evolveum.midpoint.schrodinger.page.BasicPage;
import org.openqa.selenium.By;

import java.io.File;

import static com.codeborne.selenide.Selenide.$;
import static com.evolveum.midpoint.schrodinger.util.Utils.setOptionChecked;

/**
 * Page object for the midPoint "Import object" configuration page: toggling
 * import options, choosing the XML source (file or embedded editor) and
 * triggering the import.
 *
 * Created by Viliam Repan (lazyman).
 */
public class ImportObjectPage extends BasicPage {

    /**
     * Toggles one checkbox of the import-options panel; every option id on the
     * page shares the "importOptions:" prefix.
     */
    private ImportObjectPage setImportOption(String option, boolean checked) {
        setOptionChecked("importOptions:" + option, checked);
        return this;
    }

    public ImportObjectPage checkProtectedByEncryption() {
        return setImportOption("protectedByEncryption", true);
    }

    public ImportObjectPage checkFetchResourceSchema() {
        return setImportOption("fetchResourceSchema", true);
    }

    public ImportObjectPage checkKeepOid() {
        return setImportOption("keepOid", true);
    }

    public ImportObjectPage checkOverwriteExistingObject() {
        return setImportOption("overwriteExistingObject", true);
    }

    public ImportObjectPage checkReferentialIntegrity() {
        return setImportOption("referentialIntegrity", true);
    }

    public ImportObjectPage checkSummarizeSuccesses() {
        return setImportOption("summarizeSuccesses", true);
    }

    public ImportObjectPage checkValidateDynamicSchema() {
        return setImportOption("validateDynamicSchema", true);
    }

    public ImportObjectPage checkValidateStaticSchema() {
        return setImportOption("validateStaticSchema", true);
    }

    public ImportObjectPage checkSummarizeErrors() {
        return setImportOption("summarizeErrors", true);
    }

    public ImportObjectPage uncheckProtectedByEncryption() {
        return setImportOption("protectedByEncryption", false);
    }

    public ImportObjectPage uncheckFetchResourceSchema() {
        return setImportOption("fetchResourceSchema", false);
    }

    public ImportObjectPage uncheckKeepOid() {
        return setImportOption("keepOid", false);
    }

    public ImportObjectPage uncheckOverwriteExistingObject() {
        return setImportOption("overwriteExistingObject", false);
    }

    public ImportObjectPage uncheckReferentialIntegrity() {
        return setImportOption("referentialIntegrity", false);
    }

    public ImportObjectPage uncheckSummarizeSuccesses() {
        return setImportOption("summarizeSuccesses", false);
    }

    public ImportObjectPage uncheckValidateDynamicSchema() {
        return setImportOption("validateDynamicSchema", false);
    }

    public ImportObjectPage uncheckValidateStaticSchema() {
        return setImportOption("validateStaticSchema", false);
    }

    public ImportObjectPage uncheckSummarizeErrors() {
        return setImportOption("summarizeErrors", false);
    }

    /** Sets the "stop after errors exceed" field; {@code null} clears it. */
    public ImportObjectPage stopAfterErrorsExceed(Integer count) {
        String c = count == null ? "" : count.toString();
        $(By.name("importOptions:errors")).setValue(c);
        return this;
    }

    public ImportObjectPage getObjectsFromFile() {
        $(By.name("importRadioGroup")).selectRadio("radio0");
        return this;
    }

    public ImportObjectPage getObjectsFromEmbeddedEditor() {
        $(By.name("importRadioGroup")).selectRadio("radio1");
        return this;
    }

    public ImportObjectPage chooseFile(File file) {
        $(By.name("input:inputFile:fileInput")).uploadFile(file);
        //todo implement
        return this;
    }

    public ImportObjectPage setEditorText(String text) {
        // TODO implement: the Ace editor does not react to plain DOM value
        // injection; earlier attempts (setting the textarea value via JS and
        // dispatching "input" events, clicking/sendKeys on .ace_content) all
        // failed. Currently a no-op.
        return this;
    }

    public ImportObjectPage clickImport() {
        $(".main-button-bar").$x("//a[@about='importFileButton']").click();
        return this;
    }
}
package org.pinpoint.bench4q.bookstore.interceptor;

import java.security.ProtectionDomain;

import org.pinpoint.bench4q.sample.PiggyMetricsConstants;

import com.navercorp.pinpoint.bootstrap.instrument.InstrumentClass;
import com.navercorp.pinpoint.bootstrap.instrument.InstrumentException;
import com.navercorp.pinpoint.bootstrap.instrument.InstrumentMethod;
import com.navercorp.pinpoint.bootstrap.instrument.Instrumentor;
import com.navercorp.pinpoint.bootstrap.instrument.transformer.TransformCallback;

import static com.navercorp.pinpoint.common.util.VarArgs.va;

/**
 * Pinpoint transform callback that instruments the product-detail servlet:
 * wraps its {@code doGet} method with the stock BasicMethodInterceptor so the
 * call shows up in traces under our custom service type.
 */
public class Product_detail_servlet_Interceptor implements TransformCallback {

    @Override
    public byte[] doInTransform(Instrumentor instrumentor, ClassLoader classLoader, String className,
            Class<?> classBeingRedefined, ProtectionDomain protectionDomain, byte[] classfileBuffer)
            throws InstrumentException {
        // Resolve an editable view of the class being transformed.
        InstrumentClass servletClass = instrumentor.getInstrumentClass(classLoader, className, classfileBuffer);

        // Locate doGet(HttpServletRequest, HttpServletResponse).
        InstrumentMethod doGet = servletClass.getDeclaredMethod("doGet",
                "javax.servlet.http.HttpServletRequest", "javax.servlet.http.HttpServletResponse");

        // Attach the interceptor by FQN; remaining varargs feed its constructor.
        doGet.addInterceptor("com.navercorp.pinpoint.bootstrap.interceptor.BasicMethodInterceptor",
                va(PiggyMetricsConstants.MY_SERVICE_TYPE));

        // Hand back the rewritten byte code.
        return servletClass.toBytecode();
    }
}
package com.alinz.parkerdan.shareextension;

import com.facebook.react.bridge.ReactContextBaseJavaModule;
import com.facebook.react.bridge.ReactApplicationContext;
import com.facebook.react.bridge.Promise;
import com.facebook.react.bridge.ReactMethod;
import com.facebook.react.bridge.WritableMap;
import com.facebook.react.bridge.WritableArray;
import com.facebook.react.bridge.Arguments;

import android.app.Activity;
import android.content.Intent;
import android.net.Uri;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.os.Environment;
import android.util.Log;

import java.io.File;
import java.io.FileOutputStream;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.HashSet;
import java.util.Set;
import java.util.ArrayList;
import java.text.SimpleDateFormat;
import java.util.Date;

/**
 * Assorted media-related constants. Only the JPEG/PNG encoding ids,
 * extensions, and the JPEG MIME type are used by {@code ShareModule} below.
 */
class CameraConstants {
    public static final int PICTURE = 0;
    public static final int VIDEO = 1;
    // Encoding type ids accepted by ShareModule.createTempCaptureFile(int).
    public static final int JPEG = 0;
    public static final int PNG = 1;
    public static final String JPEG_EXTENSION = ".jpg";
    public static final String PNG_EXTENSION = ".png";
    public static final String MP4_EXTENSION = ".mp4";
    public static final String JPEG_MIME_TYPE = "image/jpeg";
    public static final String PNG_MIME_TYPE = "image/png";
    public static final String MP4_MIME_TYPE = "video/mp4";
    public static final String TIME_FORMAT = "yyyyMMdd_HHmmss";
    public static final int FOCUS_INDICATOR_FADE_DURATION = 750;
    public static final int ZOOM_INDICATOR_FADE_DURATION = 1500;
    public static final int DEFAULT_MAX_VIDEO_DURATION = 120 * 1000;
    public static final int CIF = 0;
    public static final int QUALITY_480P = 1;
    public static final int QUALITY_720P = 2;
    public static final int QUALITY_1080P = 3;
    public static final int DEFAULT_THUMBNAIL_SIZE = 300;
    public static final String GALLERY_COPY_PREFIX = "smx_gallery_copy_";
}

/**
 * React Native bridge module (exposed to JS as "ReactNativeShareExtension")
 * that receives Android share intents (ACTION_SEND / ACTION_SEND_MULTIPLE),
 * re-encodes shared images into a temp cache as JPEG when needed, and returns
 * the resulting {type, value} pairs to JavaScript.
 */
public class ShareModule extends ReactContextBaseJavaModule {

    // Name of the sub-directory of the cache dir holding temporary media copies.
    private static final String TEMP_TAG = "temp";

    // NOTE(review): static reference to the React application context, assigned
    // in the constructor so the static file helpers can reach it. Static
    // context references are a known Android leak pattern — presumably benign
    // here since the React context lives as long as the bridge, but confirm.
    private static ReactApplicationContext context;

    public ShareModule(ReactApplicationContext reactContext) {
        super(reactContext);
        ShareModule.context = reactContext;
    }

    /** Module name visible to the JavaScript side. */
    @Override
    public String getName() {
        return "ReactNativeShareExtension";
    }

    /** Clears the temp cache, then finishes the hosting share activity. */
    @ReactMethod
    public void close() {
        clearCache();
        getCurrentActivity().finish();
    }

    /** Resolves the promise with the parsed share-intent payload. */
    @ReactMethod
    public void data(Promise promise) {
        promise.resolve(processIntent());
    }

    // True when the MIME type string mentions "image" (e.g. "image/png").
    private boolean isImage(String type) {
        if (type != null && type.contains("image")) {
            return true;
        } else {
            return false;
        }
    }

    /** ==== File Helper Functions ==== */

    /**
     * Returns the app cache directory path, preferring external storage when
     * mounted and falling back to internal storage otherwise.
     */
    public static String getTempDirectoryPath() {
        File cache = null;

        // SD Card Mounted
        if (Environment.getExternalStorageState().equals(Environment.MEDIA_MOUNTED)) {
            cache = ShareModule.context.getExternalCacheDir();
        }
        // Use internal storage
        else {
            cache = ShareModule.context.getCacheDir();
        }

        // Create the cache directory if it doesn't exist
        cache.mkdirs();
        return cache.getAbsolutePath();
    }

    /**
     * Returns the "temp" sub-directory of the cache dir, creating it on
     * demand; null when it cannot be created.
     */
    public static File getTempCacheDirectory() {
        File mediaStorageDir = new File(getTempDirectoryPath(), TEMP_TAG);
        if (!mediaStorageDir.exists()) {
            if (!mediaStorageDir.mkdirs()) {
                return null;
            }
        }
        return mediaStorageDir;
    }

    /**
     * Deletes every file inside the temp cache directory, then the directory
     * itself. Returns false on any failure.
     */
    public static boolean clearCache() {
        File cacheDirectory = getTempCacheDirectory();
        try {
            if (cacheDirectory != null && cacheDirectory.isDirectory()) {
                for (File child: cacheDirectory.listFiles()) {
                    child.delete();
                }
            }
            // NOTE(review): when cacheDirectory is null this line throws NPE,
            // which the catch below converts into a false return.
            return cacheDirectory.delete();
        } catch (Exception e) {
            return false;
        }
    }

    /** Timestamp-named variant of {@link #createTempCaptureFile(int, String)}. */
    public static File createTempCaptureFile(int encodingType) {
        return createTempCaptureFile(encodingType, "");
    }

    /**
     * Builds a File in the temp cache dir named after {@code fileName} (or a
     * millisecond timestamp when empty) with the extension matching the
     * encoding type.
     *
     * @throws IllegalArgumentException for encoding types other than JPEG/PNG
     */
    public static File createTempCaptureFile(int encodingType, String fileName) {
        String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmssSSSS").format(new Date());
        if (fileName.isEmpty()) {
            fileName = timeStamp;
        }

        if (encodingType == CameraConstants.JPEG) {
            fileName = fileName + CameraConstants.JPEG_EXTENSION;
        } else if (encodingType == CameraConstants.PNG) {
            fileName = fileName + CameraConstants.PNG_EXTENSION;
        } else {
            throw new IllegalArgumentException("Invalid Encoding Type: " + encodingType);
        }

        return new File(getTempCacheDirectory(), fileName);
    }

    /**
     * Re-encodes the shared image as a JPEG copy in the temp cache and
     * returns a map pointing at the copy; on any error falls back to the
     * original type/path unchanged.
     */
    public WritableMap convertImage(Activity currentActivity, String type, String path, Uri uri) {
        // make a jpeg copy of the image in the cache directory, return that URI instead
        WritableMap dataMap = Arguments.createMap();
        try {
            // NOTE(review): decodeFile can return null for unreadable input;
            // the resulting NPE is swallowed by the catch below (fallback path).
            Bitmap bitmap = BitmapFactory.decodeFile(RealPathUtil.getRealPathFromURI(currentActivity, uri));
            File outputFile = createTempCaptureFile(CameraConstants.JPEG);
            OutputStream outStream = new FileOutputStream(outputFile);
            bitmap.compress(Bitmap.CompressFormat.JPEG, 100, outStream);
            outStream.flush();
            outStream.close();
            dataMap.putString("type", CameraConstants.JPEG_MIME_TYPE);
            dataMap.putString("value", "file://" + outputFile.getAbsolutePath());
            Log.i("smx-share: succeeded", outputFile.getAbsolutePath());
        } catch (Exception e) {
            Log.e("smx-share: exception", e.getMessage());
            dataMap.putString("type", type);
            dataMap.putString("value", path);
        }
        return dataMap;
    }

    /**
     * Reads the current activity's share intent and converts it into an array
     * of {type, value} maps: plain text, a single media item, or multiple
     * media items. Unsupported intents (or no activity) yield an empty array.
     */
    public WritableArray processIntent() {
        WritableArray dataArrayMap = Arguments.createArray();
        Set<String> mediaTypesSupported = new HashSet<String>();
        // mediaTypesSupported.add("video");
        // mediaTypesSupported.add("audio");
        mediaTypesSupported.add("image");
        mediaTypesSupported.add("application/pdf");

        String type = "";
        String action = "";
        String typePart = "";

        Activity currentActivity = getCurrentActivity();
        // Drop leftovers from a previous share before writing new copies.
        clearCache();

        if (currentActivity != null) {
            Intent intent = currentActivity.getIntent();
            action = intent.getAction();
            type = intent.getType();
            if (type == null) {
                type = "";
            } else {
                // NOTE(review): assumes the MIME type contains '/' — a bare
                // type string would make indexOf return -1 and substring throw.
                typePart = type.substring(0, type.indexOf("/"));
            }

            if (Intent.ACTION_SEND.equals(action) && "text/plain".equals(type)) {
                // Plain-text share: forward the text verbatim.
                WritableMap dataMap = Arguments.createMap();
                dataMap.putString("type", type);
                dataMap.putString("value", intent.getStringExtra(Intent.EXTRA_TEXT));
                dataArrayMap.pushMap(dataMap);
            } else if (Intent.ACTION_SEND.equals(action) && (mediaTypesSupported.contains(typePart) || mediaTypesSupported.contains(type))) {
                // Single supported media item.
                Uri uri = (Uri) intent.getParcelableExtra(Intent.EXTRA_STREAM);
                String path = "file://" + RealPathUtil.getRealPathFromURI(currentActivity, uri);

                // Check the path coming from the uri for type, intent.getType() only returns image/jpeg for some reason
                if (this.isImage(type) && !path.endsWith("jpg")) {
                    WritableMap dataMap = this.convertImage(currentActivity, type, path, uri);
                    dataArrayMap.pushMap(dataMap);
                } else {
                    Log.i("smx-share: no convert", type);
                    WritableMap dataMap = Arguments.createMap();
                    dataMap.putString("type", type);
                    dataMap.putString("value", path);
                    dataArrayMap.pushMap(dataMap);
                }
            } else if (Intent.ACTION_SEND_MULTIPLE.equals(action) && (mediaTypesSupported.contains(typePart) || mediaTypesSupported.contains(type))) {
                // Multiple media items: same per-item handling as the single case.
                ArrayList<Uri> uris = intent.getParcelableArrayListExtra(Intent.EXTRA_STREAM);
                for (Uri uri : uris) {
                    String path = "file://" + RealPathUtil.getRealPathFromURI(currentActivity, uri);
                    if (this.isImage(type) && !path.endsWith("jpg")) {
                        WritableMap dataMap = this.convertImage(currentActivity, type, path, uri);
                        dataArrayMap.pushMap(dataMap);
                    } else {
                        WritableMap dataMap = Arguments.createMap();
                        dataMap.putString("type", type);
                        dataMap.putString("value", "file://" + RealPathUtil.getRealPathFromURI(currentActivity, uri));
                        dataArrayMap.pushMap(dataMap);
                    }
                }
            }
        }
        return dataArrayMap;
    }
}
package edu.nwpu.ad.client.vo;

import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;

import java.util.List;

/**
 * Request VO used by the search system to fetch AdPlan information from the
 * ad-delivery system via an inter-service (microservice) call.
 */
@Data
@NoArgsConstructor
@AllArgsConstructor
public class AdPlanGetRequest {

    // Owner of the requested ad plans.
    private Long userId;

    // Ids of the AdPlan records to retrieve.
    private List<Long> ids;
}
package spark;

import java.util.regex.Pattern;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.StorageLevels;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.api.java.function.PairFunction;
import org.apache.spark.streaming.Durations;
import org.apache.spark.streaming.api.java.JavaDStream;
import org.apache.spark.streaming.api.java.JavaPairDStream;
import org.apache.spark.streaming.api.java.JavaReceiverInputDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;

import scala.Tuple2;

import com.google.common.collect.Lists;

/**
 * Counts words in UTF8 encoded, '\n' delimited text received from the network
 * every 10 seconds.
 *
 * To run this on your local machine, you need to first run a Netcat server
 *    ncat -4lk localhost 9999
 *
 * and then run the example
 */
public final class Streaming102 {

    // Compiled once — splitting is done per line in the flatMap below.
    private static final Pattern SPACE = Pattern.compile(" ");

    public static void main(String[] args) {
        // Create the context with a 10 second batch interval.
        // BUGFIX: the app name said "Streaming101" although this class is
        // Streaming102 (copy/paste leftover); the old comment also claimed a
        // 1 second batch size, contradicting Durations.seconds(10) below.
        SparkConf sparkConf = new SparkConf().setMaster("local[*]").setAppName("Streaming102");
        JavaStreamingContext ssc = new JavaStreamingContext(sparkConf, Durations.seconds(10));

        // Receive lines of text from the local Netcat server on port 9999.
        JavaReceiverInputDStream<String> lines = ssc.socketTextStream("localhost", 9999,
                StorageLevels.MEMORY_AND_DISK_SER);

        // Split each line into words on single spaces.
        JavaDStream<String> words = lines.flatMap(new FlatMapFunction<String, String>() {
            @Override
            public Iterable<String> call(String x) {
                System.out.println(x); // debug: echo each received line
                return Lists.newArrayList(SPACE.split(x));
            }
        });

        // Classic word count: pair each word with 1, then sum per key.
        JavaPairDStream<String, Integer> wordCounts = words.mapToPair(
                new PairFunction<String, String, Integer>() {
                    @Override
                    public Tuple2<String, Integer> call(String s) {
                        return new Tuple2<String, Integer>(s, 1);
                    }
                }).reduceByKey(new Function2<Integer, Integer, Integer>() {
                    @Override
                    public Integer call(Integer i1, Integer i2) {
                        return i1 + i2;
                    }
                });

        wordCounts.print();
        ssc.start();
        ssc.awaitTermination();
    }
}
package com.ways2u.java.processor;

import com.google.auto.service.AutoService;
import com.ways2u.java.annotation.Factory;

import javax.annotation.processing.*;
import javax.lang.model.SourceVersion;
import javax.lang.model.element.*;
import javax.lang.model.type.TypeKind;
import javax.lang.model.type.TypeMirror;
import javax.lang.model.util.Elements;
import javax.lang.model.util.Types;
import javax.tools.Diagnostic;
import java.io.IOException;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.Map;
import java.util.Set;

/**
 * Annotation processor for {@link Factory}: validates every annotated class,
 * groups them by their factory group name, and generates one factory class
 * per group.
 *
 * Created by huanglong on 2016/12/19.
 */
@AutoService(Processor.class)
public class FactoryProcessor extends AbstractProcessor {

    private Types typeUtils;
    private Elements elementUtils;
    private Filer filer;
    private Messager messager;
    // Factory group name -> classes collected for that group in this round.
    private Map<String, FactoryGroupedClasses> factoryClasses = new LinkedHashMap<String, FactoryGroupedClasses>();

    @Override
    public synchronized void init(ProcessingEnvironment env) {
        super.init(env);
        typeUtils = env.getTypeUtils();
        elementUtils = env.getElementUtils();
        filer = env.getFiler();
        messager = env.getMessager();
    }

    /**
     * Collects all @Factory-annotated classes, validates them, generates the
     * grouped factory sources, and reports any problem via the Messager.
     * Always returns false so other processors may also handle the annotation.
     */
    @Override
    public boolean process(Set<? extends TypeElement> annotations, RoundEnvironment roundEnv) {
        try {
            for (Element element : roundEnv.getElementsAnnotatedWith(Factory.class)) {
                // @Factory is only meaningful on classes.
                if (element.getKind() != ElementKind.CLASS) {
                    throw new ProcessingException(element, "Only classes can be annotated with @%s",
                            Factory.class.getSimpleName());
                }

                TypeElement typeElement = (TypeElement) element;
                FactoryAnnotatedClass annotatedClass = new FactoryAnnotatedClass(typeElement);
                checkValidClass(annotatedClass);

                // Everything is fine, so try to add
                FactoryGroupedClasses factoryClass = factoryClasses.get(annotatedClass.getQualifiedFactoryGroupName());
                if (factoryClass == null) {
                    String qualifiedGroupName = annotatedClass.getQualifiedFactoryGroupName();
                    factoryClass = new FactoryGroupedClasses(qualifiedGroupName);
                    factoryClasses.put(qualifiedGroupName, factoryClass);
                }

                // Checks if id is conflicting with another @Factory annotated class with the same id
                factoryClass.add(annotatedClass);
            }

            // Generate code
            for (FactoryGroupedClasses factoryClass : factoryClasses.values()) {
                factoryClass.generateCode(elementUtils, filer);
            }
            // Clear so later rounds don't regenerate the same factories.
            factoryClasses.clear();
        } catch (ProcessingException e) {
            error(e.getElement(), e.getMessage());
        } catch (IOException e) {
            error(null, e.getMessage());
        }

        return false;
    }

    @Override
    public Set<String> getSupportedAnnotationTypes() {
        Set<String> set = new LinkedHashSet<String>();
        set.add(Factory.class.getCanonicalName());
        return set;
    }

    @Override
    public SourceVersion getSupportedSourceVersion() {
        return SourceVersion.latestSupported();
    }

    /**
     * Checks if the annotated element observes our rules: it must be public,
     * non-abstract, implement/extend the factory group type, and provide a
     * public no-argument constructor.
     *
     * @throws ProcessingException when any rule is violated
     */
    private void checkValidClass(FactoryAnnotatedClass item) throws ProcessingException {
        // Cast to TypeElement, has more type specific methods
        TypeElement classElement = item.getTypeElement();

        // The class must be public.
        if (!classElement.getModifiers().contains(Modifier.PUBLIC)) {
            throw new ProcessingException(classElement, "The class %s is not public.",
                    classElement.getQualifiedName().toString());
        }

        // Check if it's an abstract class.
        // BUGFIX: the format string ended in "@%" (no conversion character),
        // which is invalid for java.util.Formatter — "%s" is required for the
        // Factory annotation-name argument.
        if (classElement.getModifiers().contains(Modifier.ABSTRACT)) {
            throw new ProcessingException(classElement,
                    "The class %s is abstract. You can't annotate abstract classes with @%s",
                    classElement.getQualifiedName().toString(), Factory.class.getSimpleName());
        }

        // Check inheritance: Class must be childclass as specified in @Factory.type();
        TypeElement superClassElement = elementUtils.getTypeElement(item.getQualifiedFactoryGroupName());
        if (superClassElement.getKind() == ElementKind.INTERFACE) {
            // Check interface implemented
            if (!classElement.getInterfaces().contains(superClassElement.asType())) {
                // Also walk the superclass chain, e.g. MyPizza extends Pizza
                // where Pizza implements Meal.
                TypeElement currentClass = classElement;
                while (true) {
                    TypeMirror superClassType = currentClass.getSuperclass();
                    currentClass = (TypeElement) typeUtils.asElement(superClassType);
                    if (currentClass != null && currentClass.getInterfaces().contains(superClassElement.asType())) {
                        // Required super class found
                        break;
                    }
                    if (currentClass == null || superClassType.getKind() == TypeKind.NONE) {
                        throw new ProcessingException(classElement,
                                "The class %s annotated with @%s must implement the interface %s",
                                classElement.getQualifiedName().toString(), Factory.class.getSimpleName(),
                                item.getQualifiedFactoryGroupName());
                    }
                }
            }
        } else {
            // Check subclassing
            TypeElement currentClass = classElement;
            while (true) {
                TypeMirror superClassType = currentClass.getSuperclass();

                if (superClassType.getKind() == TypeKind.NONE) {
                    // Basis class (java.lang.Object) reached, so exit
                    throw new ProcessingException(classElement,
                            "The class %s annotated with @%s must inherit from %s",
                            classElement.getQualifiedName().toString(), Factory.class.getSimpleName(),
                            item.getQualifiedFactoryGroupName());
                }

                if (superClassType.toString().equals(item.getQualifiedFactoryGroupName())) {
                    // Required super class found
                    break;
                }

                // Moving up in inheritance tree
                currentClass = (TypeElement) typeUtils.asElement(superClassType);
            }
        }

        // Check if an empty public constructor is given
        for (Element enclosed : classElement.getEnclosedElements()) {
            if (enclosed.getKind() == ElementKind.CONSTRUCTOR) {
                ExecutableElement constructorElement = (ExecutableElement) enclosed;
                if (constructorElement.getParameters().size() == 0
                        && constructorElement.getModifiers().contains(Modifier.PUBLIC)) {
                    // Found an empty constructor
                    return;
                }
            }
        }

        // No empty constructor found
        throw new ProcessingException(classElement,
                "The class %s must provide an public empty default constructor",
                classElement.getQualifiedName().toString());
    }

    /**
     * Prints an error message via the Messager (the only supported way for a
     * processor to report diagnostics).
     *
     * @param e   The element which has caused the error. Can be null
     * @param msg The error message
     */
    public void error(Element e, String msg) {
        messager.printMessage(Diagnostic.Kind.ERROR, msg, e);
    }

    /** Prints a warning message via the Messager. */
    public void warn(Element e, String msg) {
        messager.printMessage(Diagnostic.Kind.WARNING, msg, e);
    }
}
/*
 * (C) Copyright 2020 Radix DLT Ltd
 *
 * Radix DLT Ltd licenses this file to you under the Apache License,
 * Version 2.0 (the "License"); you may not use this file except in
 * compliance with the License.  You may obtain a copy of the
 * License at
 *
 *  http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
 * either express or implied.  See the License for the specific
 * language governing permissions and limitations under the License.
 */

package com.radixdlt.atommodel.validators;

import nl.jqno.equalsverifier.EqualsVerifier;
import nl.jqno.equalsverifier.Warning;
import org.junit.Test;

/**
 * Verifies the equals/hashCode contract of {@code RegisteredValidatorParticle}
 * using EqualsVerifier.
 */
public class RegisteredValidatorParticleTest {

    @Test
    public void equalsContract() {
        // NONFINAL_FIELDS is suppressed — presumably the particle class has
        // non-final fields; confirm against its definition.
        EqualsVerifier.forClass(RegisteredValidatorParticle.class)
            .suppress(Warning.NONFINAL_FIELDS)
            .verify();
    }
}
/*
 * Copyright 2002-2004 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package org.springframework.binding.value.swing;

import javax.swing.JComponent;

import org.springframework.richclient.form.builder.FormComponentInterceptor;

/**
 * Recording {@code FormComponentInterceptor} for unit tests: remembers the
 * most recently processed component/label plus the property name it was
 * processed for, and counts how often each callback ran.
 *
 * @author Peter De Bruycker
 */
public class TestableFormComponentInterceptor implements FormComponentInterceptor {

    // Last component seen, its property name, and the invocation count.
    private JComponent lastComponent;
    private int componentInvocations;
    private String lastComponentProperty;

    // Last label seen, its property name, and the invocation count.
    private JComponent lastLabel;
    private int labelInvocations;
    private String lastLabelProperty;

    public JComponent getComponent() {
        return lastComponent;
    }

    public int getComponentCount() {
        return componentInvocations;
    }

    public String getComponentProperty() {
        return lastComponentProperty;
    }

    public JComponent getLabel() {
        return lastLabel;
    }

    public int getLabelCount() {
        return labelInvocations;
    }

    public String getLabelProperty() {
        return lastLabelProperty;
    }

    public void processComponent(String propertyName, JComponent component) {
        this.lastComponent = component;
        this.lastComponentProperty = propertyName;
        componentInvocations++;
    }

    public void processLabel(String propertyName, JComponent label) {
        this.lastLabel = label;
        this.lastLabelProperty = propertyName;
        labelInvocations++;
    }
}
package htmlcompiler.pojos.httpmock;

import com.sun.net.httpserver.Headers;
import com.sun.net.httpserver.HttpHandler;
import htmlcompiler.services.HttpHandlers;

import java.nio.file.Path;
import java.util.List;

import static htmlcompiler.services.HttpHandlers.*;
import static java.nio.charset.StandardCharsets.UTF_8;

/**
 * Canned HTTP response description (endpoint, status, headers, body) that can
 * be turned into an {@link HttpHandler} which replays it.
 */
public final class Request {

    public final Endpoint endpoint;
    // Recorded response to replay for this endpoint.
    private final int statusCode;
    private final List<Header> headers;
    private final String body;

    public Request(final Endpoint endpoint, final int statusCode, final List<Header> headers, final String body) {
        this.endpoint = endpoint;
        this.statusCode = statusCode;
        this.headers = headers;
        this.body = body;
    }

    /**
     * Builds a handler replaying this response. The custom status 604 means
     * "serve the file named by the Location header from one of the given
     * directories"; any other status is replayed verbatim with the stored
     * headers and body.
     */
    public static HttpHandler toHttpHandler(final Request request, final Path... directories) {
        if (request.statusCode == 604) {
            // 604 is 2 times a 302, we want to send the file at Location
            final var location = toLocationHeader(request);
            if (location == null) return HttpHandlers::send404;
            return exchange -> {
                // Serve the first directory that resolves the Location path.
                for (final Path dir : directories) {
                    final var file = toFile(dir, location.value, null);
                    if (file == null) continue;
                    sendFile(exchange, file);
                    return;
                }
                send404(exchange);
            };
        } else return exchange -> {
            final Headers responseHeaders = exchange.getResponseHeaders();
            for (final Header header : request.headers) {
                // Content-Length is recomputed from the stored body below.
                if ("Content-Length".equalsIgnoreCase(header.name)) continue;
                responseHeaders.add(header.name, header.value);
            }
            final byte[] bodyBytes = request.body.getBytes(UTF_8);
            // sendResponseHeaders treats -1 as "no response body".
            final int bodyLength = bodyBytes.length == 0 ? -1 : bodyBytes.length;
            exchange.sendResponseHeaders(request.statusCode, bodyLength);
            exchange.getResponseBody().write(bodyBytes);
            exchange.close();
        };
    }

    // First Location header (case-insensitive), or null when absent.
    private static Header toLocationHeader(final Request request) {
        for (final var header : request.headers)
            if ("Location".equalsIgnoreCase(header.name))
                return header;
        return null;
    }
}
package org.rhett.admin.shiro;

import org.apache.shiro.web.filter.authc.BasicHttpAuthenticationFilter;
import org.rhett.admin.model.constant.SysConstant;
import org.rhett.admin.model.enumeration.ResultCode;
import org.rhett.admin.model.result.Result;
import org.springframework.http.HttpStatus;
import org.springframework.web.bind.annotation.RequestMethod;

import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

/**
 * @Author Rhett
 * @Date 2021/6/24
 * @Description
 * Custom JWT filter: reads the token from the request header and submits it
 * to the Shiro realm for authentication.
 */
public class MyTokenFilter extends BasicHttpAuthenticationFilter {

    /**
     * Decides whether the user is attempting to log in.
     * It is enough to check that the token header is present.
     * @param request the request
     * @param response the response
     * @return boolean
     */
    @Override
    protected boolean isLoginAttempt(ServletRequest request, ServletResponse response) {
        HttpServletRequest req = (HttpServletRequest) request;
        String authorization = req.getHeader(SysConstant.TOKEN_HEAD);
        return authorization != null;
    }

    /**
     * Wraps the header value in a token and submits it to the realm; the
     * realm throws on failure, otherwise the login succeeded.
     */
    @Override
    protected boolean executeLogin(ServletRequest request, ServletResponse response) {
        //return super.executeLogin(request, response);
        HttpServletRequest httpServletRequest = (HttpServletRequest) request;
        String header = httpServletRequest.getHeader(SysConstant.TOKEN_HEAD);
        MyAuthenticationToken token = new MyAuthenticationToken(header);
        // Submit the token to the realm for login.
        getSubject(request, response).login(token);
        return true;
    }

    /**
     * Decides whether the request may pass. When a login attempt is detected,
     * executeLogin is tried; success returns true. Since successful paths
     * return true, access control is effectively left to controllers via
     * subject.isAuthenticated() or the @RequiresAuthentication annotation.
     * Drawback: GET/POST etc. cannot be filtered/authorized separately
     * because the framework method is overridden here.
     * @param request the request
     * @param response the response
     * @param mappedValue filter config value
     * @return boolean
     */
    @Override
    protected boolean isAccessAllowed(ServletRequest request, ServletResponse response, Object mappedValue) {
        if (isLoginAttempt(request, response)) {
            try {
                this.executeLogin(request, response);
                return true;
            } catch (Exception e) {
                // Realm rejected the token: report 401 and fall through to false.
                responseError(response, "shiro fail");
            }
        }
        return false;
    }

    /**
     * Overridden to avoid the parent class calling executeLogin a second time;
     * just issue the authentication challenge and block the request.
     * @param request the request
     * @param response the response
     * @param mappedValue filter config value
     * @return boolean
     */
    @Override
    protected boolean onAccessDenied(ServletRequest request, ServletResponse response, Object mappedValue) {
        this.sendChallenge(request, response);
        return false;
    }

    /**
     * Adds CORS headers and short-circuits OPTIONS preflight requests with
     * 200 before delegating to the normal filter chain.
     */
    @Override
    protected boolean preHandle(ServletRequest request, ServletResponse response) throws Exception {
        HttpServletRequest httpServletRequest = (HttpServletRequest) request;
        HttpServletResponse httpServletResponse = (HttpServletResponse) response;
        // NOTE(review): header name uses a lowercase 'c' ("Access-control-...");
        // HTTP header names are case-insensitive so this works, but consider
        // normalizing to "Access-Control-Allow-Origin" for consistency.
        httpServletResponse.setHeader("Access-control-Allow-Origin", httpServletRequest.getHeader("Origin"));
        httpServletResponse.setHeader("Access-Control-Allow-Methods", "GET,POST,OPTIONS,PUT,DELETE");
        httpServletResponse.setHeader("Access-Control-Allow-Headers", httpServletRequest.getHeader("Access-Control-Allow-Headers"));
        if (httpServletRequest.getMethod().equals(RequestMethod.OPTIONS.name())) {
            httpServletResponse.setStatus(HttpStatus.OK.value());
            return false;
        }
        return super.preHandle(request, response);
    }

    /** Writes a 401 JSON error body built from the common failure Result. */
    private void responseError(ServletResponse response, String msg) {
        HttpServletResponse httpResponse = (HttpServletResponse) response;
        httpResponse.setStatus(HttpStatus.UNAUTHORIZED.value());
        httpResponse.setContentType("application/json;charset=UTF-8");
        httpResponse.setCharacterEncoding("UTF-8");
        try {
            httpResponse.getWriter().append(Result.failure(ResultCode.COMMON_ERROR, msg).toString());
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
/**
 * Copyright © 2016-2019 The Thingsboard Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package thingsboard.common.data.kv;

/**
 * Time-series key-value read query: adds aggregation and paging parameters on
 * top of the base {@link TsKvQuery}.
 */
public interface ReadTsKvQuery extends TsKvQuery {

    /** Aggregation interval — presumably in milliseconds; confirm against implementations. */
    long getInterval();

    /** Maximum number of data points to return. */
    int getLimit();

    /** Aggregation function to apply per interval. */
    Aggregation getAggregation();

    /** Sort direction for the result — presumably "ASC"/"DESC"; confirm against callers. */
    String getOrderBy();
}
package fr.formation.ssiinomore.entity;

import java.util.List;

import javax.persistence.CascadeType;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.Id;
import javax.persistence.OneToMany;
import javax.persistence.Table;

import org.springframework.security.core.GrantedAuthority;

import com.fasterxml.jackson.annotation.JsonIgnore;

/**
 * JPA entity mapping the "role" table; also serves as a Spring Security
 * {@link GrantedAuthority}, whose authority string is the role's intitule.
 */
@Entity
@Table(name = "role")
public class Role implements GrantedAuthority {

    // GrantedAuthority extends Serializable.
    private static final long serialVersionUID = 1L;

    // Primary key; no generation strategy declared, so presumably assigned
    // by the application or data load — confirm.
    @Id
    private Integer id;

    // Role label; also returned as the granted-authority string below.
    @Column
    private String intitule;

    // Users holding this role; deleting the role cascades removal.
    // Hidden from JSON serialization (avoids lazy-loading/cycle issues).
    @OneToMany(fetch=FetchType.LAZY, mappedBy="role", cascade={CascadeType.REMOVE})
    @JsonIgnore
    private List<Utilisateur> utilisateurs;

    /**
     * Default constructor
     */
    public Role() {
    }

    public Integer getId() {
        return id;
    }

    public void setId(Integer id) {
        this.id = id;
    }

    public String getIntitule() {
        return intitule;
    }

    public void setIntitule(String intitule) {
        this.intitule = intitule;
    }

    public List<Utilisateur> getUtilisateurs() {
        return utilisateurs;
    }

    public void setUtilisateurs(List<Utilisateur> utilisateurs) {
        this.utilisateurs = utilisateurs;
    }

    /** The authority string Spring Security sees is the role's intitule. */
    @Override
    public String getAuthority() {
        return this.getIntitule();
    }
}
package com.anysoftkeyboard.quicktextkeys;

import android.support.annotation.NonNull;

import com.anysoftkeyboard.dictionaries.KeyCodesProvider;

import java.util.List;

/**
 * Looks up quick-text (emoji/shortcut) outputs for a typed tag.
 */
public interface TagsExtractor {

    /**
     * Is this extractor actually do anything.
     *
     * @return {@code true} when tag extraction is active; callers can use this
     *         to skip lookups entirely when disabled.
     */
    boolean isEnabled();

    /**
     * Returns a list of all quick-text outputs related to the given tag.
     *
     * @param typedTagToSearch the tag text the user typed (without any prefix marker — TODO confirm)
     * @param wordComposer     provider of the key codes that produced the typed tag
     * @return the matching quick-text outputs; never {@code null} — presumably empty when nothing matches, verify against implementations
     */
    List<CharSequence> getOutputForTag(@NonNull CharSequence typedTagToSearch, KeyCodesProvider wordComposer);
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.commons.configuration2.io;

import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.lang.reflect.Method;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLConnection;
import java.net.URLStreamHandler;
import java.util.Map;

import org.apache.commons.configuration2.ex.ConfigurationException;
import org.apache.commons.configuration2.ex.ConfigurationRuntimeException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.commons.vfs2.FileContent;
import org.apache.commons.vfs2.FileName;
import org.apache.commons.vfs2.FileObject;
import org.apache.commons.vfs2.FileSystemConfigBuilder;
import org.apache.commons.vfs2.FileSystemException;
import org.apache.commons.vfs2.FileSystemManager;
import org.apache.commons.vfs2.FileSystemOptions;
import org.apache.commons.vfs2.VFS;
import org.apache.commons.vfs2.provider.UriParser;

/**
 * FileSystem that uses <a href="https://commons.apache.org/proper/commons-vfs/">Apache Commons VFS</a>.
 *
 * <p>Paths without a URI scheme fall back to {@link DefaultFileSystem} behavior;
 * schemed paths are resolved through the shared VFS {@link FileSystemManager}.</p>
 *
 * @since 1.7
 */
public class VFSFileSystem extends DefaultFileSystem {
    /**
     * Stream handler required to create URL.
     *
     * <p>VFS file names may use schemes unknown to {@code java.net.URL}, so a
     * handler must be supplied; it deliberately refuses to open connections.</p>
     */
    private static class VFSURLStreamHandler extends URLStreamHandler {
        /** The Protocol used */
        private final String protocol;

        public VFSURLStreamHandler(final FileName file) {
            this.protocol = file.getScheme();
        }

        @Override
        protected URLConnection openConnection(final URL url) throws IOException {
            // VFS URLs are identifiers only; content access must go through the VFS API.
            throw new IOException("VFS URLs can only be used with VFS APIs");
        }
    }

    /** The logger. */
    private final Log log = LogFactory.getLog(getClass());

    public VFSFileSystem() {
        // empty
    }

    /**
     * Returns the base (parent) path of the given path. Paths without a scheme
     * are delegated to the default file system.
     *
     * @param path the path to inspect; may be schemed or plain
     * @return the parent URI, or {@code null} when there is no parent or resolution fails
     */
    @Override
    public String getBasePath(final String path) {
        if (UriParser.extractScheme(path) == null) {
            return super.getBasePath(path);
        }
        try {
            final FileName parent = resolveURI(path).getParent();
            return parent != null ? parent.getURI() : null;
        } catch (final FileSystemException fse) {
            // Log instead of printStackTrace(); failure maps to "no base path".
            log.warn("Could not determine base path of " + path, fse);
            return null;
        }
    }

    /**
     * Returns the file name component of the given path. Paths without a scheme
     * are delegated to the default file system.
     *
     * @param path the path to inspect
     * @return the base name, or {@code null} when resolution fails
     */
    @Override
    public String getFileName(final String path) {
        if (UriParser.extractScheme(path) == null) {
            return super.getFileName(path);
        }
        try {
            return resolveURI(path).getBaseName();
        } catch (final FileSystemException fse) {
            // Log instead of printStackTrace(); failure maps to "no file name".
            log.warn("Could not determine file name of " + path, fse);
            return null;
        }
    }

    /**
     * Opens an input stream for the given URL via VFS.
     *
     * @param url the URL of the configuration source
     * @return a stream over the file content
     * @throws ConfigurationException if the file is missing, is a directory,
     *         has no accessible content, or cannot be accessed at all
     */
    @Override
    public InputStream getInputStream(final URL url) throws ConfigurationException {
        FileObject file;
        try {
            final FileSystemOptions opts = getOptions(url.getProtocol());
            file = getManager().resolveFile(url.toString(), opts);
            if (!file.exists()) {
                throw new ConfigurationException("File not found");
            }
            if (!file.isFile()) {
                throw new ConfigurationException("Cannot load a configuration from a directory");
            }
            final FileContent content = file.getContent();
            if (content == null) {
                final String msg = "Cannot access content of " + file.getName().getFriendlyURI();
                throw new ConfigurationException(msg);
            }
            return content.getInputStream();
        } catch (final FileSystemException fse) {
            final String msg = "Unable to access " + url.toString();
            throw new ConfigurationException(msg, fse);
        }
    }

    /** Returns the shared VFS manager instance. */
    private FileSystemManager getManager() throws FileSystemException {
        return VFS.getManager();
    }

    /**
     * Builds {@link FileSystemOptions} for a scheme from the configured
     * {@code FileOptionsProvider}, applying each entry reflectively via the
     * scheme's {@link FileSystemConfigBuilder}.
     *
     * @param scheme the URI scheme, may be {@code null}
     * @return populated options, or {@code null} when no option could be applied
     */
    private FileSystemOptions getOptions(final String scheme) {
        if (scheme == null) {
            return null;
        }
        final FileSystemOptions opts = new FileSystemOptions();
        FileSystemConfigBuilder builder;
        try {
            builder = getManager().getFileSystemConfigBuilder(scheme);
        } catch (final Exception ex) {
            // Unknown scheme or VFS failure: no options available.
            return null;
        }
        final FileOptionsProvider provider = getFileOptionsProvider();
        if (provider != null) {
            final Map<String, Object> map = provider.getOptions();
            if (map == null) {
                return null;
            }
            int count = 0;
            for (final Map.Entry<String, Object> entry : map.entrySet()) {
                try {
                    String key = entry.getKey();
                    if (FileOptionsProvider.CURRENT_USER.equals(key)) {
                        // VFS builders expose the current user as "creatorName".
                        key = "creatorName";
                    }
                    setProperty(builder, opts, key, entry.getValue());
                    ++count;
                } catch (final Exception ex) {
                    // Ignore an incorrect property.
                    continue;
                }
            }
            if (count > 0) {
                return opts;
            }
        }
        return null;
    }

    /**
     * Opens an output stream for the given URL via VFS.
     *
     * @param url the URL of the save target
     * @return a stream to write the configuration to
     * @throws ConfigurationException if the target is a directory, content is
     *         inaccessible, or the URL cannot be accessed
     */
    @Override
    public OutputStream getOutputStream(final URL url) throws ConfigurationException {
        try {
            final FileSystemOptions opts = getOptions(url.getProtocol());
            final FileObject file = getManager().resolveFile(url.toString(), opts);
            // throw an exception if the target URL is a directory
            if (file == null || file.isFolder()) {
                throw new ConfigurationException("Cannot save a configuration to a directory");
            }
            final FileContent content = file.getContent();
            if (content == null) {
                throw new ConfigurationException("Cannot access content of " + url);
            }
            return content.getOutputStream();
        } catch (final FileSystemException fse) {
            throw new ConfigurationException("Unable to access " + url, fse);
        }
    }

    /**
     * Resolves the effective path from the given combination of file, URL,
     * base path and file name; falls back to the default file system when a
     * {@link File} is supplied.
     *
     * @return the resolved URI, or {@code null} when VFS resolution fails
     */
    @Override
    public String getPath(final File file, final URL url, final String basePath, final String fileName) {
        if (file != null) {
            return super.getPath(file, url, basePath, fileName);
        }
        try {
            if (url != null) {
                final FileName name = resolveURI(url.toString());
                if (name != null) {
                    return name.toString();
                }
            }
            if (UriParser.extractScheme(fileName) != null) {
                // The file name is already a full URI.
                return fileName;
            } else if (basePath != null) {
                final FileName base = resolveURI(basePath);
                return getManager().resolveName(base, fileName).getURI();
            } else {
                final FileName name = resolveURI(fileName);
                final FileName base = name.getParent();
                return getManager().resolveName(base, name.getBaseName()).getURI();
            }
        } catch (final FileSystemException fse) {
            // Log instead of printStackTrace(); failure maps to "no path".
            log.warn("Could not determine file path from url=" + url + ", basePath=" + basePath
                    + ", fileName=" + fileName, fse);
            return null;
        }
    }

    /**
     * Builds a URL (with a VFS-aware stream handler) from a base path and file
     * name. Scheme-less inputs are delegated to the default file system.
     *
     * @throws MalformedURLException from the default file system fallback
     * @throws ConfigurationRuntimeException when VFS resolution fails
     */
    @Override
    public URL getURL(final String basePath, final String file) throws MalformedURLException {
        if ((basePath != null && UriParser.extractScheme(basePath) == null)
                || (basePath == null && UriParser.extractScheme(file) == null)) {
            return super.getURL(basePath, file);
        }
        try {
            FileName path;
            if (basePath != null && UriParser.extractScheme(file) == null) {
                final FileName base = resolveURI(basePath);
                path = getManager().resolveName(base, file);
            } else {
                path = resolveURI(file);
            }
            final URLStreamHandler handler = new VFSURLStreamHandler(path);
            return new URL(null, path.getURI(), handler);
        } catch (final FileSystemException fse) {
            throw new ConfigurationRuntimeException(
                    "Could not parse basePath: " + basePath + " and fileName: " + file, fse);
        }
    }

    /**
     * Locates an existing file and returns its URL, or {@code null} if the file
     * does not exist or cannot be resolved. Scheme-less inputs are delegated to
     * the default file system.
     */
    @Override
    public URL locateFromURL(final String basePath, final String fileName) {
        final String fileScheme = UriParser.extractScheme(fileName);
        // Use DefaultFileSystem if basePath and fileName don't have a scheme.
        if ((basePath == null || UriParser.extractScheme(basePath) == null) && fileScheme == null) {
            return super.locateFromURL(basePath, fileName);
        }
        try {
            FileObject file;
            // Only use the base path if the file name doesn't have a scheme.
            if (basePath != null && fileScheme == null) {
                final String scheme = UriParser.extractScheme(basePath);
                final FileSystemOptions opts = getOptions(scheme);
                FileObject base = getManager().resolveFile(basePath, opts);
                if (base.isFile()) {
                    base = base.getParent();
                }
                file = getManager().resolveFile(base, fileName);
            } else {
                final FileSystemOptions opts = fileScheme != null ? getOptions(fileScheme) : null;
                file = getManager().resolveFile(fileName, opts);
            }
            if (!file.exists()) {
                return null;
            }
            final FileName path = file.getName();
            final URLStreamHandler handler = new VFSURLStreamHandler(path);
            return new URL(null, path.getURI(), handler);
        } catch (final FileSystemException | MalformedURLException fse) {
            // "Not locatable" is a normal outcome here; callers expect null.
            return null;
        }
    }

    /** Resolves a path string to a VFS {@link FileName}. */
    private FileName resolveURI(final String path) throws FileSystemException {
        return getManager().resolveURI(path);
    }

    /**
     * Reflectively invokes {@code set<Key>(FileSystemOptions, value)} on the
     * scheme's config builder; unknown properties are logged and skipped.
     */
    private void setProperty(final FileSystemConfigBuilder builder, final FileSystemOptions options,
            final String key, final Object value) {
        final String methodName = "set" + key.substring(0, 1).toUpperCase() + key.substring(1);
        final Class<?>[] paramTypes = new Class<?>[2];
        paramTypes[0] = FileSystemOptions.class;
        paramTypes[1] = value.getClass();
        try {
            final Method method = builder.getClass().getMethod(methodName, paramTypes);
            final Object[] params = new Object[2];
            params[0] = options;
            params[1] = value;
            method.invoke(builder, params);
        } catch (final Exception ex) {
            log.warn("Cannot access property '" + key + "'! Ignoring.", ex);
        }
    }
}
/*
 * Copyright 2016 Patrick Valsecchi
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
 * implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package ch.thus.camel.retry;

import org.apache.camel.CamelExecutionException;
import org.apache.camel.builder.RouteBuilder;
import org.junit.Test;

import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.util.Collections;
import java.util.List;

/**
 * Tests the {@code retry:} endpoint with its default configuration
 * ({@link RetryEndpoint#DEFAULT_MAX_TRIES} attempts, no custom options).
 *
 * <p>Mock-endpoint expectations are set per test; presumably the base class
 * verifies them after each test — TODO confirm against {@code BaseRetryTest}.</p>
 */
public class RetryDefaultTest extends BaseRetryTest {

    /** A message that succeeds immediately reaches the sub-route exactly once. */
    @Test
    public void testSuccessRightAway() throws InterruptedException {
        subEndpoint.expectedBodiesReceived(BODY);
        result.expectedBodiesReceived(RESPONSE);
        result.expectedPropertyReceived(TEST_PROP_NAME, TEST_PROP_VALUE);
        template.sendBody(BODY);
    }

    /** Succeeding on the very last allowed attempt still delivers the response. */
    @Test
    public void testSuccessLastMoment() throws InterruptedException {
        processor.nbFails = RetryEndpoint.DEFAULT_MAX_TRIES - 1;
        List<String> bodies = Collections.nCopies(RetryEndpoint.DEFAULT_MAX_TRIES, BODY);
        subEndpoint.expectedBodiesReceived(bodies);
        result.expectedBodiesReceived(RESPONSE);
        result.expectedPropertyReceived(TEST_PROP_NAME, TEST_PROP_VALUE);
        template.sendBody(BODY);
    }

    /** Failing every attempt exhausts the retries and surfaces {@link RetryExhaustedException}. */
    @Test
    public void testFail() throws InterruptedException {
        try {
            processor.nbFails = RetryEndpoint.DEFAULT_MAX_TRIES;
            List<String> bodies = Collections.nCopies(RetryEndpoint.DEFAULT_MAX_TRIES, BODY);
            subEndpoint.expectedBodiesReceived(bodies);
            result.expectedMessageCount(0);
            template.sendBody(BODY);
            fail("Expected a failure");
        } catch (CamelExecutionException camelException) {
            assertEquals(RetryExhaustedException.class, camelException.getCause().getClass());
        }
    }

    /** A fatal error aborts immediately with {@link FailRetryException}: only one attempt is made. */
    @Test
    public void testFatal() throws InterruptedException {
        try {
            processor.fatal = true;
            processor.nbFails = RetryEndpoint.DEFAULT_MAX_TRIES;
            subEndpoint.expectedBodiesReceived(BODY);
            result.expectedMessageCount(0);
            template.sendBody(BODY);
            fail("Expected a failure");
        } catch (CamelExecutionException camelException) {
            assertEquals(FailRetryException.class, camelException.getCause().getClass());
        }
    }

    /** Stream bodies must be re-readable across retries (body is cached between attempts). */
    @Test
    public void testStreamBodiesAreSupported() throws Exception {
        processor.nbFails = 1;
        List<String> bodies = Collections.nCopies(2, BODY);
        subEndpoint.expectedBodiesReceived(bodies);
        result.expectedBodiesReceived(RESPONSE);
        // Use the Charset overload: no checked UnsupportedEncodingException and
        // no fragile charset-name string.
        InputStream body = new ByteArrayInputStream(BODY.getBytes(StandardCharsets.UTF_8));
        template.sendBody(body);
    }

    /** Wires direct:start through the retry endpoint into a failing sub-route. */
    @Override
    protected RouteBuilder createRouteBuilder() throws Exception {
        processor = new FailProcessor();
        return new RouteBuilder() {
            @Override
            public void configure() throws Exception {
                from("direct:start").to("retry:direct:sub").to("mock:result");
                from("direct:sub").to("mock:sub").process(processor);
            }
        };
    }
}
/*
 * Copyright 2010-2021 JetBrains s.r.o. and Kotlin Programming Language contributors.
 * Use of this source code is governed by the Apache 2.0 license that can be found in the license/LICENSE.txt file.
 */

package org.jetbrains.kotlin.idea.script;

import com.intellij.testFramework.TestDataPath;
import org.jetbrains.kotlin.idea.test.JUnit3RunnerWithInners;
import org.jetbrains.kotlin.idea.test.KotlinTestUtils;
import org.jetbrains.kotlin.test.TestMetadata;
import org.jetbrains.kotlin.idea.test.TestRoot;
import org.junit.runner.RunWith;

/*
 * This class is generated by {@link org.jetbrains.kotlin.generators.tests.TestsPackage}.
 * DO NOT MODIFY MANUALLY.
 *
 * One test method per directory under testData/script/definition/navigation;
 * each delegates to AbstractScriptConfigurationNavigationTest#doTest.
 */
@SuppressWarnings("all")
@TestRoot("idea/tests")
@TestDataPath("$CONTENT_ROOT")
@RunWith(JUnit3RunnerWithInners.class)
@TestMetadata("testData/script/definition/navigation")
public class ScriptConfigurationNavigationTestGenerated extends AbstractScriptConfigurationNavigationTest {
    // Shared dispatch helper: runs doTest against the given test-data directory.
    private void runTest(String testDataFilePath) throws Exception {
        KotlinTestUtils.runTest(this::doTest, this, testDataFilePath);
    }

    @TestMetadata("buildSrcProblem")
    public void testBuildSrcProblem() throws Exception {
        runTest("testData/script/definition/navigation/buildSrcProblem/");
    }

    @TestMetadata("conflictingModule")
    public void testConflictingModule() throws Exception {
        runTest("testData/script/definition/navigation/conflictingModule/");
    }

    @TestMetadata("customBaseClass")
    public void testCustomBaseClass() throws Exception {
        runTest("testData/script/definition/navigation/customBaseClass/");
    }

    @TestMetadata("includedPluginProblem")
    public void testIncludedPluginProblem() throws Exception {
        runTest("testData/script/definition/navigation/includedPluginProblem/");
    }

    @TestMetadata("javaLib")
    public void testJavaLib() throws Exception {
        runTest("testData/script/definition/navigation/javaLib/");
    }

    @TestMetadata("javaLibWithSources")
    public void testJavaLibWithSources() throws Exception {
        runTest("testData/script/definition/navigation/javaLibWithSources/");
    }

    @TestMetadata("kotlinLib")
    public void testKotlinLib() throws Exception {
        runTest("testData/script/definition/navigation/kotlinLib/");
    }

    @TestMetadata("kotlinLibWithSources")
    public void testKotlinLibWithSources() throws Exception {
        runTest("testData/script/definition/navigation/kotlinLibWithSources/");
    }

    @TestMetadata("stdlib")
    public void testStdlib() throws Exception {
        runTest("testData/script/definition/navigation/stdlib/");
    }

    @TestMetadata("stdlibWithSources")
    public void testStdlibWithSources() throws Exception {
        runTest("testData/script/definition/navigation/stdlibWithSources/");
    }
}
/* * Copyright 2021 EPAM Systems. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.epam.digital.data.platform.bpms.rest.exception.mapper; import static org.assertj.core.api.Assertions.assertThat; import com.epam.digital.data.platform.starter.errorhandling.exception.SystemException; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import org.junit.jupiter.api.Test; class CamundaSystemExceptionMapperTest { @Test void testUserDataValidationExceptionMapper() { var ex = new SystemException(null); Response response = new CamundaSystemExceptionMapper().toResponse(ex); assertThat(response.getMediaType()).hasToString(MediaType.APPLICATION_JSON); assertThat(response.getStatus()).isEqualTo(500); assertThat(response.getEntity()).isSameAs(ex); } }
package crazypants.enderio.base.recipe.soul; import com.enderio.core.common.util.NNList; import crazypants.enderio.base.Log; import crazypants.enderio.base.recipe.MachineRecipeRegistry; import crazypants.enderio.util.Prep; import net.minecraft.item.ItemStack; import net.minecraft.nbt.NBTTagCompound; import net.minecraft.util.ResourceLocation; import javax.annotation.Nonnull; public class SoulBinderRecipeManager { private static final @Nonnull SoulBinderRecipeManager instance = new SoulBinderRecipeManager(); public static final @Nonnull String KEY_RECIPE_UID = "recipeUID"; public static final @Nonnull String KEY_INPUT_STACK = "inputStack"; public static final @Nonnull String KEY_OUTPUT_STACK = "outputStack"; public static final @Nonnull String KEY_REQUIRED_ENERGY = "requiredEnergyRF"; public static final @Nonnull String KEY_REQUIRED_XP = "requiredXP"; public static final @Nonnull String KEY_SOUL_TYPES = "entityTypes"; public static SoulBinderRecipeManager getInstance() { return instance; } public void addDefaultRecipes() { MachineRecipeRegistry.instance.registerRecipe(MachineRecipeRegistry.SOULBINDER, SoulBinderSpawnerRecipe.instance); MachineRecipeRegistry.instance.registerRecipe(MachineRecipeRegistry.SOULBINDER, SoulBinderTunedPressurePlateRecipe.instance1); MachineRecipeRegistry.instance.registerRecipe(MachineRecipeRegistry.SOULBINDER, SoulBinderTunedPressurePlateRecipe.instance2); MachineRecipeRegistry.instance.registerRecipe(MachineRecipeRegistry.SOULBINDER, SoulBinderReanimationRecipe.instance); MachineRecipeRegistry.instance.registerRecipe(MachineRecipeRegistry.SOULBINDER, SoulBinderSentientRecipe.instance); MachineRecipeRegistry.instance.registerRecipe(MachineRecipeRegistry.SOULBINDER, SoulBinderEnderCystalRecipe.instance); MachineRecipeRegistry.instance.registerRecipe(MachineRecipeRegistry.SOULBINDER, SoulBinderAttractorCystalRecipe.instance); MachineRecipeRegistry.instance.registerRecipe(MachineRecipeRegistry.SOULBINDER, 
SoulBinderPrecientCystalRecipe.instance); } //@formatter:off /** * Example of how to add a recipe: * * <pre> NBTTagCompound root = new NBTTagCompound(); * root.setString(SoulBinderRecipeManager.KEY_RECIPE_UID, "diamondToWood"); * root.setInteger(SoulBinderRecipeManager.KEY_REQUIRED_ENERGY, 50000); * root.setInteger(SoulBinderRecipeManager.KEY_REQUIRED_XP, 7); * root.setString(SoulBinderRecipeManager.KEY_SOUL_TYPES, "minecraft:zombie|specialmobs:specialzombie|minecraft:villager"); * ItemStack is = new ItemStack(Items.diamond); * NBTTagCompound stackRoot = new NBTTagCompound(); * is.writeToNBT(stackRoot); * root.setTag(SoulBinderRecipeManager.KEY_INPUT_STACK, stackRoot); * is = new ItemStack(Blocks.planks); * stackRoot = new NBTTagCompound(); * is.writeToNBT(stackRoot); * root.setTag(SoulBinderRecipeManager.KEY_OUTPUT_STACK, stackRoot); * * SoulBinderRecipeManager.getInstance().addRecipeFromNBT(root); * FMLInterModComms.sendMessage("EnderIO", "recipe:soulbinder", root);</pre> * * @param root * @return */ //@formatter:on public boolean addRecipeFromNBT(@Nonnull NBTTagCompound root) { try { String recipeUid = root.getString(KEY_RECIPE_UID); if (recipeUid.trim().length() == 0) { Log.error("SoulBinderRecipeManager: Could not add custom soul binder recipe from IMC as recipe UID not set: " + root); return false; } ItemStack inputStack = getStackFromRoot(root, KEY_INPUT_STACK); if (Prep.isInvalid(inputStack)) { Log.error("SoulBinderRecipeManager: Could not add custom soul binder recipe from IMC as no input stack defined: " + root); return false; } ItemStack outputStack = getStackFromRoot(root, KEY_OUTPUT_STACK); if (Prep.isInvalid(outputStack)) { Log.error("SoulBinderRecipeManager: Could not add custom soul binder recipe from IMC as no output stack defined: " + root); return false; } int energyRequired = root.getInteger(KEY_REQUIRED_ENERGY); if (energyRequired <= 0) { Log.error("SoulBinderRecipeManager: Could not add custom soul binder recipe from IMC as energy required was 
<= 0: " + root); return false; } int xpLevelsRequired = root.getInteger(KEY_REQUIRED_XP); if (xpLevelsRequired <= 0) { Log.error("SoulBinderRecipeManager: Could not add custom soul binder recipe from IMC as XP required was <= 0: " + root); return false; } String str = root.getString(KEY_SOUL_TYPES); if (str.trim().length() == 0) { Log.error("SoulBinderRecipeManager: Could not add custom soul binder recipe from IMC as no soul types defined: " + root); return false; } String[] entityNames = str.split("\\|"); NNList<ResourceLocation> entityRLs = new NNList<>(); for (String string : entityNames) { if (string == null || string.trim().isEmpty()) { Log.error("SoulBinderRecipeManager: Could not add custom soul binder recipe from IMC as no soul types contains emtpty entry: " + root); return false; } entityRLs.add(new ResourceLocation(string)); } if (entityRLs.isEmpty()) { Log.error("SoulBinderRecipeManager: Could not add custom soul binder recipe from IMC as no soul types defined: " + root); return false; } BasicSoulBinderRecipe recipe = new BasicSoulBinderRecipe(inputStack, outputStack, energyRequired, xpLevelsRequired, recipeUid, entityRLs.toArray(new ResourceLocation[0])); MachineRecipeRegistry.instance.registerRecipe(MachineRecipeRegistry.SOULBINDER, recipe); return true; } catch (Exception e) { Log.error("SoulBinderRecipeManager: Could not add custom soul binder exception thrown when parsing message: " + e); return false; } } private @Nonnull ItemStack getStackFromRoot(@Nonnull NBTTagCompound root, @Nonnull String string) { return new ItemStack(root.getCompoundTag(string)); } }
/*
 * Copyright 2004-2014 ICEsoft Technologies Canada Corp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the
 * License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an "AS
 * IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language
 * governing permissions and limitations under the License.
 */

package org.icemobile.samples.mobileshowcase.view.examples.input.geolocation;

import org.icefaces.mobi.utils.MobiJSFUtils;
import org.icemobile.samples.mobileshowcase.view.metadata.annotation.*;
import org.icemobile.samples.mobileshowcase.view.metadata.context.ExampleImpl;
import org.icemobile.util.ClientDescriptor;

import javax.faces.bean.ManagedBean;
import javax.faces.bean.SessionScoped;
import javax.faces.event.ActionEvent;
import javax.faces.event.ValueChangeEvent;
import java.io.Serializable;
import java.util.ArrayList;

@Destination(
        title = "example.input.geolocation.destination.title.short",
        titleExt = "example.input.geolocation.destination.title.long",
        titleBack = "example.input.geolocation.destination.title.back"
)
@Example(
        descriptionPath = "/WEB-INF/includes/examples/input/geolocation-desc.xhtml",
        examplePath = "/WEB-INF/includes/examples/input/geolocation-example.xhtml",
        resourcesPath = "/WEB-INF/includes/examples/example-resources.xhtml"
)
@ExampleResources(
        resources = {
                // xhtml
                @ExampleResource(type = ResourceType.xhtml,
                        title = "geolocation-example.xhtml",
                        resource = "/WEB-INF/includes/examples/input/geolocation-example.xhtml"),
                // Java Source
                @ExampleResource(type = ResourceType.java,
                        title = "GeoLocationBean.java",
                        resource = "/WEB-INF/classes/org/icemobile/samples/mobileshowcase" +
                                "/view/examples/input/geolocation/GeoLocationBean.java")
        }
)
@ManagedBean(name = GeoLocationBean.BEAN_NAME)
@SessionScoped
public class GeoLocationBean extends ExampleImpl<GeoLocationBean> implements Serializable {

    public static final String BEAN_NAME = "geoLocationBean";

    // Last reported device position/heading.
    private double latitude = 0.0;
    private double longitude = 0.0;
    private double altitude = 0.0;
    private double direction = 0.0;

    // Geolocation lookup tuning parameters exposed in the example UI.
    private int timeout = 30;
    private int maximumAge = 3600;
    private String enableHighPrecision = "asNeeded";
    private boolean continuousUpdates = true;

    /**
     * Session-scoped backing bean for the geolocation input example: holds the
     * device position reported by the client plus the lookup settings.
     */
    public GeoLocationBean() {
        super(GeoLocationBean.class);
    }

    public double getLatitude() {
        return latitude;
    }

    public void setLatitude(double latitude) {
        this.latitude = latitude;
    }

    public double getLongitude() {
        return longitude;
    }

    public void setLongitude(double longitude) {
        this.longitude = longitude;
    }

    /** Read-only view of the latitude for display components. */
    public double getLatitudeRead() {
        return latitude;
    }

    /** Read-only view of the longitude for display components. */
    public double getLongitudeRead() {
        return longitude;
    }

    // lat and long values are read only, no value writing takes place.
    public void setLatitudeRead(double latitude) {
    }

    public void setLongitudeRead(double longitude) {
    }

    public double getAltitude() {
        return altitude;
    }

    public void setAltitude(double altitude) {
        this.altitude = altitude;
    }

    public double getDirection() {
        return direction;
    }

    public void setDirection(double direction) {
        this.direction = direction;
    }

    public int getTimeout() {
        return timeout;
    }

    public void setTimeout(int timeout) {
        this.timeout = timeout;
    }

    public int getMaximumAge() {
        return maximumAge;
    }

    public void setMaximumAge(int maximumAge) {
        this.maximumAge = maximumAge;
    }

    public String getEnableHighPrecision() {
        return enableHighPrecision;
    }

    public void setEnableHighPrecision(String enableHighPrecision) {
        this.enableHighPrecision = enableHighPrecision;
    }

    public boolean isContinuousUpdates() {
        return continuousUpdates;
    }

    public void setContinuousUpdates(boolean continuousUpdates) {
        this.continuousUpdates = continuousUpdates;
    }

    /** Value-change listener for the "continuous updates" toggle. */
    public void continuousUpdatesChange(ValueChangeEvent event) {
        // Auto-unboxing is equivalent to the explicit booleanValue() call.
        continuousUpdates = (Boolean) event.getNewValue();
    }

    /** Action listener that clears all reported coordinates back to zero. */
    public void resetValues(ActionEvent evt) {
        latitude = 0.0;
        longitude = 0.0;
        altitude = 0.0;
        direction = 0.0;
    }
}
package org.multibit.hd.core.dto; import com.google.common.base.Optional; import com.google.common.base.Preconditions; import com.google.common.util.concurrent.ListenableFuture; import org.bitcoin.protocols.payments.Protos; import org.bitcoinj.core.Address; import org.bitcoinj.core.Coin; import org.bitcoinj.core.Transaction; import org.bitcoinj.protocols.payments.PaymentProtocol; import org.bitcoinj.protocols.payments.PaymentProtocolException; import org.bitcoinj.protocols.payments.PaymentSession; import org.joda.time.DateTime; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.annotation.Nullable; import java.io.IOException; import java.security.KeyStoreException; import java.security.cert.CertPathValidatorException; import java.util.Arrays; import java.util.Date; import java.util.List; import java.util.concurrent.TimeoutException; /** * <p>Value object to provide the following to Core API:</p> * <ul> * <li>Information about a payment session's status</li> * </ul> * * @since 0.0.7 */ public class PaymentSessionSummary { private static final Logger log = LoggerFactory.getLogger(PaymentSessionSummary.class); private final PaymentSessionStatus status; private final Optional<PaymentSession> paymentSession; private final Optional<PaymentProtocol.PkiVerificationData> pkiVerificationData; private final RAGStatus severity; private final CoreMessageKey messageKey; private final Object[] messageData; /** * <p>The server has returned a well-formed payment request</p> * * @param paymentSession The payment session containing meta data (cannot be null to be OK) * @param pkiVerificationData The PKI verification data containing identity information (cannot be null to be OK) * * @return A new "payment session OK" summary */ public static PaymentSessionSummary newPaymentSessionOK(PaymentSession paymentSession, PaymentProtocol.PkiVerificationData pkiVerificationData) { Preconditions.checkNotNull(paymentSession, "'paymentSession' must be present"); return new 
PaymentSessionSummary( Optional.of(paymentSession), Optional.fromNullable(pkiVerificationData), PaymentSessionStatus.TRUSTED, RAGStatus.GREEN, CoreMessageKey.PAYMENT_SESSION_OK, new String[]{paymentSession.getMemo()} ); } /** * <p>The server has returned a well-formed payment request that has failed PKI validation</p> * * <p>The user may want to proceed under these circumstances so we cater for it.</p> * * @param paymentSession The payment session containing meta data * * @return A new "payment session" summary with appropriate confidence level */ public static PaymentSessionSummary newPaymentSessionAlmostOK(PaymentSession paymentSession, Exception e) { if (e instanceof PaymentProtocolException.InvalidPkiData) { return new PaymentSessionSummary( Optional.of(paymentSession), Optional.<PaymentProtocol.PkiVerificationData>absent(), PaymentSessionStatus.UNTRUSTED, RAGStatus.AMBER, CoreMessageKey.PAYMENT_SESSION_PKI_INVALID, new String[]{paymentSession.getMemo(), e.getMessage()} ); } if (e instanceof PaymentProtocolException.InvalidPkiType) { return new PaymentSessionSummary( Optional.of(paymentSession), Optional.<PaymentProtocol.PkiVerificationData>absent(), PaymentSessionStatus.UNTRUSTED, RAGStatus.AMBER, CoreMessageKey.PAYMENT_SESSION_PKI_INVALID_TYPE, new String[]{paymentSession.getMemo(), e.getMessage()} ); } if (e instanceof PaymentProtocolException.PkiVerificationException) { return new PaymentSessionSummary( Optional.of(paymentSession), Optional.<PaymentProtocol.PkiVerificationData>absent(), PaymentSessionStatus.UNTRUSTED, RAGStatus.AMBER, CoreMessageKey.PAYMENT_SESSION_PKI_MISSING, new String[]{paymentSession.getMemo(), e.getMessage()} ); } if (e instanceof KeyStoreException) { return new PaymentSessionSummary( Optional.of(paymentSession), Optional.<PaymentProtocol.PkiVerificationData>absent(), PaymentSessionStatus.UNTRUSTED, RAGStatus.AMBER, CoreMessageKey.PAYMENT_SESSION_PKI_UNTRUSTED_CA, new String[]{paymentSession.getMemo(), e.getMessage()} ); } // Assume 
the worst return newPaymentSessionFromException(e, paymentSession.getMemo()); } /** * @param e The payment protocol exception (either an ERROR or a DOWN) * * @return A suitable payment session summary */ public static PaymentSessionSummary newPaymentSessionFromException(Exception e, String hostName) { log.warn("Failed payment server: Host={} Failure={}", hostName, e.getMessage()); // Default handling is ERROR if (e instanceof InterruptedException) { return new PaymentSessionSummary( Optional.<PaymentSession>absent(), null, PaymentSessionStatus.DOWN, RAGStatus.AMBER, CoreMessageKey.PAYMENT_SESSION_DOWN, new String[]{hostName, e.getMessage()} ); } if (e instanceof TimeoutException) { return new PaymentSessionSummary( Optional.<PaymentSession>absent(), null, PaymentSessionStatus.DOWN, RAGStatus.AMBER, CoreMessageKey.PAYMENT_SESSION_DOWN, new String[]{hostName, e.getMessage()} ); } // Use default response return new PaymentSessionSummary( Optional.<PaymentSession>absent(), null, PaymentSessionStatus.ERROR, RAGStatus.AMBER, CoreMessageKey.PAYMENT_SESSION_ERROR, new String[]{hostName, e.getMessage()} ); } /** * @param e The payment protocol exception (specific problem) * @param hostName The host name * * @return A suitable payment session summary */ public static PaymentSessionSummary newPaymentSessionFromException(PaymentProtocolException e, String hostName) { log.warn("Failed payment session: Host={} Failure={}", hostName, e.getMessage()); // Default handling is ERROR if (e instanceof PaymentProtocolException.Expired) { return new PaymentSessionSummary( Optional.<PaymentSession>absent(), Optional.<PaymentProtocol.PkiVerificationData>absent(), PaymentSessionStatus.UNTRUSTED, RAGStatus.AMBER, CoreMessageKey.PAYMENT_SESSION_EXPIRED, new String[]{hostName, e.getMessage()} ); } if (e instanceof PaymentProtocolException.InvalidNetwork) { return new PaymentSessionSummary( Optional.<PaymentSession>absent(), Optional.<PaymentProtocol.PkiVerificationData>absent(), 
PaymentSessionStatus.UNTRUSTED, RAGStatus.AMBER, CoreMessageKey.PAYMENT_SESSION_INVALID_NETWORK, new String[]{hostName, e.getMessage()} ); } if (e instanceof PaymentProtocolException.InvalidOutputs) { return new PaymentSessionSummary( Optional.<PaymentSession>absent(), Optional.<PaymentProtocol.PkiVerificationData>absent(), PaymentSessionStatus.UNTRUSTED, RAGStatus.AMBER, CoreMessageKey.PAYMENT_SESSION_INVALID_OUTPUTS, new String[]{hostName, e.getMessage()} ); } if (e instanceof PaymentProtocolException.InvalidPaymentRequestURL) { return new PaymentSessionSummary( Optional.<PaymentSession>absent(), Optional.<PaymentProtocol.PkiVerificationData>absent(), PaymentSessionStatus.UNTRUSTED, RAGStatus.AMBER, CoreMessageKey.PAYMENT_SESSION_INVALID_REQUEST_URL, new String[]{hostName, e.getMessage()} ); } if (e instanceof PaymentProtocolException.InvalidPaymentURL) { return new PaymentSessionSummary( Optional.<PaymentSession>absent(), Optional.<PaymentProtocol.PkiVerificationData>absent(), PaymentSessionStatus.UNTRUSTED, RAGStatus.AMBER, CoreMessageKey.PAYMENT_SESSION_INVALID_PAYMENT_URL, new String[]{hostName, e.getMessage()} ); } if (e instanceof PaymentProtocolException.InvalidVersion) { return new PaymentSessionSummary( Optional.<PaymentSession>absent(), Optional.<PaymentProtocol.PkiVerificationData>absent(), PaymentSessionStatus.UNTRUSTED, RAGStatus.AMBER, CoreMessageKey.PAYMENT_SESSION_INVALID_VERSION, new String[]{hostName, e.getMessage()} ); } if (e instanceof PaymentProtocolException.InvalidPkiData) { return new PaymentSessionSummary( Optional.<PaymentSession>absent(), Optional.<PaymentProtocol.PkiVerificationData>absent(), PaymentSessionStatus.UNTRUSTED, RAGStatus.AMBER, CoreMessageKey.PAYMENT_SESSION_PKI_INVALID, new String[]{hostName, e.getMessage()} ); } if (e instanceof PaymentProtocolException.InvalidPkiType) { return new PaymentSessionSummary( Optional.<PaymentSession>absent(), Optional.<PaymentProtocol.PkiVerificationData>absent(), 
PaymentSessionStatus.UNTRUSTED, RAGStatus.AMBER, CoreMessageKey.PAYMENT_SESSION_PKI_INVALID_TYPE, new String[]{hostName, e.getMessage()} ); } if (e instanceof PaymentProtocolException.PkiVerificationException) { // This is a bit lame but the only way to differentiate PKI failures from untrusted if (e.getCause() != null && e.getCause() instanceof CertPathValidatorException) { // Untrusted CA (user might want to add it to the trust store) return new PaymentSessionSummary( Optional.<PaymentSession>absent(), Optional.<PaymentProtocol.PkiVerificationData>absent(), PaymentSessionStatus.UNTRUSTED, RAGStatus.AMBER, CoreMessageKey.PAYMENT_SESSION_PKI_UNTRUSTED_CA, new String[]{hostName, e.getMessage()} ); } else { return new PaymentSessionSummary( Optional.<PaymentSession>absent(), Optional.<PaymentProtocol.PkiVerificationData>absent(), PaymentSessionStatus.UNTRUSTED, RAGStatus.AMBER, CoreMessageKey.PAYMENT_SESSION_PKI_MISSING, new String[]{hostName, e.getMessage()} ); } } // Unknown return new PaymentSessionSummary( Optional.<PaymentSession>absent(), Optional.<PaymentProtocol.PkiVerificationData>absent(), PaymentSessionStatus.ERROR, RAGStatus.AMBER, CoreMessageKey.PAYMENT_SESSION_ERROR, new String[]{hostName, e.getMessage()} ); } /** * <p>See the utility factory methods for standard situations</p> * * @param paymentSession The optional payment session * @param pkiVerificationData The PKI verification data * @param status The payment session status (e.g. 
OK) * @param severity The severity (Red, Amber, Green) * @param messageKey The error key to allow localisation * @param messageData The error data for insertion into the error message */ public PaymentSessionSummary( Optional<PaymentSession> paymentSession, Optional<PaymentProtocol.PkiVerificationData> pkiVerificationData, PaymentSessionStatus status, RAGStatus severity, CoreMessageKey messageKey, Object[] messageData) { this.paymentSession = paymentSession; this.pkiVerificationData = pkiVerificationData; this.status = status; this.severity = severity; this.messageKey = messageKey; this.messageData = Arrays.copyOf(messageData, messageData.length); } /** * @return true if there is a payment session */ public boolean hasPaymentSession() { return paymentSession.isPresent(); } /** * @return optional boolean, which holds true if the payment session has outputs */ public Optional<Boolean> hasPaymentSessionOutputs() { if (hasPaymentSession()) { return Optional.of(!paymentSession.get().getOutputs().isEmpty()); } else { return Optional.absent(); } } /** * @return The memo from the payment session object */ public Optional<String> getPaymentSessionMemo() { if (hasPaymentSession()) { return Optional.fromNullable(paymentSession.get().getMemo()); } else { return Optional.absent(); } } /** * @return The value from the payment session object */ public Optional<Coin> getPaymentSessionValue() { if (hasPaymentSession()) { return Optional.fromNullable(paymentSession.get().getValue()); } else { return Optional.absent(); } } /** * @return The expires from the payment session object */ public Optional<DateTime> getPaymentSessionExpires() { if (hasPaymentSession() && paymentSession.get().getExpires() != null) { return Optional.of(new DateTime(paymentSession.get().getExpires())); } else { return Optional.absent(); } } /** * @return The date from the payment session object */ public Optional<DateTime> getPaymentSessionDate() { if (hasPaymentSession() && paymentSession.get().getDate() != 
null) { return Optional.of(new DateTime(paymentSession.get().getDate())); } else { return Optional.absent(); } } /** * @return The payment URL from the payment session object */ public Optional<String> getPaymentSessionPaymentUrl() { if (hasPaymentSession()) { return Optional.fromNullable(paymentSession.get().getPaymentUrl()); } else { return Optional.absent(); } } /** * @return The payment request from the payment session object */ public Optional<Protos.PaymentRequest> getPaymentSessionPaymentRequest() { if (hasPaymentSession()) { return Optional.fromNullable(paymentSession.get().getPaymentRequest()); } else { return Optional.absent(); } } public Optional<PaymentProtocolResponseDto> sendPaymentSessionPayment(List<Transaction> transactions, @Nullable Address refundAddr, @Nullable String memo) throws IOException, PaymentProtocolException { if (hasPaymentSession()) { log.debug("Sending payment details to requester at URL '{}'", paymentSession.get().getPaymentUrl()); Protos.Payment payment = paymentSession.get().getPayment(transactions, refundAddr, memo); ListenableFuture<PaymentProtocol.Ack> future = paymentSession.get().sendPayment(transactions, refundAddr, memo); return Optional.of(new PaymentProtocolResponseDto(payment, future)); } else { return Optional.absent(); } } /** * Just a data holder for the result when sending the payment. 
*/ public static class PaymentProtocolResponseDto { private final Protos.Payment finalPayment; private final ListenableFuture<PaymentProtocol.Ack> future; public PaymentProtocolResponseDto(Protos.Payment finalPayment, ListenableFuture<PaymentProtocol.Ack> future) { this.finalPayment = finalPayment; this.future = future; } public Protos.Payment getFinalPayment() { return finalPayment; } public ListenableFuture<PaymentProtocol.Ack> getFuture() { return future; } } /** * @return The PKI verification data based on a second pass through the Payment Request (accurate) */ public Optional<PaymentProtocol.PkiVerificationData> getPkiVerificationData() { return pkiVerificationData; } /** * @return The severity (e.g. AMBER) */ public RAGStatus getSeverity() { return severity; } /** * @return The payment session status (e.g. "TRUSTED") */ public PaymentSessionStatus getStatus() { return status; } /** * @return An optional array of arbitrary objects, often for insertion into a resource bundle string */ public Object[] getMessageData() { return Arrays.copyOf(messageData, messageData.length); } public CoreMessageKey getMessageKey() { return messageKey; } @Override public String toString() { return "PaymentSessionSummary{" + "messageData=" + Arrays.toString(messageData) + ", status=" + status + ", paymentSession=" + paymentSession + ", pkiVerificationData=" + pkiVerificationData + ", severity=" + severity + ", messageKey=" + messageKey + '}'; } }
package com.github.drbookings.io;

import java.io.IOException;
import java.nio.file.Path;

import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBException;
import javax.xml.bind.Marshaller;

import com.github.drbookings.ser.DataStoreCoreSer2;
import com.github.drbookings.ser.MarshallListener;

/**
 * Serialises a {@link DataStoreCoreSer2} to a formatted XML file via JAXB.
 */
public class ToXMLWriter {

    /**
     * Marshals {@code data} as pretty-printed XML into the file at {@code outPath}.
     *
     * @throws IOException if JAXB marshalling fails (the {@link JAXBException}
     *                     is preserved as the cause)
     */
    public void write(final DataStoreCoreSer2 data, final Path outPath) throws IOException {
        try {
            newConfiguredMarshaller().marshal(data, outPath.toFile());
        } catch (final JAXBException e) {
            // Callers deal in IOException only; keep the JAXB failure as cause.
            throw new IOException(e);
        }
    }

    /** Creates a marshaller with the marshal listener and formatted output enabled. */
    private static Marshaller newConfiguredMarshaller() throws JAXBException {
        final Marshaller marshaller = JAXBContext.newInstance(DataStoreCoreSer2.class).createMarshaller();
        marshaller.setListener(new MarshallListener());
        marshaller.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, true);
        return marshaller;
    }
}
package AlexGame;

import javax.imageio.ImageIO;
import javax.swing.*;
import java.awt.*;
import java.awt.event.*;
import java.awt.image.BufferedImage;
import java.io.*;

/**
 * Created by Rak Alexey on 5/12/17.
 *
 * Main game panel: paints the scene, animates the ducks on a Swing timer and
 * handles mouse/keyboard input for the duck-shooting game.
 */
class DemoPanel extends JPanel {

    private BufferedImage lea;          // background image
    private Lightning lightning;
    private Zeus zeus;
    private DuckObservable ducks;
    private boolean end = false;        // set by any key press; switches to the stats screen
    private Timer timer;
    private Font textFont;
    private int width;
    private int height;

    DemoPanel(int newWidth, int newHeight) {
        super();
        Dimension windowSize = new Dimension(newWidth, newHeight);
        setSize(windowSize);
        setMinimumSize(windowSize);
        setMaximumSize(windowSize);
        width = newWidth;
        height = newHeight;
        textFont = new Font("name", Font.BOLD, newHeight / 20);
        try {
            lea = ImageIO.read(new File("data/lea.jpg"));
            lightning = new Lightning();
            Duck.leftDuck = ImageIO.read(new File("data/duck_life_left.bmp"));
            Duck.rightDuck = ImageIO.read(new File("data/duck_life_right.bmp"));
            Duck.dieLeftDuck = ImageIO.read(new File("data/duck_die_left.bmp"));
            Duck.dieRightDuck = ImageIO.read(new File("data/duck_die_right.bmp"));
            zeus = new Zeus(width, height);
        } catch (IOException ex) {
            // Bug fix: this exception used to be silently swallowed, which left
            // the image fields null and caused an opaque NullPointerException
            // later in paintComponent(). Surface the failure so it can be
            // diagnosed (missing/corrupt files under data/).
            System.err.println("Failed to load game resources: " + ex.getMessage());
            ex.printStackTrace();
        }
        ducks = new DuckObservable(width, height);
        updateDucks();
        // Animation tick: advance the ducks and repaint every 20 ms.
        timer = new Timer(20, e -> {
            updateDucks();
            repaint();
        });
        timer.start();
        addMouseMotionListener(new PanelMouseMotionListener());
        addMouseListener(new PanelMouseListener());
        addKeyListener(new PanelKeyListener());
        setFocusable(true);
        setVisible(true);
    }

    private void updateDucks() {
        ducks.move();
    }

    @Override
    protected void paintComponent(Graphics g) {
        super.paintComponent(g);
        g.drawImage(lea, 0, 0, this);
        g.setFont(textFont);
        if (!end) {
            g.drawImage(zeus.getImage(), zeus.getX(), zeus.getY(), this);
            ducks.getImages(g, this);
            if (lightning.is()) {
                g.drawImage(lightning.getImage(), lightning.getX(), 0, this);
            }
            g.drawString("Level:" + Integer.toString(ducks.getLevel()), 0, 50);
        } else {
            // Game over: show the statistics screen.
            g.drawString("Your Statistic:", width / 3, height * 3 / 10);
            g.drawString("Level " + Integer.toString(ducks.getLevel()), width / 3, height * 4 / 10);
            g.drawString("Count hits " + Double.toString(ducks.getHits()), width / 3, height * 5 / 10);
            g.drawString("Count shoots " + Double.toString(ducks.getShoots()), width / 3, height * 6 / 10);
            g.drawString("Accuracy " + Double.toString(100 * ducks.getAccuracy()) + "%", width / 3, height * 7 / 10);
            g.drawString("Lost ducks " + Integer.toString(ducks.getLostDucks()), width / 3, height * 8 / 10);
            // NOTE(review): stopping the timer from inside paintComponent is a
            // side effect in a paint method; it works because the stats screen
            // no longer needs animation, but consider moving it to the key handler.
            timer.stop();
        }
    }

    /** Zeus follows the horizontal mouse position. */
    class PanelMouseMotionListener extends MouseMotionAdapter {
        @Override
        public void mouseMoved(MouseEvent e) {
            zeus.setX(e.getX() - 200);
        }
    }

    /** A click fires the lightning and tries to kill a duck at that point. */
    class PanelMouseListener extends MouseAdapter {
        @Override
        public void mouseClicked(MouseEvent e) {
            lightning.resize(e.getX(), e.getY());
            ducks.kill(e.getX(), e.getY());
        }
    }

    /** Any key press ends the game and shows the statistics screen. */
    class PanelKeyListener extends KeyAdapter {
        @Override
        public void keyTyped(KeyEvent e) {
            end = true;
        }
    }
}
package com.acro.hackathon.trekking.POJO.weather;

import com.google.gson.annotations.Expose;
import com.google.gson.annotations.SerializedName;

/**
 * Gson-mapped fragment of the weather API response holding the {@code "pod"}
 * attribute (presumably a part-of-day flag — confirm against the feed docs).
 */
public class Sys {

    @SerializedName("pod")
    @Expose
    private String pod;

    /** @return the raw {@code "pod"} value from the JSON payload */
    public String getPod() {
        return pod;
    }

    /** @param pod the raw {@code "pod"} value to store */
    public void setPod(String pod) {
        this.pod = pod;
    }
}
package part2;

/**
 * Counts case-insensitive, non-overlapping occurrences of a substring in a
 * fixed search space.
 */
public class HayStackNeedleSearcher implements SubstringFrequencySearcher {

    String m_searchSpace;

    HayStackNeedleSearcher(String searchSpace) {
        m_searchSpace = searchSpace;
    }

    /**
     * Returns the number of case-insensitive, non-overlapping occurrences of
     * {@code subString} in the search space.
     *
     * <p>Bug fix: the previous character-by-character scan reset its match
     * index on a mismatch without re-examining the current character, so it
     * missed matches that start inside a failed partial match (e.g. "ab" in
     * "aab" returned 0). Scanning with {@link String#regionMatches(boolean,
     * int, String, int, int)} examines every start position.</p>
     *
     * <p>Preserved edge cases: null haystack or needle → 0; both empty → 1;
     * empty needle in a non-empty haystack → haystack length.</p>
     */
    @Override
    public int findFrequency(String subString) {
        if (m_searchSpace == null || subString == null) {
            return 0;
        }
        if (subString.isEmpty()) {
            // Historical contract: empty-in-empty is 1, otherwise one "match"
            // per character of the search space.
            return m_searchSpace.isEmpty() ? 1 : m_searchSpace.length();
        }

        int frequency = 0;
        int needleLength = subString.length();
        int i = 0;
        while (i + needleLength <= m_searchSpace.length()) {
            // regionMatches with ignoreCase=true compares locale-independently,
            // matching the old per-character Character.toLowerCase comparison.
            if (m_searchSpace.regionMatches(true, i, subString, 0, needleLength)) {
                frequency++;
                i += needleLength; // skip past the match: counts are non-overlapping
            } else {
                i++;
            }
        }
        return frequency;
    }
}
package cn.gson.oasys.modelV2.po; public class TypePO { private Long typeId; private String typeColor; private String typeModel; private String typeName; private Integer sortValue; public Long getTypeId() { return typeId; } public void setTypeId(Long typeId) { this.typeId = typeId; } public String getTypeColor() { return typeColor; } public void setTypeColor(String typeColor) { this.typeColor = typeColor == null ? null : typeColor.trim(); } public String getTypeModel() { return typeModel; } public void setTypeModel(String typeModel) { this.typeModel = typeModel == null ? null : typeModel.trim(); } public String getTypeName() { return typeName; } public void setTypeName(String typeName) { this.typeName = typeName == null ? null : typeName.trim(); } public Integer getSortValue() { return sortValue; } public void setSortValue(Integer sortValue) { this.sortValue = sortValue; } @Override public boolean equals(Object that) { if (this == that) { return true; } if (that == null) { return false; } if (getClass() != that.getClass()) { return false; } TypePO other = (TypePO) that; return (this.getTypeId() == null ? other.getTypeId() == null : this.getTypeId().equals(other.getTypeId())) && (this.getTypeColor() == null ? other.getTypeColor() == null : this.getTypeColor().equals(other.getTypeColor())) && (this.getTypeModel() == null ? other.getTypeModel() == null : this.getTypeModel().equals(other.getTypeModel())) && (this.getTypeName() == null ? other.getTypeName() == null : this.getTypeName().equals(other.getTypeName())) && (this.getSortValue() == null ? other.getSortValue() == null : this.getSortValue().equals(other.getSortValue())); } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + ((getTypeId() == null) ? 0 : getTypeId().hashCode()); result = prime * result + ((getTypeColor() == null) ? 0 : getTypeColor().hashCode()); result = prime * result + ((getTypeModel() == null) ? 
0 : getTypeModel().hashCode()); result = prime * result + ((getTypeName() == null) ? 0 : getTypeName().hashCode()); result = prime * result + ((getSortValue() == null) ? 0 : getSortValue().hashCode()); return result; } @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append(getClass().getSimpleName()); sb.append(" ["); sb.append("Hash = ").append(hashCode()); sb.append(", typeId=").append(typeId); sb.append(", typeColor=").append(typeColor); sb.append(", typeModel=").append(typeModel); sb.append(", typeName=").append(typeName); sb.append(", sortValue=").append(sortValue); sb.append("]"); return sb.toString(); } }
/*
 * Copyright (c) Facebook, Inc. and its affiliates.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.facebook.buck.android;

import com.facebook.buck.android.packageable.AndroidPackageable;
import com.facebook.buck.android.packageable.AndroidPackageableCollector;
import com.facebook.buck.core.model.BuildTarget;
import com.facebook.buck.core.rules.SourcePathRuleFinder;
import com.facebook.buck.io.filesystem.ProjectFilesystem;
import com.facebook.buck.jvm.core.JavaAbis;
import com.facebook.buck.jvm.core.JavaLibrary;
import com.facebook.buck.jvm.java.DefaultJavaLibrary;
import com.facebook.buck.jvm.java.ExtraClasspathProvider;
import com.facebook.buck.jvm.java.JarBuildStepsFactory;
import com.facebook.buck.jvm.java.JavaBuckConfig.UnusedDependenciesAction;
import com.facebook.buck.jvm.java.Javac;
import com.facebook.buck.jvm.java.JavacOptions;
import com.facebook.buck.jvm.java.JavacToJarStepFactory;
import com.facebook.buck.jvm.java.RemoveClassesPatternsMatcher;
import com.facebook.buck.jvm.java.ResourcesParameters;
import com.facebook.buck.jvm.java.abi.AbiGenerationMode;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSortedSet;
import java.util.Optional;

/**
 * {@link JavaLibrary} that wraps the output of an {@link AndroidBuildConfig}.
 *
 * <p>This is a custom subclass of {@link DefaultJavaLibrary} so that it can have special behavior
 * when being traversed by an {@link AndroidPackageableCollector}.
 */
class AndroidBuildConfigJavaLibrary extends DefaultJavaLibrary implements AndroidPackageable {

  // The wrapped build rule whose generated BuildConfig.java this library compiles.
  private final AndroidBuildConfig androidBuildConfig;

  // NOTE: the super(...) call below is one long positional argument list for
  // DefaultJavaLibrary; the inline /* name */ comments label the less obvious
  // positions. The single compiled source is the generated BuildConfig.java
  // produced by androidBuildConfig (passed as srcs below).
  AndroidBuildConfigJavaLibrary(
      BuildTarget buildTarget,
      ProjectFilesystem projectFilesystem,
      SourcePathRuleFinder ruleFinder,
      Javac javac,
      JavacOptions javacOptions,
      AndroidBuildConfig androidBuildConfig) {
    super(
        buildTarget,
        projectFilesystem,
        new JarBuildStepsFactory(
            buildTarget,
            new JavacToJarStepFactory(javac, javacOptions, ExtraClasspathProvider.EMPTY),
            /* srcs */ ImmutableSortedSet.of(androidBuildConfig.getSourcePathToOutput()),
            ImmutableSortedSet.of(),
            ResourcesParameters.of(),
            /* manifest file */ Optional.empty(),
            /* postprocessClassesCommands */ ImmutableList.of(),
            /* trackClassUsage */ javacOptions.trackClassUsage(),
            /* trackJavacPhaseEvents */ javacOptions.trackJavacPhaseEvents(),
            /* classesToRemoveFromJar */ RemoveClassesPatternsMatcher.EMPTY,
            AbiGenerationMode.CLASS,
            AbiGenerationMode.CLASS,
            ImmutableList.of(),
            false),
        ruleFinder,
        Optional.empty(),
        // The build-config rule is a declared dep so its output exists before compilation.
        ImmutableSortedSet.of(androidBuildConfig),
        /* exportedDeps */ ImmutableSortedSet.of(),
        /* providedDeps */ ImmutableSortedSet.of(),
        ImmutableSortedSet.of(),
        /* runtimeDeps */ ImmutableSortedSet.of(),
        JavaAbis.getClassAbiJar(buildTarget),
        /* sourceOnlyAbiJar */ null,
        /* mavenCoords */ Optional.empty(),
        /* tests */ ImmutableSortedSet.of(),
        /* requiredForSourceOnlyAbi */ false,
        UnusedDependenciesAction.IGNORE,
        Optional.empty(),
        null,
        false,
        false,
        false);
    this.androidBuildConfig = androidBuildConfig;
    // Sanity check: the dep passed above must have survived into the build deps.
    Preconditions.checkState(getBuildDeps().contains(androidBuildConfig));
  }

  /**
   * If an {@link AndroidPackageableCollector} is traversing this rule for an {@link AndroidBinary},
   * then it should flag itself as a class that should not be dexed and insert a new classpath entry
   * for a {@code BuildConfig} with the final values for the APK.
   */
  @Override
  public void addToCollector(AndroidPackageableCollector collector) {
    collector.addBuildConfig(
        getBuildTarget(),
        androidBuildConfig.getJavaPackage(),
        androidBuildConfig.getBuildConfigFields());
  }

  /** @return the wrapped {@link AndroidBuildConfig} rule. */
  public AndroidBuildConfig getAndroidBuildConfig() {
    return androidBuildConfig;
  }
}
/** * Copyright (C) 2011 Inqwell Ltd * * You may distribute under the terms of the Artistic License, as specified in * the README file. */ /* * $Archive: /src/com/inqwell/any/UnListen.java $ * $Author: sanderst $ * $Revision: 1.2 $ * $Date: 2011-04-07 22:18:20 $ */ package com.inqwell.any; /** * Remove a listener previously placed on a node by Listen. Returns * the node that the listener was removed from. * <p> * @author $Author: sanderst $ * @version $Revision: 1.2 $ */ public class UnListen extends AbstractFunc implements Cloneable { private Any listeningTo_; private Any dispatchingTo_; public UnListen(Any listeningTo, Any dispatchingTo) { listeningTo_ = listeningTo; dispatchingTo_ = dispatchingTo; } public Any exec(Any a) throws AnyException { EventGenerator listeningTo = (EventGenerator)EvalExpr.evalFunc (getTransaction(), a, listeningTo_, EventGenerator.class); // listeningTo cannot be null if (listeningTo == null) nullOperand(listeningTo_); // Check if the single argument version is being used if (dispatchingTo_ == null) { ListenTo.EventDispatcherListeningTo d = (ListenTo.EventDispatcherListeningTo)listeningTo; listeningTo = d.unlisten(); } else { // dispatchingTo can still resolve to null just so we can write // script like // listen(unlisten(listeningTo, dispatchingTo), ...) // when initially there is no dispatcher EventListener dispatchingTo = (EventListener)EvalExpr.evalFunc (getTransaction(), a, dispatchingTo_, EventListener.class); if (dispatchingTo != null && listeningTo != null) listeningTo.removeEventListener(dispatchingTo); } return listeningTo; } public Object clone () throws CloneNotSupportedException { UnListen u = (UnListen)super.clone(); u.listeningTo_ = listeningTo_.cloneAny(); u.dispatchingTo_ = AbstractAny.cloneOrNull(dispatchingTo_); return u; } }
package com.google.android.gms.internal.ads;

import com.google.android.gms.internal.ads.zzdob.zze;

/* renamed from: com.google.android.gms.internal.ads.zn */
// Decompiler-generated "switch map" holder class: it translates zze constant
// values (apparently 1-based, hence the "- 1" index adjustment) into dense
// case indices for a switch statement elsewhere. Each assignment sits in its
// own try/catch because a referenced constant may be missing at runtime after
// obfuscation or version skew; a missing constant leaves its slot at the int[]
// default of 0, which the consuming switch treats as "unmapped".
final /* synthetic */ class C9862zn {
    /* renamed from: a */
    // Sized from the number of zze constants reported by zze.m29910a().
    static final /* synthetic */ int[] f23749a = new int[zze.m29910a().length];

    static {
        try {
            f23749a[zze.f28073d - 1] = 1;
        } catch (NoSuchFieldError e) {
            // intentionally ignored: constant absent in this build
        }
        try {
            f23749a[zze.f28074e - 1] = 2;
        } catch (NoSuchFieldError e2) {
        }
        try {
            f23749a[zze.f28072c - 1] = 3;
        } catch (NoSuchFieldError e3) {
        }
        try {
            f23749a[zze.f28075f - 1] = 4;
        } catch (NoSuchFieldError e4) {
        }
        try {
            f23749a[zze.f28076g - 1] = 5;
        } catch (NoSuchFieldError e5) {
        }
        try {
            f23749a[zze.f28070a - 1] = 6;
        } catch (NoSuchFieldError e6) {
        }
        try {
            f23749a[zze.f28071b - 1] = 7;
        } catch (NoSuchFieldError e7) {
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.karaf.shell.util;

import java.io.PrintStream;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * Utility for printing word-wrapped, optionally prefixed (indented) text.
 */
public class IndentFormatter {

    /**
     * Prints {@code str} to {@code out}, word-wrapping each input line so that
     * prefix plus content fits within {@code termWidth} columns.
     *
     * <p>Empty input lines are printed as empty output lines (no prefix).
     * A single unbreakable token longer than the available width is emitted
     * on its own line, unbroken.
     *
     * @param prefix string prepended to each wrapped output line
     * @param str the text to print; may contain embedded '\n' line breaks
     * @param termWidth total terminal width in columns
     * @param out destination stream
     * @param prefixFirstLine whether the very first output line gets the prefix
     */
    public static void printFormatted(String prefix, String str, int termWidth, PrintStream out, boolean prefixFirstLine) {
        int pfxLen = prefix.length();
        // FIX: a prefix as wide as (or wider than) the terminal previously
        // produced maxwidth <= 0 and therefore an invalid regex quantifier
        // such as ".{1,-2}", throwing PatternSyntaxException. Clamp to 1.
        int maxwidth = Math.max(1, termWidth - pfxLen);
        // Each match is either one over-long unbreakable token (\S\S{max,})
        // or up to maxwidth characters, terminated by whitespace or line end.
        Pattern wrap = Pattern.compile("(\\S\\S{" + maxwidth + ",}|.{1," + maxwidth + "})(\\s+|$)");
        int cur = 0;
        while (cur >= 0) {
            int lst = str.indexOf('\n', cur);
            String s = (lst >= 0) ? str.substring(cur, lst) : str.substring(cur);
            if (s.isEmpty()) {
                // Preserve blank input lines without emitting the prefix.
                out.println();
            } else {
                Matcher m = wrap.matcher(s);
                while (m.find()) {
                    if (cur > 0 || prefixFirstLine) {
                        out.print(prefix);
                    }
                    out.println(m.group());
                }
            }
            if (lst >= 0) {
                cur = lst + 1;
            } else {
                break;
            }
        }
    }

    /**
     * Convenience overload that prefixes every output line, including the first.
     *
     * @param prefix string prepended to each wrapped output line
     * @param str the text to print
     * @param termWidth total terminal width in columns
     * @param out destination stream
     */
    public static void printFormatted(String prefix, String str, int termWidth, PrintStream out) {
        printFormatted(prefix, str, termWidth, out, true);
    }
}
/** * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for * license information. * * Code generated by Microsoft (R) AutoRest Code Generator. */ package com.microsoft.azure.management.batch.implementation; import retrofit2.Retrofit; import com.google.common.reflect.TypeToken; import com.microsoft.azure.AzureServiceFuture; import com.microsoft.azure.CloudException; import com.microsoft.azure.ListOperationCallback; import com.microsoft.azure.management.batch.CertificateCancelDeletionHeaders; import com.microsoft.azure.management.batch.CertificateCreateHeaders; import com.microsoft.azure.management.batch.CertificateCreateOrUpdateParameters; import com.microsoft.azure.management.batch.CertificateDeleteHeaders; import com.microsoft.azure.management.batch.CertificateGetHeaders; import com.microsoft.azure.management.batch.CertificateUpdateHeaders; import com.microsoft.azure.Page; import com.microsoft.azure.PagedList; import com.microsoft.rest.ServiceCallback; import com.microsoft.rest.ServiceFuture; import com.microsoft.rest.ServiceResponse; import com.microsoft.rest.ServiceResponseWithHeaders; import com.microsoft.rest.Validator; import java.io.IOException; import java.util.List; import okhttp3.ResponseBody; import retrofit2.http.Body; import retrofit2.http.GET; import retrofit2.http.Header; import retrofit2.http.Headers; import retrofit2.http.HTTP; import retrofit2.http.PATCH; import retrofit2.http.Path; import retrofit2.http.POST; import retrofit2.http.PUT; import retrofit2.http.Query; import retrofit2.http.Url; import retrofit2.Response; import rx.functions.Func1; import rx.Observable; /** * An instance of this class provides access to all the operations defined * in Certificates. */ public class CertificatesInner { /** The Retrofit service to perform REST calls. */ private CertificatesService service; /** The service client containing this operation class. 
 */
// NOTE: AutoRest-generated client plumbing (see the file header); manual
// edits here are normally overwritten on regeneration.
private BatchManagementClientImpl client;

/**
 * Initializes an instance of CertificatesInner.
 *
 * @param retrofit the Retrofit instance built from a Retrofit Builder.
 * @param client the instance of the service client containing this operation class.
 */
public CertificatesInner(Retrofit retrofit, BatchManagementClientImpl client) {
    this.service = retrofit.create(CertificatesService.class);
    this.client = client;
}

/**
 * The interface defining all the services for Certificates to be
 * used by Retrofit to perform actual REST calls.
 */
interface CertificatesService {
    @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.batch.Certificates listByBatchAccount" })
    @GET("subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Batch/batchAccounts/{accountName}/certificates")
    Observable<Response<ResponseBody>> listByBatchAccount(@Path("resourceGroupName") String resourceGroupName, @Path("accountName") String accountName, @Path("subscriptionId") String subscriptionId, @Query("maxresults") Integer maxresults, @Query("$select") String select, @Query("$filter") String filter, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);

    @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.batch.Certificates create" })
    @PUT("subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Batch/batchAccounts/{accountName}/certificates/{certificateName}")
    Observable<Response<ResponseBody>> create(@Path("resourceGroupName") String resourceGroupName, @Path("accountName") String accountName, @Path("certificateName") String certificateName, @Path("subscriptionId") String subscriptionId, @Body CertificateCreateOrUpdateParameters parameters, @Header("If-Match") String ifMatch, @Header("If-None-Match") String ifNoneMatch, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);

    @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.batch.Certificates beginCreate" })
    @PUT("subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Batch/batchAccounts/{accountName}/certificates/{certificateName}")
    Observable<Response<ResponseBody>> beginCreate(@Path("resourceGroupName") String resourceGroupName, @Path("accountName") String accountName, @Path("certificateName") String certificateName, @Path("subscriptionId") String subscriptionId, @Body CertificateCreateOrUpdateParameters parameters, @Header("If-Match") String ifMatch, @Header("If-None-Match") String ifNoneMatch, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);

    @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.batch.Certificates update" })
    @PATCH("subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Batch/batchAccounts/{accountName}/certificates/{certificateName}")
    Observable<Response<ResponseBody>> update(@Path("resourceGroupName") String resourceGroupName, @Path("accountName") String accountName, @Path("certificateName") String certificateName, @Path("subscriptionId") String subscriptionId, @Body CertificateCreateOrUpdateParameters parameters, @Header("If-Match") String ifMatch, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);

    // @HTTP is used instead of @DELETE so that hasBody can be set explicitly.
    @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.batch.Certificates delete" })
    @HTTP(path = "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Batch/batchAccounts/{accountName}/certificates/{certificateName}", method = "DELETE", hasBody = true)
    Observable<Response<ResponseBody>> delete(@Path("resourceGroupName") String resourceGroupName, @Path("accountName") String accountName, @Path("certificateName") String certificateName, @Path("subscriptionId") String subscriptionId, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);

    @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.batch.Certificates beginDelete" })
    @HTTP(path = "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Batch/batchAccounts/{accountName}/certificates/{certificateName}", method = "DELETE", hasBody = true)
    Observable<Response<ResponseBody>> beginDelete(@Path("resourceGroupName") String resourceGroupName, @Path("accountName") String accountName, @Path("certificateName") String certificateName, @Path("subscriptionId") String subscriptionId, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);

    @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.batch.Certificates get" })
    @GET("subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Batch/batchAccounts/{accountName}/certificates/{certificateName}")
    Observable<Response<ResponseBody>> get(@Path("resourceGroupName") String resourceGroupName, @Path("accountName") String accountName, @Path("certificateName") String certificateName, @Path("subscriptionId") String subscriptionId, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);

    @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.batch.Certificates cancelDeletion" })
    @POST("subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Batch/batchAccounts/{accountName}/certificates/{certificateName}/cancelDelete")
    Observable<Response<ResponseBody>> cancelDeletion(@Path("resourceGroupName") String resourceGroupName, @Path("accountName") String accountName, @Path("certificateName") String certificateName, @Path("subscriptionId") String subscriptionId, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);

    // Follows an opaque server-provided continuation URL, hence @Url.
    @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.batch.Certificates listByBatchAccountNext" })
    @GET
    Observable<Response<ResponseBody>> listByBatchAccountNext(@Url String nextUrl, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);

}

/**
 * Lists all of the certificates in the specified account.
 *
 * @param resourceGroupName The name of the resource group that contains the Batch account.
 * @param accountName The name of the Batch account.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @throws CloudException thrown if the request is rejected by server
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
 * @return the PagedList&lt;CertificateInner&gt; object if successful.
 */
public PagedList<CertificateInner> listByBatchAccount(final String resourceGroupName, final String accountName) {
    // Blocks on the first page; subsequent pages are fetched lazily by PagedList.
    ServiceResponse<Page<CertificateInner>> response = listByBatchAccountSinglePageAsync(resourceGroupName, accountName).toBlocking().single();
    return new PagedList<CertificateInner>(response.body()) {
        @Override
        public Page<CertificateInner> nextPage(String nextPageLink) {
            return listByBatchAccountNextSinglePageAsync(nextPageLink).toBlocking().single().body();
        }
    };
}

/**
 * Lists all of the certificates in the specified account.
 *
 * @param resourceGroupName The name of the resource group that contains the Batch account.
 * @param accountName The name of the Batch account.
 * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the {@link ServiceFuture} object
 */
public ServiceFuture<List<CertificateInner>> listByBatchAccountAsync(final String resourceGroupName, final String accountName, final ListOperationCallback<CertificateInner> serviceCallback) {
    return AzureServiceFuture.fromPageResponse(
        listByBatchAccountSinglePageAsync(resourceGroupName, accountName),
        new Func1<String, Observable<ServiceResponse<Page<CertificateInner>>>>() {
            @Override
            public Observable<ServiceResponse<Page<CertificateInner>>> call(String nextPageLink) {
                return listByBatchAccountNextSinglePageAsync(nextPageLink);
            }
        },
        serviceCallback);
}

/**
 * Lists all of the certificates in the specified account.
 *
 * @param resourceGroupName The name of the resource group that contains the Batch account.
 * @param accountName The name of the Batch account.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the observable to the PagedList&lt;CertificateInner&gt; object
 */
public Observable<Page<CertificateInner>> listByBatchAccountAsync(final String resourceGroupName, final String accountName) {
    return listByBatchAccountWithServiceResponseAsync(resourceGroupName, accountName)
        .map(new Func1<ServiceResponse<Page<CertificateInner>>, Page<CertificateInner>>() {
            @Override
            public Page<CertificateInner> call(ServiceResponse<Page<CertificateInner>> response) {
                return response.body();
            }
        });
}

/**
 * Lists all of the certificates in the specified account.
 *
 * @param resourceGroupName The name of the resource group that contains the Batch account.
 * @param accountName The name of the Batch account.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the observable to the PagedList&lt;CertificateInner&gt; object
 */
public Observable<ServiceResponse<Page<CertificateInner>>> listByBatchAccountWithServiceResponseAsync(final String resourceGroupName, final String accountName) {
    return listByBatchAccountSinglePageAsync(resourceGroupName, accountName)
        .concatMap(new Func1<ServiceResponse<Page<CertificateInner>>, Observable<ServiceResponse<Page<CertificateInner>>>>() {
            @Override
            public Observable<ServiceResponse<Page<CertificateInner>>> call(ServiceResponse<Page<CertificateInner>> page) {
                String nextPageLink = page.body().nextPageLink();
                if (nextPageLink == null) {
                    // Last page: terminate the pagination chain.
                    return Observable.just(page);
                }
                return Observable.just(page).concatWith(listByBatchAccountNextWithServiceResponseAsync(nextPageLink));
            }
        });
}

/**
 * Lists all of the certificates in the specified account.
 *
 * @param resourceGroupName The name of the resource group that contains the Batch account.
 * @param accountName The name of the Batch account.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the PagedList&lt;CertificateInner&gt; object wrapped in {@link ServiceResponse} if successful.
 */
public Observable<ServiceResponse<Page<CertificateInner>>> listByBatchAccountSinglePageAsync(final String resourceGroupName, final String accountName) {
    if (resourceGroupName == null) {
        throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
    }
    if (accountName == null) {
        throw new IllegalArgumentException("Parameter accountName is required and cannot be null.");
    }
    if (this.client.subscriptionId() == null) {
        throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
    }
    if (this.client.apiVersion() == null) {
        throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
    }
    // Overload without optional query parameters: send them all as null.
    final Integer maxresults = null;
    final String select = null;
    final String filter = null;
    return service.listByBatchAccount(resourceGroupName, accountName, this.client.subscriptionId(), maxresults, select, filter, this.client.apiVersion(), this.client.acceptLanguage(), this.client.userAgent())
        .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Page<CertificateInner>>>>() {
            @Override
            public Observable<ServiceResponse<Page<CertificateInner>>> call(Response<ResponseBody> response) {
                try {
                    ServiceResponse<PageImpl<CertificateInner>> result = listByBatchAccountDelegate(response);
                    return Observable.just(new ServiceResponse<Page<CertificateInner>>(result.body(), result.response()));
                } catch (Throwable t) {
                    return Observable.error(t);
                }
            }
        });
}

/**
 * Lists all of the certificates in the specified account.
 *
 * @param resourceGroupName The name of the resource group that contains the Batch account.
 * @param accountName The name of the Batch account.
 * @param maxresults The maximum number of items to return in the response.
 * @param select Comma separated list of properties that should be returned. e.g. "properties/provisioningState". Only top level properties under properties/ are valid for selection.
 * @param filter OData filter expression.
Valid properties for filtering are "properties/provisioningState", "properties/provisioningStateTransitionTime", "name".
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @throws CloudException thrown if the request is rejected by server
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
 * @return the PagedList&lt;CertificateInner&gt; object if successful.
 */
public PagedList<CertificateInner> listByBatchAccount(final String resourceGroupName, final String accountName, final Integer maxresults, final String select, final String filter) {
    // Blocks on the first page; subsequent pages are fetched lazily by PagedList.
    ServiceResponse<Page<CertificateInner>> response = listByBatchAccountSinglePageAsync(resourceGroupName, accountName, maxresults, select, filter).toBlocking().single();
    return new PagedList<CertificateInner>(response.body()) {
        @Override
        public Page<CertificateInner> nextPage(String nextPageLink) {
            return listByBatchAccountNextSinglePageAsync(nextPageLink).toBlocking().single().body();
        }
    };
}

/**
 * Lists all of the certificates in the specified account.
 *
 * @param resourceGroupName The name of the resource group that contains the Batch account.
 * @param accountName The name of the Batch account.
 * @param maxresults The maximum number of items to return in the response.
 * @param select Comma separated list of properties that should be returned. e.g. "properties/provisioningState". Only top level properties under properties/ are valid for selection.
 * @param filter OData filter expression. Valid properties for filtering are "properties/provisioningState", "properties/provisioningStateTransitionTime", "name".
 * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the {@link ServiceFuture} object
 */
public ServiceFuture<List<CertificateInner>> listByBatchAccountAsync(final String resourceGroupName, final String accountName, final Integer maxresults, final String select, final String filter, final ListOperationCallback<CertificateInner> serviceCallback) {
    return AzureServiceFuture.fromPageResponse(
        listByBatchAccountSinglePageAsync(resourceGroupName, accountName, maxresults, select, filter),
        new Func1<String, Observable<ServiceResponse<Page<CertificateInner>>>>() {
            @Override
            public Observable<ServiceResponse<Page<CertificateInner>>> call(String nextPageLink) {
                return listByBatchAccountNextSinglePageAsync(nextPageLink);
            }
        },
        serviceCallback);
}

/**
 * Lists all of the certificates in the specified account.
 *
 * @param resourceGroupName The name of the resource group that contains the Batch account.
 * @param accountName The name of the Batch account.
 * @param maxresults The maximum number of items to return in the response.
 * @param select Comma separated list of properties that should be returned. e.g. "properties/provisioningState". Only top level properties under properties/ are valid for selection.
 * @param filter OData filter expression. Valid properties for filtering are "properties/provisioningState", "properties/provisioningStateTransitionTime", "name".
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the observable to the PagedList&lt;CertificateInner&gt; object
 */
public Observable<Page<CertificateInner>> listByBatchAccountAsync(final String resourceGroupName, final String accountName, final Integer maxresults, final String select, final String filter) {
    return listByBatchAccountWithServiceResponseAsync(resourceGroupName, accountName, maxresults, select, filter)
        .map(new Func1<ServiceResponse<Page<CertificateInner>>, Page<CertificateInner>>() {
            @Override
            public Page<CertificateInner> call(ServiceResponse<Page<CertificateInner>> response) {
                return response.body();
            }
        });
}

/**
 * Lists all of the certificates in the specified account.
 *
 * @param resourceGroupName The name of the resource group that contains the Batch account.
 * @param accountName The name of the Batch account.
 * @param maxresults The maximum number of items to return in the response.
 * @param select Comma separated list of properties that should be returned. e.g. "properties/provisioningState". Only top level properties under properties/ are valid for selection.
 * @param filter OData filter expression. Valid properties for filtering are "properties/provisioningState", "properties/provisioningStateTransitionTime", "name".
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the observable to the PagedList&lt;CertificateInner&gt; object
 */
public Observable<ServiceResponse<Page<CertificateInner>>> listByBatchAccountWithServiceResponseAsync(final String resourceGroupName, final String accountName, final Integer maxresults, final String select, final String filter) {
    return listByBatchAccountSinglePageAsync(resourceGroupName, accountName, maxresults, select, filter)
        .concatMap(new Func1<ServiceResponse<Page<CertificateInner>>, Observable<ServiceResponse<Page<CertificateInner>>>>() {
            @Override
            public Observable<ServiceResponse<Page<CertificateInner>>> call(ServiceResponse<Page<CertificateInner>> page) {
                String nextPageLink = page.body().nextPageLink();
                if (nextPageLink == null) {
                    // Last page: terminate the pagination chain.
                    return Observable.just(page);
                }
                return Observable.just(page).concatWith(listByBatchAccountNextWithServiceResponseAsync(nextPageLink));
            }
        });
}

/**
 * Lists all of the certificates in the specified account.
 * (Cleaned up: the generated javadoc contained stray
 * "ServiceResponse&lt;PageImpl&lt;CertificateInner&gt;&gt;" artifacts.)
 *
 * @param resourceGroupName The name of the resource group that contains the Batch account.
 * @param accountName The name of the Batch account.
 * @param maxresults The maximum number of items to return in the response.
 * @param select Comma separated list of properties that should be returned. e.g. "properties/provisioningState". Only top level properties under properties/ are valid for selection.
 * @param filter OData filter expression. Valid properties for filtering are "properties/provisioningState", "properties/provisioningStateTransitionTime", "name".
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the PagedList&lt;CertificateInner&gt; object wrapped in {@link ServiceResponse} if successful.
 */
public Observable<ServiceResponse<Page<CertificateInner>>> listByBatchAccountSinglePageAsync(final String resourceGroupName, final String accountName, final Integer maxresults, final String select, final String filter) {
    if (resourceGroupName == null) {
        throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
    }
    if (accountName == null) {
        throw new IllegalArgumentException("Parameter accountName is required and cannot be null.");
    }
    if (this.client.subscriptionId() == null) {
        throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
    }
    if (this.client.apiVersion() == null) {
        throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
    }
    return service.listByBatchAccount(resourceGroupName, accountName, this.client.subscriptionId(), maxresults, select, filter, this.client.apiVersion(), this.client.acceptLanguage(), this.client.userAgent())
        .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Page<CertificateInner>>>>() {
            @Override
            public Observable<ServiceResponse<Page<CertificateInner>>> call(Response<ResponseBody> response) {
                try {
                    ServiceResponse<PageImpl<CertificateInner>> result = listByBatchAccountDelegate(response);
                    return Observable.just(new ServiceResponse<Page<CertificateInner>>(result.body(), result.response()));
                } catch (Throwable t) {
                    return Observable.error(t);
                }
            }
        });
}

// Deserializes a raw page response; only HTTP 200 is a success, everything
// else surfaces as CloudException.
private ServiceResponse<PageImpl<CertificateInner>> listByBatchAccountDelegate(Response<ResponseBody> response) throws CloudException, IOException, IllegalArgumentException {
    return this.client.restClient().responseBuilderFactory().<PageImpl<CertificateInner>, CloudException>newInstance(this.client.serializerAdapter())
        .register(200, new TypeToken<PageImpl<CertificateInner>>() { }.getType())
        .registerError(CloudException.class)
        .build(response);
}

/**
 * Creates a new certificate inside the specified account.
 *
 * @param resourceGroupName The name of the resource group that contains the Batch account.
 * @param accountName The name of the Batch account.
 * @param certificateName The identifier for the certificate. This must be made up of algorithm and thumbprint separated by a dash, and must match the certificate data in the request. For example SHA1-a3d1c5.
 * @param parameters Additional parameters for certificate creation.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @throws CloudException thrown if the request is rejected by server
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
 * @return the CertificateInner object if successful.
 */
public CertificateInner create(String resourceGroupName, String accountName, String certificateName, CertificateCreateOrUpdateParameters parameters) {
    // Long-running operation: last() blocks until the polling chain completes.
    return createWithServiceResponseAsync(resourceGroupName, accountName, certificateName, parameters).toBlocking().last().body();
}

/**
 * Creates a new certificate inside the specified account.
 *
 * @param resourceGroupName The name of the resource group that contains the Batch account.
 * @param accountName The name of the Batch account.
 * @param certificateName The identifier for the certificate. This must be made up of algorithm and thumbprint separated by a dash, and must match the certificate data in the request. For example SHA1-a3d1c5.
 * @param parameters Additional parameters for certificate creation.
 * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the {@link ServiceFuture} object
 */
public ServiceFuture<CertificateInner> createAsync(String resourceGroupName, String accountName, String certificateName, CertificateCreateOrUpdateParameters parameters, final ServiceCallback<CertificateInner> serviceCallback) {
    return ServiceFuture.fromHeaderResponse(createWithServiceResponseAsync(resourceGroupName, accountName, certificateName, parameters), serviceCallback);
}

/**
 * Creates a new certificate inside the specified account.
 *
 * @param resourceGroupName The name of the resource group that contains the Batch account.
 * @param accountName The name of the Batch account.
 * @param certificateName The identifier for the certificate. This must be made up of algorithm and thumbprint separated by a dash, and must match the certificate data in the request. For example SHA1-a3d1c5.
 * @param parameters Additional parameters for certificate creation.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the observable for the request
 */
public Observable<CertificateInner> createAsync(String resourceGroupName, String accountName, String certificateName, CertificateCreateOrUpdateParameters parameters) {
    return createWithServiceResponseAsync(resourceGroupName, accountName, certificateName, parameters).map(new Func1<ServiceResponseWithHeaders<CertificateInner, CertificateCreateHeaders>, CertificateInner>() {
        @Override
        public CertificateInner call(ServiceResponseWithHeaders<CertificateInner, CertificateCreateHeaders> response) {
            return response.body();
        }
    });
}

/**
 * Creates a new certificate inside the specified account.
 *
 * @param resourceGroupName The name of the resource group that contains the Batch account.
 * @param accountName The name of the Batch account.
 * @param certificateName The identifier for the certificate. This must be made up of algorithm and thumbprint separated by a dash, and must match the certificate data in the request. For example SHA1-a3d1c5.
 * @param parameters Additional parameters for certificate creation.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the observable for the request
 */
public Observable<ServiceResponseWithHeaders<CertificateInner, CertificateCreateHeaders>> createWithServiceResponseAsync(String resourceGroupName, String accountName, String certificateName, CertificateCreateOrUpdateParameters parameters) {
    if (resourceGroupName == null) {
        throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
    }
    if (accountName == null) {
        throw new IllegalArgumentException("Parameter accountName is required and cannot be null.");
    }
    if (certificateName == null) {
        throw new IllegalArgumentException("Parameter certificateName is required and cannot be null.");
    }
    if (this.client.subscriptionId() == null) {
        throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
    }
    if (parameters == null) {
        throw new IllegalArgumentException("Parameter parameters is required and cannot be null.");
    }
    if (this.client.apiVersion() == null) {
        throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
    }
    Validator.validate(parameters);
    // Overload without ETag preconditions: both headers sent as null.
    final String ifMatch = null;
    final String ifNoneMatch = null;
    Observable<Response<ResponseBody>> observable = service.create(resourceGroupName, accountName, certificateName, this.client.subscriptionId(), parameters, ifMatch, ifNoneMatch, this.client.apiVersion(), this.client.acceptLanguage(), this.client.userAgent());
    // create is a long-running operation; poll until completion and surface
    // the CertificateCreateHeaders alongside the body.
    return client.getAzureClient().getPutOrPatchResultWithHeadersAsync(observable, new TypeToken<CertificateInner>() { }.getType(), CertificateCreateHeaders.class);
}

/**
 * Creates a new certificate inside the specified account.
 *
 * @param resourceGroupName The name of the resource group that contains the Batch account.
 * @param accountName The name of the Batch account.
 * @param certificateName The identifier for the certificate. This must be made up of algorithm and thumbprint separated by a dash, and must match the certificate data in the request. For example SHA1-a3d1c5.
 * @param parameters Additional parameters for certificate creation.
 * @param ifMatch The entity state (ETag) version of the certificate to update. A value of "*" can be used to apply the operation only if the certificate already exists. If omitted, this operation will always be applied.
 * @param ifNoneMatch Set to '*' to allow a new certificate to be created, but to prevent updating an existing certificate. Other values will be ignored.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @throws CloudException thrown if the request is rejected by server
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
 * @return the CertificateInner object if successful.
 */
public CertificateInner create(String resourceGroupName, String accountName, String certificateName, CertificateCreateOrUpdateParameters parameters, String ifMatch, String ifNoneMatch) {
    // Long-running operation: last() blocks until the polling chain completes.
    return createWithServiceResponseAsync(resourceGroupName, accountName, certificateName, parameters, ifMatch, ifNoneMatch).toBlocking().last().body();
}

/**
 * Creates a new certificate inside the specified account.
 *
 * @param resourceGroupName The name of the resource group that contains the Batch account.
 * @param accountName The name of the Batch account.
 * @param certificateName The identifier for the certificate. This must be made up of algorithm and thumbprint separated by a dash, and must match the certificate data in the request. For example SHA1-a3d1c5.
 * @param parameters Additional parameters for certificate creation.
 * @param ifMatch The entity state (ETag) version of the certificate to update. A value of "*" can be used to apply the operation only if the certificate already exists. If omitted, this operation will always be applied.
 * @param ifNoneMatch Set to '*' to allow a new certificate to be created, but to prevent updating an existing certificate. Other values will be ignored.
 * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the {@link ServiceFuture} object
 */
public ServiceFuture<CertificateInner> createAsync(String resourceGroupName, String accountName, String certificateName, CertificateCreateOrUpdateParameters parameters, String ifMatch, String ifNoneMatch, final ServiceCallback<CertificateInner> serviceCallback) {
    return ServiceFuture.fromHeaderResponse(createWithServiceResponseAsync(resourceGroupName, accountName, certificateName, parameters, ifMatch, ifNoneMatch), serviceCallback);
}

/**
 * Creates a new certificate inside the specified account.
 *
 * @param resourceGroupName The name of the resource group that contains the Batch account.
 * @param accountName The name of the Batch account.
 * @param certificateName The identifier for the certificate. This must be made up of algorithm and thumbprint separated by a dash, and must match the certificate data in the request. For example SHA1-a3d1c5.
 * @param parameters Additional parameters for certificate creation.
 * @param ifMatch The entity state (ETag) version of the certificate to update. A value of "*" can be used to apply the operation only if the certificate already exists. If omitted, this operation will always be applied.
 * @param ifNoneMatch Set to '*' to allow a new certificate to be created, but to prevent updating an existing certificate. Other values will be ignored.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the observable for the request
 */
public Observable<CertificateInner> createAsync(String resourceGroupName, String accountName, String certificateName, CertificateCreateOrUpdateParameters parameters, String ifMatch, String ifNoneMatch) {
    return createWithServiceResponseAsync(resourceGroupName, accountName, certificateName, parameters, ifMatch, ifNoneMatch).map(new Func1<ServiceResponseWithHeaders<CertificateInner, CertificateCreateHeaders>, CertificateInner>() {
        @Override
        public CertificateInner call(ServiceResponseWithHeaders<CertificateInner, CertificateCreateHeaders> response) {
            return response.body();
        }
    });
}

/**
 * Creates a new certificate inside the specified account.
 *
 * @param resourceGroupName The name of the resource group that contains the Batch account.
 * @param accountName The name of the Batch account.
 * @param certificateName The identifier for the certificate. This must be made up of algorithm and thumbprint separated by a dash, and must match the certificate data in the request. For example SHA1-a3d1c5.
 * @param parameters Additional parameters for certificate creation.
 * @param ifMatch The entity state (ETag) version of the certificate to update. A value of "*" can be used to apply the operation only if the certificate already exists. If omitted, this operation will always be applied.
 * @param ifNoneMatch Set to '*' to allow a new certificate to be created, but to prevent updating an existing certificate. Other values will be ignored.
* @throws IllegalArgumentException thrown if parameters fail the validation * @return the observable for the request */ public Observable<ServiceResponseWithHeaders<CertificateInner, CertificateCreateHeaders>> createWithServiceResponseAsync(String resourceGroupName, String accountName, String certificateName, CertificateCreateOrUpdateParameters parameters, String ifMatch, String ifNoneMatch) { if (resourceGroupName == null) { throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."); } if (accountName == null) { throw new IllegalArgumentException("Parameter accountName is required and cannot be null."); } if (certificateName == null) { throw new IllegalArgumentException("Parameter certificateName is required and cannot be null."); } if (this.client.subscriptionId() == null) { throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null."); } if (parameters == null) { throw new IllegalArgumentException("Parameter parameters is required and cannot be null."); } if (this.client.apiVersion() == null) { throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null."); } Validator.validate(parameters); Observable<Response<ResponseBody>> observable = service.create(resourceGroupName, accountName, certificateName, this.client.subscriptionId(), parameters, ifMatch, ifNoneMatch, this.client.apiVersion(), this.client.acceptLanguage(), this.client.userAgent()); return client.getAzureClient().getPutOrPatchResultWithHeadersAsync(observable, new TypeToken<CertificateInner>() { }.getType(), CertificateCreateHeaders.class); } /** * Creates a new certificate inside the specified account. * * @param resourceGroupName The name of the resource group that contains the Batch account. * @param accountName The name of the Batch account. * @param certificateName The identifier for the certificate. 
This must be made up of algorithm and thumbprint separated by a dash, and must match the certificate data in the request. For example SHA1-a3d1c5. * @param parameters Additional parameters for certificate creation. * @throws IllegalArgumentException thrown if parameters fail the validation * @throws CloudException thrown if the request is rejected by server * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent * @return the CertificateInner object if successful. */ public CertificateInner beginCreate(String resourceGroupName, String accountName, String certificateName, CertificateCreateOrUpdateParameters parameters) { return beginCreateWithServiceResponseAsync(resourceGroupName, accountName, certificateName, parameters).toBlocking().single().body(); } /** * Creates a new certificate inside the specified account. * * @param resourceGroupName The name of the resource group that contains the Batch account. * @param accountName The name of the Batch account. * @param certificateName The identifier for the certificate. This must be made up of algorithm and thumbprint separated by a dash, and must match the certificate data in the request. For example SHA1-a3d1c5. * @param parameters Additional parameters for certificate creation. * @param serviceCallback the async ServiceCallback to handle successful and failed responses. * @throws IllegalArgumentException thrown if parameters fail the validation * @return the {@link ServiceFuture} object */ public ServiceFuture<CertificateInner> beginCreateAsync(String resourceGroupName, String accountName, String certificateName, CertificateCreateOrUpdateParameters parameters, final ServiceCallback<CertificateInner> serviceCallback) { return ServiceFuture.fromHeaderResponse(beginCreateWithServiceResponseAsync(resourceGroupName, accountName, certificateName, parameters), serviceCallback); } /** * Creates a new certificate inside the specified account. 
* * @param resourceGroupName The name of the resource group that contains the Batch account. * @param accountName The name of the Batch account. * @param certificateName The identifier for the certificate. This must be made up of algorithm and thumbprint separated by a dash, and must match the certificate data in the request. For example SHA1-a3d1c5. * @param parameters Additional parameters for certificate creation. * @throws IllegalArgumentException thrown if parameters fail the validation * @return the observable to the CertificateInner object */ public Observable<CertificateInner> beginCreateAsync(String resourceGroupName, String accountName, String certificateName, CertificateCreateOrUpdateParameters parameters) { return beginCreateWithServiceResponseAsync(resourceGroupName, accountName, certificateName, parameters).map(new Func1<ServiceResponseWithHeaders<CertificateInner, CertificateCreateHeaders>, CertificateInner>() { @Override public CertificateInner call(ServiceResponseWithHeaders<CertificateInner, CertificateCreateHeaders> response) { return response.body(); } }); } /** * Creates a new certificate inside the specified account. * * @param resourceGroupName The name of the resource group that contains the Batch account. * @param accountName The name of the Batch account. * @param certificateName The identifier for the certificate. This must be made up of algorithm and thumbprint separated by a dash, and must match the certificate data in the request. For example SHA1-a3d1c5. * @param parameters Additional parameters for certificate creation. 
* @throws IllegalArgumentException thrown if parameters fail the validation * @return the observable to the CertificateInner object */ public Observable<ServiceResponseWithHeaders<CertificateInner, CertificateCreateHeaders>> beginCreateWithServiceResponseAsync(String resourceGroupName, String accountName, String certificateName, CertificateCreateOrUpdateParameters parameters) { if (resourceGroupName == null) { throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."); } if (accountName == null) { throw new IllegalArgumentException("Parameter accountName is required and cannot be null."); } if (certificateName == null) { throw new IllegalArgumentException("Parameter certificateName is required and cannot be null."); } if (this.client.subscriptionId() == null) { throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null."); } if (parameters == null) { throw new IllegalArgumentException("Parameter parameters is required and cannot be null."); } if (this.client.apiVersion() == null) { throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null."); } Validator.validate(parameters); final String ifMatch = null; final String ifNoneMatch = null; return service.beginCreate(resourceGroupName, accountName, certificateName, this.client.subscriptionId(), parameters, ifMatch, ifNoneMatch, this.client.apiVersion(), this.client.acceptLanguage(), this.client.userAgent()) .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponseWithHeaders<CertificateInner, CertificateCreateHeaders>>>() { @Override public Observable<ServiceResponseWithHeaders<CertificateInner, CertificateCreateHeaders>> call(Response<ResponseBody> response) { try { ServiceResponseWithHeaders<CertificateInner, CertificateCreateHeaders> clientResponse = beginCreateDelegate(response); return Observable.just(clientResponse); } catch (Throwable t) { return Observable.error(t); } } 
}); } /** * Creates a new certificate inside the specified account. * * @param resourceGroupName The name of the resource group that contains the Batch account. * @param accountName The name of the Batch account. * @param certificateName The identifier for the certificate. This must be made up of algorithm and thumbprint separated by a dash, and must match the certificate data in the request. For example SHA1-a3d1c5. * @param parameters Additional parameters for certificate creation. * @param ifMatch The entity state (ETag) version of the certificate to update. A value of "*" can be used to apply the operation only if the certificate already exists. If omitted, this operation will always be applied. * @param ifNoneMatch Set to '*' to allow a new certificate to be created, but to prevent updating an existing certificate. Other values will be ignored. * @throws IllegalArgumentException thrown if parameters fail the validation * @throws CloudException thrown if the request is rejected by server * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent * @return the CertificateInner object if successful. */ public CertificateInner beginCreate(String resourceGroupName, String accountName, String certificateName, CertificateCreateOrUpdateParameters parameters, String ifMatch, String ifNoneMatch) { return beginCreateWithServiceResponseAsync(resourceGroupName, accountName, certificateName, parameters, ifMatch, ifNoneMatch).toBlocking().single().body(); } /** * Creates a new certificate inside the specified account. * * @param resourceGroupName The name of the resource group that contains the Batch account. * @param accountName The name of the Batch account. * @param certificateName The identifier for the certificate. This must be made up of algorithm and thumbprint separated by a dash, and must match the certificate data in the request. For example SHA1-a3d1c5. * @param parameters Additional parameters for certificate creation. 
* @param ifMatch The entity state (ETag) version of the certificate to update. A value of "*" can be used to apply the operation only if the certificate already exists. If omitted, this operation will always be applied. * @param ifNoneMatch Set to '*' to allow a new certificate to be created, but to prevent updating an existing certificate. Other values will be ignored. * @param serviceCallback the async ServiceCallback to handle successful and failed responses. * @throws IllegalArgumentException thrown if parameters fail the validation * @return the {@link ServiceFuture} object */ public ServiceFuture<CertificateInner> beginCreateAsync(String resourceGroupName, String accountName, String certificateName, CertificateCreateOrUpdateParameters parameters, String ifMatch, String ifNoneMatch, final ServiceCallback<CertificateInner> serviceCallback) { return ServiceFuture.fromHeaderResponse(beginCreateWithServiceResponseAsync(resourceGroupName, accountName, certificateName, parameters, ifMatch, ifNoneMatch), serviceCallback); } /** * Creates a new certificate inside the specified account. * * @param resourceGroupName The name of the resource group that contains the Batch account. * @param accountName The name of the Batch account. * @param certificateName The identifier for the certificate. This must be made up of algorithm and thumbprint separated by a dash, and must match the certificate data in the request. For example SHA1-a3d1c5. * @param parameters Additional parameters for certificate creation. * @param ifMatch The entity state (ETag) version of the certificate to update. A value of "*" can be used to apply the operation only if the certificate already exists. If omitted, this operation will always be applied. * @param ifNoneMatch Set to '*' to allow a new certificate to be created, but to prevent updating an existing certificate. Other values will be ignored. 
* @throws IllegalArgumentException thrown if parameters fail the validation * @return the observable to the CertificateInner object */ public Observable<CertificateInner> beginCreateAsync(String resourceGroupName, String accountName, String certificateName, CertificateCreateOrUpdateParameters parameters, String ifMatch, String ifNoneMatch) { return beginCreateWithServiceResponseAsync(resourceGroupName, accountName, certificateName, parameters, ifMatch, ifNoneMatch).map(new Func1<ServiceResponseWithHeaders<CertificateInner, CertificateCreateHeaders>, CertificateInner>() { @Override public CertificateInner call(ServiceResponseWithHeaders<CertificateInner, CertificateCreateHeaders> response) { return response.body(); } }); } /** * Creates a new certificate inside the specified account. * * @param resourceGroupName The name of the resource group that contains the Batch account. * @param accountName The name of the Batch account. * @param certificateName The identifier for the certificate. This must be made up of algorithm and thumbprint separated by a dash, and must match the certificate data in the request. For example SHA1-a3d1c5. * @param parameters Additional parameters for certificate creation. * @param ifMatch The entity state (ETag) version of the certificate to update. A value of "*" can be used to apply the operation only if the certificate already exists. If omitted, this operation will always be applied. * @param ifNoneMatch Set to '*' to allow a new certificate to be created, but to prevent updating an existing certificate. Other values will be ignored. 
* @throws IllegalArgumentException thrown if parameters fail the validation * @return the observable to the CertificateInner object */ public Observable<ServiceResponseWithHeaders<CertificateInner, CertificateCreateHeaders>> beginCreateWithServiceResponseAsync(String resourceGroupName, String accountName, String certificateName, CertificateCreateOrUpdateParameters parameters, String ifMatch, String ifNoneMatch) { if (resourceGroupName == null) { throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."); } if (accountName == null) { throw new IllegalArgumentException("Parameter accountName is required and cannot be null."); } if (certificateName == null) { throw new IllegalArgumentException("Parameter certificateName is required and cannot be null."); } if (this.client.subscriptionId() == null) { throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null."); } if (parameters == null) { throw new IllegalArgumentException("Parameter parameters is required and cannot be null."); } if (this.client.apiVersion() == null) { throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null."); } Validator.validate(parameters); return service.beginCreate(resourceGroupName, accountName, certificateName, this.client.subscriptionId(), parameters, ifMatch, ifNoneMatch, this.client.apiVersion(), this.client.acceptLanguage(), this.client.userAgent()) .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponseWithHeaders<CertificateInner, CertificateCreateHeaders>>>() { @Override public Observable<ServiceResponseWithHeaders<CertificateInner, CertificateCreateHeaders>> call(Response<ResponseBody> response) { try { ServiceResponseWithHeaders<CertificateInner, CertificateCreateHeaders> clientResponse = beginCreateDelegate(response); return Observable.just(clientResponse); } catch (Throwable t) { return Observable.error(t); } } }); } private 
ServiceResponseWithHeaders<CertificateInner, CertificateCreateHeaders> beginCreateDelegate(Response<ResponseBody> response) throws CloudException, IOException, IllegalArgumentException { return this.client.restClient().responseBuilderFactory().<CertificateInner, CloudException>newInstance(this.client.serializerAdapter()) .register(200, new TypeToken<CertificateInner>() { }.getType()) .registerError(CloudException.class) .buildWithHeaders(response, CertificateCreateHeaders.class); } /** * Updates the properties of an existing certificate. * * @param resourceGroupName The name of the resource group that contains the Batch account. * @param accountName The name of the Batch account. * @param certificateName The identifier for the certificate. This must be made up of algorithm and thumbprint separated by a dash, and must match the certificate data in the request. For example SHA1-a3d1c5. * @param parameters Certificate entity to update. * @throws IllegalArgumentException thrown if parameters fail the validation * @throws CloudException thrown if the request is rejected by server * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent * @return the CertificateInner object if successful. */ public CertificateInner update(String resourceGroupName, String accountName, String certificateName, CertificateCreateOrUpdateParameters parameters) { return updateWithServiceResponseAsync(resourceGroupName, accountName, certificateName, parameters).toBlocking().single().body(); } /** * Updates the properties of an existing certificate. * * @param resourceGroupName The name of the resource group that contains the Batch account. * @param accountName The name of the Batch account. * @param certificateName The identifier for the certificate. This must be made up of algorithm and thumbprint separated by a dash, and must match the certificate data in the request. For example SHA1-a3d1c5. * @param parameters Certificate entity to update. 
* @param serviceCallback the async ServiceCallback to handle successful and failed responses. * @throws IllegalArgumentException thrown if parameters fail the validation * @return the {@link ServiceFuture} object */ public ServiceFuture<CertificateInner> updateAsync(String resourceGroupName, String accountName, String certificateName, CertificateCreateOrUpdateParameters parameters, final ServiceCallback<CertificateInner> serviceCallback) { return ServiceFuture.fromHeaderResponse(updateWithServiceResponseAsync(resourceGroupName, accountName, certificateName, parameters), serviceCallback); } /** * Updates the properties of an existing certificate. * * @param resourceGroupName The name of the resource group that contains the Batch account. * @param accountName The name of the Batch account. * @param certificateName The identifier for the certificate. This must be made up of algorithm and thumbprint separated by a dash, and must match the certificate data in the request. For example SHA1-a3d1c5. * @param parameters Certificate entity to update. * @throws IllegalArgumentException thrown if parameters fail the validation * @return the observable to the CertificateInner object */ public Observable<CertificateInner> updateAsync(String resourceGroupName, String accountName, String certificateName, CertificateCreateOrUpdateParameters parameters) { return updateWithServiceResponseAsync(resourceGroupName, accountName, certificateName, parameters).map(new Func1<ServiceResponseWithHeaders<CertificateInner, CertificateUpdateHeaders>, CertificateInner>() { @Override public CertificateInner call(ServiceResponseWithHeaders<CertificateInner, CertificateUpdateHeaders> response) { return response.body(); } }); } /** * Updates the properties of an existing certificate. * * @param resourceGroupName The name of the resource group that contains the Batch account. * @param accountName The name of the Batch account. * @param certificateName The identifier for the certificate. 
This must be made up of algorithm and thumbprint separated by a dash, and must match the certificate data in the request. For example SHA1-a3d1c5. * @param parameters Certificate entity to update. * @throws IllegalArgumentException thrown if parameters fail the validation * @return the observable to the CertificateInner object */ public Observable<ServiceResponseWithHeaders<CertificateInner, CertificateUpdateHeaders>> updateWithServiceResponseAsync(String resourceGroupName, String accountName, String certificateName, CertificateCreateOrUpdateParameters parameters) { if (resourceGroupName == null) { throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."); } if (accountName == null) { throw new IllegalArgumentException("Parameter accountName is required and cannot be null."); } if (certificateName == null) { throw new IllegalArgumentException("Parameter certificateName is required and cannot be null."); } if (this.client.subscriptionId() == null) { throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null."); } if (parameters == null) { throw new IllegalArgumentException("Parameter parameters is required and cannot be null."); } if (this.client.apiVersion() == null) { throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null."); } Validator.validate(parameters); final String ifMatch = null; return service.update(resourceGroupName, accountName, certificateName, this.client.subscriptionId(), parameters, ifMatch, this.client.apiVersion(), this.client.acceptLanguage(), this.client.userAgent()) .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponseWithHeaders<CertificateInner, CertificateUpdateHeaders>>>() { @Override public Observable<ServiceResponseWithHeaders<CertificateInner, CertificateUpdateHeaders>> call(Response<ResponseBody> response) { try { ServiceResponseWithHeaders<CertificateInner, CertificateUpdateHeaders> 
clientResponse = updateDelegate(response); return Observable.just(clientResponse); } catch (Throwable t) { return Observable.error(t); } } }); } /** * Updates the properties of an existing certificate. * * @param resourceGroupName The name of the resource group that contains the Batch account. * @param accountName The name of the Batch account. * @param certificateName The identifier for the certificate. This must be made up of algorithm and thumbprint separated by a dash, and must match the certificate data in the request. For example SHA1-a3d1c5. * @param parameters Certificate entity to update. * @param ifMatch The entity state (ETag) version of the certificate to update. This value can be omitted or set to "*" to apply the operation unconditionally. * @throws IllegalArgumentException thrown if parameters fail the validation * @throws CloudException thrown if the request is rejected by server * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent * @return the CertificateInner object if successful. */ public CertificateInner update(String resourceGroupName, String accountName, String certificateName, CertificateCreateOrUpdateParameters parameters, String ifMatch) { return updateWithServiceResponseAsync(resourceGroupName, accountName, certificateName, parameters, ifMatch).toBlocking().single().body(); } /** * Updates the properties of an existing certificate. * * @param resourceGroupName The name of the resource group that contains the Batch account. * @param accountName The name of the Batch account. * @param certificateName The identifier for the certificate. This must be made up of algorithm and thumbprint separated by a dash, and must match the certificate data in the request. For example SHA1-a3d1c5. * @param parameters Certificate entity to update. * @param ifMatch The entity state (ETag) version of the certificate to update. This value can be omitted or set to "*" to apply the operation unconditionally. 
* @param serviceCallback the async ServiceCallback to handle successful and failed responses. * @throws IllegalArgumentException thrown if parameters fail the validation * @return the {@link ServiceFuture} object */ public ServiceFuture<CertificateInner> updateAsync(String resourceGroupName, String accountName, String certificateName, CertificateCreateOrUpdateParameters parameters, String ifMatch, final ServiceCallback<CertificateInner> serviceCallback) { return ServiceFuture.fromHeaderResponse(updateWithServiceResponseAsync(resourceGroupName, accountName, certificateName, parameters, ifMatch), serviceCallback); } /** * Updates the properties of an existing certificate. * * @param resourceGroupName The name of the resource group that contains the Batch account. * @param accountName The name of the Batch account. * @param certificateName The identifier for the certificate. This must be made up of algorithm and thumbprint separated by a dash, and must match the certificate data in the request. For example SHA1-a3d1c5. * @param parameters Certificate entity to update. * @param ifMatch The entity state (ETag) version of the certificate to update. This value can be omitted or set to "*" to apply the operation unconditionally. * @throws IllegalArgumentException thrown if parameters fail the validation * @return the observable to the CertificateInner object */ public Observable<CertificateInner> updateAsync(String resourceGroupName, String accountName, String certificateName, CertificateCreateOrUpdateParameters parameters, String ifMatch) { return updateWithServiceResponseAsync(resourceGroupName, accountName, certificateName, parameters, ifMatch).map(new Func1<ServiceResponseWithHeaders<CertificateInner, CertificateUpdateHeaders>, CertificateInner>() { @Override public CertificateInner call(ServiceResponseWithHeaders<CertificateInner, CertificateUpdateHeaders> response) { return response.body(); } }); } /** * Updates the properties of an existing certificate. 
     *
     * @param resourceGroupName The name of the resource group that contains the Batch account.
     * @param accountName The name of the Batch account.
     * @param certificateName The identifier for the certificate. This must be made up of algorithm and thumbprint separated by a dash, and must match the certificate data in the request. For example SHA1-a3d1c5.
     * @param parameters Certificate entity to update.
     * @param ifMatch The entity state (ETag) version of the certificate to update. This value can be omitted or set to "*" to apply the operation unconditionally.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the CertificateInner object
     */
    public Observable<ServiceResponseWithHeaders<CertificateInner, CertificateUpdateHeaders>> updateWithServiceResponseAsync(String resourceGroupName, String accountName, String certificateName, CertificateCreateOrUpdateParameters parameters, String ifMatch) {
        if (resourceGroupName == null) {
            throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
        }
        if (accountName == null) {
            throw new IllegalArgumentException("Parameter accountName is required and cannot be null.");
        }
        if (certificateName == null) {
            throw new IllegalArgumentException("Parameter certificateName is required and cannot be null.");
        }
        if (this.client.subscriptionId() == null) {
            throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
        }
        if (parameters == null) {
            throw new IllegalArgumentException("Parameter parameters is required and cannot be null.");
        }
        if (this.client.apiVersion() == null) {
            throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
        }
        Validator.validate(parameters);
        return service.update(resourceGroupName, accountName, certificateName, this.client.subscriptionId(), parameters, ifMatch, this.client.apiVersion(), this.client.acceptLanguage(), this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponseWithHeaders<CertificateInner, CertificateUpdateHeaders>>>() {
                @Override
                public Observable<ServiceResponseWithHeaders<CertificateInner, CertificateUpdateHeaders>> call(Response<ResponseBody> response) {
                    try {
                        // Deserialize the raw HTTP response into the typed body plus update headers.
                        ServiceResponseWithHeaders<CertificateInner, CertificateUpdateHeaders> clientResponse = updateDelegate(response);
                        return Observable.just(clientResponse);
                    } catch (Throwable t) {
                        // Propagate deserialization/unexpected-status failures through the Rx error channel.
                        return Observable.error(t);
                    }
                }
            });
    }

    // Maps the raw update response: HTTP 200 -> CertificateInner body; any other status -> CloudException.
    private ServiceResponseWithHeaders<CertificateInner, CertificateUpdateHeaders> updateDelegate(Response<ResponseBody> response) throws CloudException, IOException, IllegalArgumentException {
        return this.client.restClient().responseBuilderFactory().<CertificateInner, CloudException>newInstance(this.client.serializerAdapter())
                .register(200, new TypeToken<CertificateInner>() { }.getType())
                .registerError(CloudException.class)
                .buildWithHeaders(response, CertificateUpdateHeaders.class);
    }

    /**
     * Deletes the specified certificate.
     *
     * @param resourceGroupName The name of the resource group that contains the Batch account.
     * @param accountName The name of the Batch account.
     * @param certificateName The identifier for the certificate. This must be made up of algorithm and thumbprint separated by a dash, and must match the certificate data in the request. For example SHA1-a3d1c5.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     */
    public void delete(String resourceGroupName, String accountName, String certificateName) {
        // Long-running operation: last() blocks until the final polling result is emitted.
        deleteWithServiceResponseAsync(resourceGroupName, accountName, certificateName).toBlocking().last().body();
    }

    /**
     * Deletes the specified certificate.
     *
     * @param resourceGroupName The name of the resource group that contains the Batch account.
     * @param accountName The name of the Batch account.
     * @param certificateName The identifier for the certificate. This must be made up of algorithm and thumbprint separated by a dash, and must match the certificate data in the request. For example SHA1-a3d1c5.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<Void> deleteAsync(String resourceGroupName, String accountName, String certificateName, final ServiceCallback<Void> serviceCallback) {
        return ServiceFuture.fromHeaderResponse(deleteWithServiceResponseAsync(resourceGroupName, accountName, certificateName), serviceCallback);
    }

    /**
     * Deletes the specified certificate.
     *
     * @param resourceGroupName The name of the resource group that contains the Batch account.
     * @param accountName The name of the Batch account.
     * @param certificateName The identifier for the certificate. This must be made up of algorithm and thumbprint separated by a dash, and must match the certificate data in the request. For example SHA1-a3d1c5.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable for the request
     */
    public Observable<Void> deleteAsync(String resourceGroupName, String accountName, String certificateName) {
        return deleteWithServiceResponseAsync(resourceGroupName, accountName, certificateName).map(new Func1<ServiceResponseWithHeaders<Void, CertificateDeleteHeaders>, Void>() {
            @Override
            public Void call(ServiceResponseWithHeaders<Void, CertificateDeleteHeaders> response) {
                return response.body();
            }
        });
    }

    /**
     * Deletes the specified certificate.
     *
     * @param resourceGroupName The name of the resource group that contains the Batch account.
     * @param accountName The name of the Batch account.
     * @param certificateName The identifier for the certificate. This must be made up of algorithm and thumbprint separated by a dash, and must match the certificate data in the request. For example SHA1-a3d1c5.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable for the request
     */
    public Observable<ServiceResponseWithHeaders<Void, CertificateDeleteHeaders>> deleteWithServiceResponseAsync(String resourceGroupName, String accountName, String certificateName) {
        if (resourceGroupName == null) {
            throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
        }
        if (accountName == null) {
            throw new IllegalArgumentException("Parameter accountName is required and cannot be null.");
        }
        if (certificateName == null) {
            throw new IllegalArgumentException("Parameter certificateName is required and cannot be null.");
        }
        if (this.client.subscriptionId() == null) {
            throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
        }
        if (this.client.apiVersion() == null) {
            throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
        }
        Observable<Response<ResponseBody>> observable = service.delete(resourceGroupName, accountName, certificateName, this.client.subscriptionId(), this.client.apiVersion(), this.client.acceptLanguage(), this.client.userAgent());
        // Delegates long-running-operation polling (202/Location headers) to the shared AzureClient helper.
        return client.getAzureClient().getPostOrDeleteResultWithHeadersAsync(observable, new TypeToken<Void>() { }.getType(), CertificateDeleteHeaders.class);
    }

    /**
     * Deletes the specified certificate.
     *
     * @param resourceGroupName The name of the resource group that contains the Batch account.
     * @param accountName The name of the Batch account.
     * @param certificateName The identifier for the certificate. This must be made up of algorithm and thumbprint separated by a dash, and must match the certificate data in the request. For example SHA1-a3d1c5.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     */
    public void beginDelete(String resourceGroupName, String accountName, String certificateName) {
        // Issues only the initial delete request (no LRO polling); single() blocks for that one response.
        beginDeleteWithServiceResponseAsync(resourceGroupName, accountName, certificateName).toBlocking().single().body();
    }

    /**
     * Deletes the specified certificate.
     *
     * @param resourceGroupName The name of the resource group that contains the Batch account.
     * @param accountName The name of the Batch account.
     * @param certificateName The identifier for the certificate. This must be made up of algorithm and thumbprint separated by a dash, and must match the certificate data in the request. For example SHA1-a3d1c5.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<Void> beginDeleteAsync(String resourceGroupName, String accountName, String certificateName, final ServiceCallback<Void> serviceCallback) {
        return ServiceFuture.fromHeaderResponse(beginDeleteWithServiceResponseAsync(resourceGroupName, accountName, certificateName), serviceCallback);
    }

    /**
     * Deletes the specified certificate.
     *
     * @param resourceGroupName The name of the resource group that contains the Batch account.
     * @param accountName The name of the Batch account.
     * @param certificateName The identifier for the certificate. This must be made up of algorithm and thumbprint separated by a dash, and must match the certificate data in the request. For example SHA1-a3d1c5.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable for the request
     */
    public Observable<Void> beginDeleteAsync(String resourceGroupName, String accountName, String certificateName) {
        return beginDeleteWithServiceResponseAsync(resourceGroupName, accountName, certificateName).map(new Func1<ServiceResponseWithHeaders<Void, CertificateDeleteHeaders>, Void>() {
            @Override
            public Void call(ServiceResponseWithHeaders<Void, CertificateDeleteHeaders> response) {
                return response.body();
            }
        });
    }

    /**
     * Deletes the specified certificate.
     *
     * @param resourceGroupName The name of the resource group that contains the Batch account.
     * @param accountName The name of the Batch account.
     * @param certificateName The identifier for the certificate. This must be made up of algorithm and thumbprint separated by a dash, and must match the certificate data in the request. For example SHA1-a3d1c5.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the ServiceResponseWithHeaders object
     */
    public Observable<ServiceResponseWithHeaders<Void, CertificateDeleteHeaders>> beginDeleteWithServiceResponseAsync(String resourceGroupName, String accountName, String certificateName) {
        if (resourceGroupName == null) {
            throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
        }
        if (accountName == null) {
            throw new IllegalArgumentException("Parameter accountName is required and cannot be null.");
        }
        if (certificateName == null) {
            throw new IllegalArgumentException("Parameter certificateName is required and cannot be null.");
        }
        if (this.client.subscriptionId() == null) {
            throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
        }
        if (this.client.apiVersion() == null) {
            throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
        }
        return service.beginDelete(resourceGroupName, accountName, certificateName, this.client.subscriptionId(), this.client.apiVersion(), this.client.acceptLanguage(), this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponseWithHeaders<Void, CertificateDeleteHeaders>>>() {
                @Override
                public Observable<ServiceResponseWithHeaders<Void, CertificateDeleteHeaders>> call(Response<ResponseBody> response) {
                    try {
                        ServiceResponseWithHeaders<Void, CertificateDeleteHeaders> clientResponse = beginDeleteDelegate(response);
                        return Observable.just(clientResponse);
                    } catch (Throwable t) {
                        return Observable.error(t);
                    }
                }
            });
    }

    // Maps the raw begin-delete response: 200/202/204 are all acceptable; any other status -> CloudException.
    private ServiceResponseWithHeaders<Void, CertificateDeleteHeaders> beginDeleteDelegate(Response<ResponseBody> response) throws CloudException, IOException, IllegalArgumentException {
        return this.client.restClient().responseBuilderFactory().<Void, CloudException>newInstance(this.client.serializerAdapter())
                .register(200, new TypeToken<Void>() { }.getType())
                .register(202, new TypeToken<Void>() { }.getType())
                .register(204, new TypeToken<Void>() { }.getType())
                .registerError(CloudException.class)
                .buildWithHeaders(response, CertificateDeleteHeaders.class);
    }

    /**
     * Gets information about the specified certificate.
     *
     * @param resourceGroupName The name of the resource group that contains the Batch account.
     * @param accountName The name of the Batch account.
     * @param certificateName The identifier for the certificate. This must be made up of algorithm and thumbprint separated by a dash, and must match the certificate data in the request. For example SHA1-a3d1c5.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the CertificateInner object if successful.
     */
    public CertificateInner get(String resourceGroupName, String accountName, String certificateName) {
        return getWithServiceResponseAsync(resourceGroupName, accountName, certificateName).toBlocking().single().body();
    }

    /**
     * Gets information about the specified certificate.
     *
     * @param resourceGroupName The name of the resource group that contains the Batch account.
     * @param accountName The name of the Batch account.
     * @param certificateName The identifier for the certificate. This must be made up of algorithm and thumbprint separated by a dash, and must match the certificate data in the request. For example SHA1-a3d1c5.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<CertificateInner> getAsync(String resourceGroupName, String accountName, String certificateName, final ServiceCallback<CertificateInner> serviceCallback) {
        return ServiceFuture.fromHeaderResponse(getWithServiceResponseAsync(resourceGroupName, accountName, certificateName), serviceCallback);
    }

    /**
     * Gets information about the specified certificate.
     *
     * @param resourceGroupName The name of the resource group that contains the Batch account.
     * @param accountName The name of the Batch account.
     * @param certificateName The identifier for the certificate. This must be made up of algorithm and thumbprint separated by a dash, and must match the certificate data in the request. For example SHA1-a3d1c5.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the CertificateInner object
     */
    public Observable<CertificateInner> getAsync(String resourceGroupName, String accountName, String certificateName) {
        return getWithServiceResponseAsync(resourceGroupName, accountName, certificateName).map(new Func1<ServiceResponseWithHeaders<CertificateInner, CertificateGetHeaders>, CertificateInner>() {
            @Override
            public CertificateInner call(ServiceResponseWithHeaders<CertificateInner, CertificateGetHeaders> response) {
                return response.body();
            }
        });
    }

    /**
     * Gets information about the specified certificate.
     *
     * @param resourceGroupName The name of the resource group that contains the Batch account.
     * @param accountName The name of the Batch account.
     * @param certificateName The identifier for the certificate. This must be made up of algorithm and thumbprint separated by a dash, and must match the certificate data in the request. For example SHA1-a3d1c5.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the CertificateInner object
     */
    public Observable<ServiceResponseWithHeaders<CertificateInner, CertificateGetHeaders>> getWithServiceResponseAsync(String resourceGroupName, String accountName, String certificateName) {
        if (resourceGroupName == null) {
            throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
        }
        if (accountName == null) {
            throw new IllegalArgumentException("Parameter accountName is required and cannot be null.");
        }
        if (certificateName == null) {
            throw new IllegalArgumentException("Parameter certificateName is required and cannot be null.");
        }
        if (this.client.subscriptionId() == null) {
            throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
        }
        if (this.client.apiVersion() == null) {
            throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
        }
        return service.get(resourceGroupName, accountName, certificateName, this.client.subscriptionId(), this.client.apiVersion(), this.client.acceptLanguage(), this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponseWithHeaders<CertificateInner, CertificateGetHeaders>>>() {
                @Override
                public Observable<ServiceResponseWithHeaders<CertificateInner, CertificateGetHeaders>> call(Response<ResponseBody> response) {
                    try {
                        ServiceResponseWithHeaders<CertificateInner, CertificateGetHeaders> clientResponse = getDelegate(response);
                        return Observable.just(clientResponse);
                    } catch (Throwable t) {
                        return Observable.error(t);
                    }
                }
            });
    }

    // Maps the raw get response: HTTP 200 -> CertificateInner body; any other status -> CloudException.
    private ServiceResponseWithHeaders<CertificateInner, CertificateGetHeaders> getDelegate(Response<ResponseBody> response) throws CloudException, IOException, IllegalArgumentException {
        return this.client.restClient().responseBuilderFactory().<CertificateInner, CloudException>newInstance(this.client.serializerAdapter())
                .register(200, new TypeToken<CertificateInner>() { }.getType())
                .registerError(CloudException.class)
                .buildWithHeaders(response, CertificateGetHeaders.class);
    }

    /**
     * Cancels a failed deletion of a certificate from the specified account.
     * If you try to delete a certificate that is being used by a pool or compute node, the status of the certificate changes to deleteFailed. If you decide that you want to continue using the certificate, you can use this operation to set the status of the certificate back to active. If you intend to delete the certificate, you do not need to run this operation after the deletion failed. You must make sure that the certificate is not being used by any resources, and then you can try again to delete the certificate.
     *
     * @param resourceGroupName The name of the resource group that contains the Batch account.
     * @param accountName The name of the Batch account.
     * @param certificateName The identifier for the certificate. This must be made up of algorithm and thumbprint separated by a dash, and must match the certificate data in the request. For example SHA1-a3d1c5.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the CertificateInner object if successful.
     */
    public CertificateInner cancelDeletion(String resourceGroupName, String accountName, String certificateName) {
        return cancelDeletionWithServiceResponseAsync(resourceGroupName, accountName, certificateName).toBlocking().single().body();
    }

    /**
     * Cancels a failed deletion of a certificate from the specified account.
     * If you try to delete a certificate that is being used by a pool or compute node, the status of the certificate changes to deleteFailed. If you decide that you want to continue using the certificate, you can use this operation to set the status of the certificate back to active. If you intend to delete the certificate, you do not need to run this operation after the deletion failed. You must make sure that the certificate is not being used by any resources, and then you can try again to delete the certificate.
     *
     * @param resourceGroupName The name of the resource group that contains the Batch account.
     * @param accountName The name of the Batch account.
     * @param certificateName The identifier for the certificate. This must be made up of algorithm and thumbprint separated by a dash, and must match the certificate data in the request. For example SHA1-a3d1c5.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<CertificateInner> cancelDeletionAsync(String resourceGroupName, String accountName, String certificateName, final ServiceCallback<CertificateInner> serviceCallback) {
        return ServiceFuture.fromHeaderResponse(cancelDeletionWithServiceResponseAsync(resourceGroupName, accountName, certificateName), serviceCallback);
    }

    /**
     * Cancels a failed deletion of a certificate from the specified account.
     * If you try to delete a certificate that is being used by a pool or compute node, the status of the certificate changes to deleteFailed. If you decide that you want to continue using the certificate, you can use this operation to set the status of the certificate back to active. If you intend to delete the certificate, you do not need to run this operation after the deletion failed. You must make sure that the certificate is not being used by any resources, and then you can try again to delete the certificate.
     *
     * @param resourceGroupName The name of the resource group that contains the Batch account.
     * @param accountName The name of the Batch account.
     * @param certificateName The identifier for the certificate. This must be made up of algorithm and thumbprint separated by a dash, and must match the certificate data in the request. For example SHA1-a3d1c5.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the CertificateInner object
     */
    public Observable<CertificateInner> cancelDeletionAsync(String resourceGroupName, String accountName, String certificateName) {
        return cancelDeletionWithServiceResponseAsync(resourceGroupName, accountName, certificateName).map(new Func1<ServiceResponseWithHeaders<CertificateInner, CertificateCancelDeletionHeaders>, CertificateInner>() {
            @Override
            public CertificateInner call(ServiceResponseWithHeaders<CertificateInner, CertificateCancelDeletionHeaders> response) {
                return response.body();
            }
        });
    }

    /**
     * Cancels a failed deletion of a certificate from the specified account.
     * If you try to delete a certificate that is being used by a pool or compute node, the status of the certificate changes to deleteFailed. If you decide that you want to continue using the certificate, you can use this operation to set the status of the certificate back to active. If you intend to delete the certificate, you do not need to run this operation after the deletion failed. You must make sure that the certificate is not being used by any resources, and then you can try again to delete the certificate.
     *
     * @param resourceGroupName The name of the resource group that contains the Batch account.
     * @param accountName The name of the Batch account.
     * @param certificateName The identifier for the certificate. This must be made up of algorithm and thumbprint separated by a dash, and must match the certificate data in the request. For example SHA1-a3d1c5.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the CertificateInner object
     */
    public Observable<ServiceResponseWithHeaders<CertificateInner, CertificateCancelDeletionHeaders>> cancelDeletionWithServiceResponseAsync(String resourceGroupName, String accountName, String certificateName) {
        if (resourceGroupName == null) {
            throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
        }
        if (accountName == null) {
            throw new IllegalArgumentException("Parameter accountName is required and cannot be null.");
        }
        if (certificateName == null) {
            throw new IllegalArgumentException("Parameter certificateName is required and cannot be null.");
        }
        if (this.client.subscriptionId() == null) {
            throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
        }
        if (this.client.apiVersion() == null) {
            throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
        }
        return service.cancelDeletion(resourceGroupName, accountName, certificateName, this.client.subscriptionId(), this.client.apiVersion(), this.client.acceptLanguage(), this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponseWithHeaders<CertificateInner, CertificateCancelDeletionHeaders>>>() {
                @Override
                public Observable<ServiceResponseWithHeaders<CertificateInner, CertificateCancelDeletionHeaders>> call(Response<ResponseBody> response) {
                    try {
                        ServiceResponseWithHeaders<CertificateInner, CertificateCancelDeletionHeaders> clientResponse = cancelDeletionDelegate(response);
                        return Observable.just(clientResponse);
                    } catch (Throwable t) {
                        return Observable.error(t);
                    }
                }
            });
    }

    // Maps the raw cancel-deletion response: HTTP 200 -> CertificateInner body; any other status -> CloudException.
    private ServiceResponseWithHeaders<CertificateInner, CertificateCancelDeletionHeaders> cancelDeletionDelegate(Response<ResponseBody> response) throws CloudException, IOException, IllegalArgumentException {
        return this.client.restClient().responseBuilderFactory().<CertificateInner, CloudException>newInstance(this.client.serializerAdapter())
                .register(200, new TypeToken<CertificateInner>() { }.getType())
                .registerError(CloudException.class)
                .buildWithHeaders(response, CertificateCancelDeletionHeaders.class);
    }

    /**
     * Lists all of the certificates in the specified account.
     *
     * @param nextPageLink The NextLink from the previous successful call to List operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the PagedList&lt;CertificateInner&gt; object if successful.
     */
    public PagedList<CertificateInner> listByBatchAccountNext(final String nextPageLink) {
        ServiceResponse<Page<CertificateInner>> response = listByBatchAccountNextSinglePageAsync(nextPageLink).toBlocking().single();
        // Lazily fetches subsequent pages, blocking on each page request as the list is traversed.
        return new PagedList<CertificateInner>(response.body()) {
            @Override
            public Page<CertificateInner> nextPage(String nextPageLink) {
                return listByBatchAccountNextSinglePageAsync(nextPageLink).toBlocking().single().body();
            }
        };
    }

    /**
     * Lists all of the certificates in the specified account.
     *
     * @param nextPageLink The NextLink from the previous successful call to List operation.
     * @param serviceFuture the ServiceFuture object tracking the Retrofit calls
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<List<CertificateInner>> listByBatchAccountNextAsync(final String nextPageLink, final ServiceFuture<List<CertificateInner>> serviceFuture, final ListOperationCallback<CertificateInner> serviceCallback) {
        return AzureServiceFuture.fromPageResponse(
            listByBatchAccountNextSinglePageAsync(nextPageLink),
            new Func1<String, Observable<ServiceResponse<Page<CertificateInner>>>>() {
                @Override
                public Observable<ServiceResponse<Page<CertificateInner>>> call(String nextPageLink) {
                    return listByBatchAccountNextSinglePageAsync(nextPageLink);
                }
            },
            serviceCallback);
    }

    /**
     * Lists all of the certificates in the specified account.
     *
     * @param nextPageLink The NextLink from the previous successful call to List operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the PagedList&lt;CertificateInner&gt; object
     */
    public Observable<Page<CertificateInner>> listByBatchAccountNextAsync(final String nextPageLink) {
        return listByBatchAccountNextWithServiceResponseAsync(nextPageLink)
            .map(new Func1<ServiceResponse<Page<CertificateInner>>, Page<CertificateInner>>() {
                @Override
                public Page<CertificateInner> call(ServiceResponse<Page<CertificateInner>> response) {
                    return response.body();
                }
            });
    }

    /**
     * Lists all of the certificates in the specified account.
     *
     * @param nextPageLink The NextLink from the previous successful call to List operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the PagedList&lt;CertificateInner&gt; object
     */
    public Observable<ServiceResponse<Page<CertificateInner>>> listByBatchAccountNextWithServiceResponseAsync(final String nextPageLink) {
        return listByBatchAccountNextSinglePageAsync(nextPageLink)
            .concatMap(new Func1<ServiceResponse<Page<CertificateInner>>, Observable<ServiceResponse<Page<CertificateInner>>>>() {
                @Override
                public Observable<ServiceResponse<Page<CertificateInner>>> call(ServiceResponse<Page<CertificateInner>> page) {
                    String nextPageLink = page.body().nextPageLink();
                    // Recursively chains page fetches until the service stops returning a next link.
                    if (nextPageLink == null) {
                        return Observable.just(page);
                    }
                    return Observable.just(page).concatWith(listByBatchAccountNextWithServiceResponseAsync(nextPageLink));
                }
            });
    }

    /**
     * Lists all of the certificates in the specified account.
     *
     * @param nextPageLink The NextLink from the previous successful call to List operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the PagedList&lt;CertificateInner&gt; object wrapped in {@link ServiceResponse} if successful.
     */
    public Observable<ServiceResponse<Page<CertificateInner>>> listByBatchAccountNextSinglePageAsync(final String nextPageLink) {
        if (nextPageLink == null) {
            throw new IllegalArgumentException("Parameter nextPageLink is required and cannot be null.");
        }
        String nextUrl = String.format("%s", nextPageLink);
        return service.listByBatchAccountNext(nextUrl, this.client.acceptLanguage(), this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Page<CertificateInner>>>>() {
                @Override
                public Observable<ServiceResponse<Page<CertificateInner>>> call(Response<ResponseBody> response) {
                    try {
                        ServiceResponse<PageImpl<CertificateInner>> result = listByBatchAccountNextDelegate(response);
                        // Re-wrap the concrete PageImpl as the Page interface expected by callers.
                        return Observable.just(new ServiceResponse<Page<CertificateInner>>(result.body(), result.response()));
                    } catch (Throwable t) {
                        return Observable.error(t);
                    }
                }
            });
    }

    // Maps the raw list-next response: HTTP 200 -> one PageImpl of certificates; any other status -> CloudException.
    private ServiceResponse<PageImpl<CertificateInner>> listByBatchAccountNextDelegate(Response<ResponseBody> response) throws CloudException, IOException, IllegalArgumentException {
        return this.client.restClient().responseBuilderFactory().<PageImpl<CertificateInner>, CloudException>newInstance(this.client.serializerAdapter())
                .register(200, new TypeToken<PageImpl<CertificateInner>>() { }.getType())
                .registerError(CloudException.class)
                .build(response);
    }

}
/* Copyright (c) 2011 Danish Maritime Authority. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package net.maritimecloud.internal.message; /** * Various binary utility methods. No methods perform any kind of bound checks. * * @author Kasper Nielsen */ public class BinaryUtil { /** * Reads a int using big-endian convention from the specified offset. * * @param bytes * The array to read from * @param offset * the position to start reading from * @return the integer corresponding to the 4 bytes that was read */ public static int readInt(byte[] bytes, int offset) { return (bytes[offset] << 24) + ((bytes[offset + 1] & 0xff) << 16) + ((bytes[offset + 2] & 0xff) << 8) + (bytes[offset + 3] & 0xff); } /** * Convert an array of bytes into an array of ints. 4 bytes from the input data map to a single int in the output * data. * * @param bytes * The data to read from. * @return An array of integers corresponding to the specified byte array * @throws IllegalArgumentException * if the length of the array is not divisible by 4 */ public static int[] readInts(byte[] bytes) { if ((bytes.length & 3) != 0) { // & 3 = % 4 throw new IllegalArgumentException("Number of bytes must be a multiple of 4."); } int[] ints = new int[bytes.length >> 2]; for (int i = 0; i < ints.length; i++) { ints[i] = readInt(bytes, i << 2); } return ints; } /** * Writes 4 bytes containing the given int value. The conversion is done using the big-endian convention. 
* * @param value * the value to convert * @param offset * the offset in the byte array to write the int * @param bytes * the array to write the value into * @return the specified byte array */ public static byte[] writeInt(int value, byte[] bytes, int offset) { bytes[offset] = (byte) (value >>> 24); bytes[offset + 1] = (byte) (value >>> 16); bytes[offset + 2] = (byte) (value >>> 8); bytes[offset + 3] = (byte) value; return bytes; } /** * Writes 4 bytes containing the given int value. The conversion is done using the big-endian convention. * * @param value * the value to convert * @param offset * the offset in the byte array to write the int * @param bytes * the array to write the value into * @return the specified byte array */ public static byte[] writeLong(long value, byte[] bytes, int offset) { bytes[offset] = (byte) (value >>> 56); bytes[offset + 1] = (byte) (value >>> 48); bytes[offset + 2] = (byte) (value >>> 40); bytes[offset + 3] = (byte) (value >>> 32); bytes[offset + 4] = (byte) (value >>> 24); bytes[offset + 5] = (byte) (value >>> 16); bytes[offset + 6] = (byte) (value >>> 8); bytes[offset + 7] = (byte) value; return bytes; } /** * Reads a long using big-endian convention from the specified offset. * * @param bytes * The array to read from * @param offset * the position to start reading from * @return the long corresponding to the 4 bytes that was read */ public static long readLong(byte[] bytes, int offset) { return ((long)bytes[offset] << 56) + (((long)bytes[offset + 1] & 0xff) << 48) + (((long)bytes[offset + 2] & 0xff) << 40) + (((long)bytes[offset + 3] & 0xff) << 32) + (((long)bytes[offset + 4] & 0xff) << 24) + (((long)bytes[offset + 5] & 0xff) << 16) + (((long)bytes[offset + 6] & 0xff) << 8) + ((long)bytes[offset + 7] & 0xff); } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package io.github.oasis.core.external;

import io.github.oasis.core.Event;

import java.util.List;
import java.util.Optional;

/**
 * Base interface to implement for storage of individual event data.
 * This interface will be used by engine to store some of events based
 * on the rule type to refer them later in execution process.
 *
 * @author Isuru Weerarathna
 */
public interface EventReadWriteHandler {

    /**
     * Reads a single previously stored event.
     *
     * @param contextRef reference of the storage context to read from
     * @param eventId id of the event to look up
     * @return the stored event, or {@link Optional#empty()} when no such event exists
     */
    Optional<Event> read(String contextRef, String eventId);

    /**
     * Reads several stored events at once.
     *
     * @param contextRef reference of the storage context to read from
     * @param eventIds ids of the events to look up
     * @return the events that could be resolved
     */
    // NOTE: parameter renamed from the original "contextRed" typo; a parameter
    // rename is source- and binary-compatible for callers in Java.
    List<Event> bulkRead(String contextRef, String... eventIds);

    /**
     * Stores a single event.
     *
     * @param contextRef reference of the storage context to write into
     * @param event the event to persist
     * @return {@code true} if the event was written successfully
     */
    boolean write(String contextRef, Event event);

    /**
     * Removes previously stored events.
     *
     * @param contextRef reference of the storage context to remove from
     * @param eventIds ids of the events to remove
     * @return {@code true} if the removal succeeded
     */
    boolean remove(String contextRef, String... eventIds);

}
package com.didi.hummer.component.list; import android.support.v7.widget.LinearLayoutManager; import android.support.v7.widget.RecyclerView; import android.support.v7.widget.StaggeredGridLayoutManager; import java.util.Arrays; /** * Created by XiaoFeng on 2021/4/19. */ public class ListUtil { public static boolean isVertical(RecyclerView.LayoutManager layoutManager) { if (layoutManager instanceof LinearLayoutManager) { return ((LinearLayoutManager) layoutManager).getOrientation() == LinearLayoutManager.VERTICAL; } else if (layoutManager instanceof StaggeredGridLayoutManager) { return ((StaggeredGridLayoutManager) layoutManager).getOrientation() == StaggeredGridLayoutManager.VERTICAL; } return true; } public static int getFirstVisibleItemPosition(RecyclerView.LayoutManager layoutManager) { if (layoutManager instanceof LinearLayoutManager) { return ((LinearLayoutManager) layoutManager).findFirstVisibleItemPosition(); } else if (layoutManager instanceof StaggeredGridLayoutManager) { StaggeredGridLayoutManager manager = (StaggeredGridLayoutManager) layoutManager; int[] first = new int[manager.getSpanCount()]; manager.findFirstVisibleItemPositions(first); Arrays.sort(first); return first[0]; } return 0; } public static int getLastVisibleItemPosition(RecyclerView.LayoutManager layoutManager) { if (layoutManager instanceof LinearLayoutManager) { return ((LinearLayoutManager) layoutManager).findLastVisibleItemPosition(); } else if (layoutManager instanceof StaggeredGridLayoutManager) { StaggeredGridLayoutManager manager = (StaggeredGridLayoutManager) layoutManager; int[] last = new int[manager.getSpanCount()]; manager.findLastVisibleItemPositions(last); Arrays.sort(last); return last[last.length - 1]; } return 0; } }
/* * Copyright 2002-2013 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.web.socket.client.jetty; import java.net.URI; import java.util.Arrays; import org.eclipse.jetty.server.Server; import org.eclipse.jetty.server.ServerConnector; import org.eclipse.jetty.websocket.servlet.ServletUpgradeRequest; import org.eclipse.jetty.websocket.servlet.ServletUpgradeResponse; import org.eclipse.jetty.websocket.servlet.WebSocketCreator; import org.eclipse.jetty.websocket.servlet.WebSocketServletFactory; import org.junit.After; import org.junit.Before; import org.junit.Test; import org.springframework.util.CollectionUtils; import org.springframework.util.SocketUtils; import org.springframework.web.socket.WebSocketHandler; import org.springframework.web.socket.WebSocketSession; import org.springframework.web.socket.adapter.JettyWebSocketHandlerAdapter; import org.springframework.web.socket.adapter.JettyWebSocketSession; import org.springframework.web.socket.adapter.TextWebSocketHandlerAdapter; import org.springframework.web.socket.support.WebSocketHttpHeaders; import static org.junit.Assert.*; /** * Tests for {@link JettyWebSocketClient}. 
* @author Rossen Stoyanchev
 */
public class JettyWebSocketClientTests {

	// Client under test.
	private JettyWebSocketClient client;

	// Minimal embedded Jetty server that upgrades requests to WebSocket.
	private TestJettyWebSocketServer server;

	// ws:// URL of the test endpoint, built on a randomly chosen free port.
	private String wsUrl;

	// Session opened by the test; closed in teardown.
	private WebSocketSession wsSession;


	@Before
	public void setup() throws Exception {
		// Pick a free TCP port so concurrent builds don't collide.
		int port = SocketUtils.findAvailableTcpPort();
		this.server = new TestJettyWebSocketServer(port, new TextWebSocketHandlerAdapter());
		this.server.start();
		this.client = new JettyWebSocketClient();
		this.client.start();
		this.wsUrl = "ws://localhost:" + port + "/test";
	}

	@After
	public void teardown() throws Exception {
		// NOTE(review): wsSession is null when a test never completes a
		// handshake; close() would then throw NPE — confirm every test sets it.
		this.wsSession.close();
		this.client.stop();
		this.server.stop();
	}

	@Test
	public void doHandshake() throws Exception {
		WebSocketHttpHeaders headers = new WebSocketHttpHeaders();
		headers.setSecWebSocketProtocol(Arrays.asList("echo"));
		// Blocks on the handshake future; the server below accepts the first
		// offered sub-protocol, so "echo" should come back negotiated.
		this.wsSession = this.client.doHandshake(new TextWebSocketHandlerAdapter(), headers, new URI(this.wsUrl)).get();
		assertEquals(this.wsUrl, this.wsSession.getUri().toString());
		assertEquals("echo", this.wsSession.getAcceptedProtocol());
	}


	// Embedded Jetty server that accepts any WebSocket upgrade and echoes the
	// first requested sub-protocol back in the handshake response.
	private static class TestJettyWebSocketServer {

		private final Server server;

		public TestJettyWebSocketServer(int port, final WebSocketHandler webSocketHandler) {
			this.server = new Server();
			ServerConnector connector = new ServerConnector(this.server);
			connector.setPort(port);
			this.server.addConnector(connector);
			this.server.setHandler(new org.eclipse.jetty.websocket.server.WebSocketHandler() {
				@Override
				public void configure(WebSocketServletFactory factory) {
					factory.setCreator(new WebSocketCreator() {
						@Override
						public Object createWebSocket(ServletUpgradeRequest req, ServletUpgradeResponse resp) {
							// Accept the first sub-protocol the client offered, if any.
							if (!CollectionUtils.isEmpty(req.getSubProtocols())) {
								resp.setAcceptedSubProtocol(req.getSubProtocols().get(0));
							}
							JettyWebSocketSession session = new JettyWebSocketSession(null, null);
							return new JettyWebSocketHandlerAdapter(webSocketHandler, session);
						}
					});
				}
			});
		}

		public void start() throws Exception {
			this.server.start();
		}

		public void stop() throws Exception {
			this.server.stop();
		}
	}

}
package com.example.gsyvideoplayer;

import android.Manifest;
import android.content.Intent;
import android.os.Build;
import android.os.Bundle;

import androidx.annotation.NonNull;
import androidx.appcompat.app.AppCompatActivity;

import android.view.View;
import android.widget.Button;
import android.widget.Toast;

import com.example.gsyvideoplayer.simple.SimpleActivity;
import com.example.gsyvideoplayer.utils.JumpUtils;
import com.shuyu.gsyvideoplayer.GSYVideoManager;
import com.shuyu.gsyvideoplayer.utils.Debuger;

import butterknife.BindView;
import butterknife.ButterKnife;
import butterknife.OnClick;
import permissions.dispatcher.PermissionUtils;

/**
 * Launcher screen of the demo app: a menu of buttons, each jumping to one
 * player demo via {@link JumpUtils}. Also requests the external-storage
 * permission needed for local video playback.
 */
public class MainActivity extends AppCompatActivity {

    @BindView(R.id.open_btn)
    Button openBtn;
    @BindView(R.id.open_btn_empty)
    Button openBtn2;

    // Permission needed to play videos stored on the SD card.
    final String[] permissions = {Manifest.permission.WRITE_EXTERNAL_STORAGE};

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        Debuger.enable();
        ButterKnife.bind(this);
        boolean hadPermission = PermissionUtils.hasSelfPermissions(this, permissions);
        // Runtime permissions exist only from Android M on.
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M && !hadPermission) {
            String[] permissions = {Manifest.permission.WRITE_EXTERNAL_STORAGE};
            requestPermissions(permissions, 1110);
        }
    }

    @Override
    public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
        super.onRequestPermissionsResult(requestCode, permissions, grantResults);
        boolean sdPermissionResult = PermissionUtils.verifyPermissions(grantResults);
        if (!sdPermissionResult) {
            // Warn the user that local playback won't work without SD access.
            Toast.makeText(this, "没获取到sd卡权限,无法播放本地视频哦", Toast.LENGTH_LONG).show();
        }
    }

    @OnClick({R.id.open_btn, R.id.list_btn, R.id.list_btn_2, R.id.list_detail,
            R.id.clear_cache, R.id.recycler, R.id.recycler_2, R.id.list_detail_list,
            R.id.web_detail, R.id.danmaku_video, R.id.fragment_video, R.id.more_type,
            R.id.input_type, R.id.open_btn_empty, R.id.open_control, R.id.open_filter,
            R.id.open_btn_pick, R.id.open_btn_auto, R.id.open_scroll, R.id.open_window,
            R.id.open_btn_ad, R.id.open_btn_multi, R.id.open_btn_ad2, R.id.open_list_ad,
            R.id.open_custom_exo, R.id.open_simple, R.id.open_switch, R.id.media_codec})
    public void onClick(View view) {
        switch (view.getId()) {
            case R.id.open_simple:
                // Simple playback demo
                startActivity(new Intent(this, SimpleActivity.class));
                break;
            case R.id.open_btn:
                // Plays directly in a single page
                JumpUtils.goToVideoPlayer(this, openBtn);
                break;
            case R.id.list_btn:
                // Plain list playback: fullscreen only, no gravity rotation,
                // player is not retained after scrolling away
                JumpUtils.goToVideoPlayer(this);
                break;
            case R.id.list_btn_2:
                // List playback with fullscreen gravity rotation; not destroyed
                // after scrolling away
                JumpUtils.goToVideoPlayer2(this);
                break;
            case R.id.recycler:
                // RecyclerView demo
                JumpUtils.goToVideoRecyclerPlayer(this);
                break;
            case R.id.recycler_2:
                // Second RecyclerView demo
                JumpUtils.goToVideoRecyclerPlayer2(this);
                break;
            case R.id.list_detail:
                // Detail mode supporting rotation fullscreen
                JumpUtils.goToDetailPlayer(this);
                break;
            case R.id.list_detail_list:
                // Plays a continuous playlist
                JumpUtils.goToDetailListPlayer(this);
                break;
            case R.id.web_detail:
                // Normal playback, with preview
                JumpUtils.gotoWebDetail(this);
                break;
            case R.id.danmaku_video:
                // Plays a danmaku (bullet-comment) video
                JumpUtils.gotoDanmaku(this);
                break;
            case R.id.fragment_video:
                // Fragment-hosted playback (original comment duplicated the
                // danmaku one — presumably copy-paste; verify)
                JumpUtils.gotoFragment(this);
                break;
            case R.id.more_type:
                // Multi-feature detail player, e.g. resolution switching, rotation
                JumpUtils.gotoMoreType(this);
                break;
            case R.id.input_type:
                JumpUtils.gotoInput(this);
                break;
            case R.id.open_btn_empty:
                JumpUtils.goToPlayEmptyControlActivity(this, openBtn2);
                break;
            case R.id.open_control:
                JumpUtils.gotoControl(this);
                break;
            case R.id.open_filter:
                JumpUtils.gotoFilter(this);
                break;
            case R.id.open_btn_pick:
                // Seamless switching
                JumpUtils.goToVideoPickPlayer(this, openBtn);
                break;
            case R.id.open_btn_auto:
                // Auto-play in a list
                JumpUtils.goToAutoVideoPlayer(this);
                break;
            case R.id.open_scroll:
                // Auto-play in a scrolling detail list
                JumpUtils.goToScrollDetailPlayer(this);
                break;
            case R.id.open_window:
                // Floating window in multi-window mode
                JumpUtils.goToScrollWindow(this);
                break;
            case R.id.open_btn_ad:
                // Ad demo
                JumpUtils.goToVideoADPlayer(this);
                break;
            case R.id.open_btn_multi:
                // Multiple players at the same time
                JumpUtils.goToMultiVideoPlayer(this);
                break;
            case R.id.open_btn_ad2:
                // Second ad demo (original comment said "multiple players" —
                // presumably copy-paste; verify)
                JumpUtils.goToVideoADPlayer2(this);
                break;
            case R.id.open_list_ad:
                // Ads inside a list (original comment said "multiple players" —
                // presumably copy-paste; verify)
                JumpUtils.goToADListVideoPlayer(this);
                break;
            case R.id.open_custom_exo:
                // Custom ExoPlayer list demo (original comment said "multiple
                // players" — presumably copy-paste; verify)
                JumpUtils.goToDetailExoListPlayer(this);
                break;
            case R.id.open_switch:
                JumpUtils.goToSwitch(this);
                break;
            case R.id.media_codec:
                JumpUtils.goMediaCodec(this);
                break;
            case R.id.clear_cache:
                // Clear the video cache
                GSYVideoManager.instance().clearAllDefaultCache(MainActivity.this);
                //String url = "https://res.exexm.com/cw_145225549855002";
                //GSYVideoManager.clearDefaultCache(MainActivity.this, url);
                break;
        }
    }
}
/* * Copyright 2017 * Ubiquitous Knowledge Processing (UKP) Lab and FG Language Technology * Technische Universität Darmstadt * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package de.tudarmstadt.ukp.clarin.webanno.api.annotation.rendering.model; public enum VCommentType { INFO, ERROR, YIELD }
/*
 * This file is part of SpongeAPI, licensed under the MIT License (MIT).
 *
 * Copyright (c) SpongePowered <https://www.spongepowered.org>
 * Copyright (c) contributors
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
package org.spongepowered.api.event.entity.player;

import org.spongepowered.api.entity.player.Player;
import org.spongepowered.api.event.entity.living.human.HumanLevelChangeEvent;

/**
 * Called when a {@link Player}'s level is changed.
 *
 * <p>This is a marker interface: it declares no members of its own and
 * inherits the level-change accessors from {@link HumanLevelChangeEvent},
 * narrowing the event to players via {@link PlayerEvent}.</p>
 */
public interface PlayerLevelChangeEvent extends HumanLevelChangeEvent, PlayerEvent {

}
package com.hyz.weather.view;

import com.hyz.weather.reSwing.HJButton;
import com.hyz.weather.reSwing.HJLabel;
import com.hyz.weather.reSwing.HJPanel;
import com.hyz.weather.reSwing.Icons;

import javax.swing.*;
import java.awt.*;

/**
 * Header panel of the weather window: a "list" button on the west edge, the
 * current region name centered, and a "refresh" button on the east edge.
 */
public class RegionPanel extends HJPanel {
    // Label showing the currently selected region name.
    private HJLabel location;
    // West "list" button and east "refresh" button, exposed via getters so
    // controllers can attach listeners.
    private JButton list,refresh;

    public RegionPanel(){
        super();
        // Transparent panel so the parent's background shows through.
        this.setOpaque(false);
//        list=new HJButton(Icons.LIST);
        list=new HJButton(Icons.LIST_BLACK);
        // Bounds use the LIST icon's dimensions — presumably identical to the
        // BLACK variant actually displayed; TODO confirm.
        list.setBounds(0,0,Icons.LIST.getIconWidth(),Icons.LIST.getIconHeight());
        location=new HJLabel("",JLabel.CENTER);
//        refresh=new HJButton(Icons.REFRESH);
        refresh=new HJButton(Icons.REFRESH_BLACK);
        refresh.setBounds(0,0,Icons.REFRESH.getIconWidth(),Icons.REFRESH.getIconHeight());
        this.setLayout(new BorderLayout());
        this.add(list,BorderLayout.WEST);
        this.add(location,BorderLayout.CENTER);
        this.add(refresh,BorderLayout.EAST);
    }

    /**
     * Sets the displayed region text.
     *
     * @param region region name to show
     * @return always true
     */
    public boolean setData(String region){
        location.setText(region);
        return true;
    }

    /** @return the west "list" button */
    public JButton getList() {
        return list;
    }

    /** @return the east "refresh" button */
    public JButton getRefresh() {
        return refresh;
    }
}
package monitor; /** * Vertex */ public class Vertex { private String name; private String uid; private String type; private float load; /** * Constructor. * @param name * @param uid */ public Vertex(String name, String uid){ this.name = name; this.uid = uid; this.load = 1.0f; } /** * Get name * @return */ public String getName(){ return this.name; } /** * Get uid. * @return */ public String getUid(){ return this.uid; } /** * Gettype * @return */ public String getType(){ return this.type; } /** * Get load. * @return */ public float getLoad(){ return this.load; } /** * Set load. * @param load */ public void setLoad(float load){ this.load = load; } }
package ca.uhn.fhir.jpa.bulk.imprt.job;

/*-
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2021 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import ca.uhn.fhir.jpa.batch.config.BatchConstants;
import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc;
import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobStatusEnum;
import org.springframework.batch.core.ExitStatus;
import org.springframework.batch.core.StepExecution;
import org.springframework.batch.core.StepExecutionListener;
import org.springframework.batch.core.jsr.RetryListener;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.retry.ExhaustedRetryException;

import javax.annotation.Nonnull;

import static org.apache.commons.lang3.StringUtils.isNotBlank;

/**
 * This class sets the job status to ERROR if any failures occur while actually
 * generating the export files.
 */
public class BulkImportStepListener implements StepExecutionListener, RetryListener {

	@Autowired
	private IBulkDataImportSvc myBulkDataImportSvc;

	@Override
	public void beforeStep(@Nonnull StepExecution stepExecution) {
		// nothing
	}

	/**
	 * On step failure: resolves the job UUID, builds a human-readable message
	 * from the job/file description and all failure exceptions, marks the bulk
	 * import job as ERROR, and replaces the step's exit status with that
	 * message. On success the original exit status is passed through.
	 */
	@Override
	public ExitStatus afterStep(StepExecution theStepExecution) {
		if (theStepExecution.getExitStatus().getExitCode().equals(ExitStatus.FAILED.getExitCode())) {
			//Try to fetch it from the parameters first, and if it doesn't exist, fetch it from the context.
			String jobUuid = theStepExecution.getJobExecution().getJobParameters().getString(BatchConstants.JOB_UUID_PARAMETER);
			if (jobUuid == null) {
				jobUuid = theStepExecution.getJobExecution().getExecutionContext().getString(BatchConstants.JOB_UUID_PARAMETER);
			}
			assert isNotBlank(jobUuid);

			// Summarize the failing job, the file being processed, and every
			// recorded failure exception into one message.
			StringBuilder message = new StringBuilder();
			message.append("Job: ").append(theStepExecution.getExecutionContext().getString(BulkImportPartitioner.JOB_DESCRIPTION)).append("\n");
			message.append("File: ").append(theStepExecution.getExecutionContext().getString(BulkImportPartitioner.FILE_DESCRIPTION)).append("\n");
			for (Throwable next : theStepExecution.getFailureExceptions()) {
				if (next instanceof ExhaustedRetryException) {
					next = next.getCause(); // ExhaustedRetryException is a spring exception that wraps the real one
				}
				String nextErrorMessage = next.toString();
				message.append("Error: ").append(nextErrorMessage).append("\n");
			}

			theStepExecution.addFailureException(new RuntimeException(message.toString()));

			myBulkDataImportSvc.setJobToStatus(jobUuid, BulkImportJobStatusEnum.ERROR, message.toString());

			ExitStatus exitStatus = ExitStatus.FAILED.addExitDescription(message.toString());
			theStepExecution.setExitStatus(exitStatus);

			// Replace the built-in error message with a better one
			return exitStatus;
		}
		return theStepExecution.getExitStatus();
	}
}
package fi.omat.johneagle.filebox.repository;

import org.springframework.data.jpa.repository.JpaRepository;

import java.util.List;

import fi.omat.johneagle.filebox.domain.entities.Account;

/**
 * Account database table interface for JPA.
 *
 * <p>All queries are Spring Data derived queries generated from the method
 * names; no implementation is required.</p>
 */
public interface AccountRepository extends JpaRepository<Account, Long> {

    /**
     * Finds the account with exactly the given username
     * (presumably null when no match — confirm against callers).
     */
    Account findByUsername(String username);

    /**
     * Finds the account with exactly the given nickname
     * (presumably null when no match — confirm against callers).
     */
    Account findByNickname(String nickname);

    /**
     * Finds all accounts whose first name OR last name contains the
     * respective argument, case-insensitively.
     */
    List<Account> findAllByFirstNameContainingIgnoreCaseOrLastNameContainingIgnoreCase(String firstName, String lastName);
}
/**
 * This software may be modified and distributed under the terms
 * of the MIT license. See the LICENSE file for details.
 * <p>
 * Copyright (c) 2019 Miss Amelia Sara (Millie) <me@missameliasara.com>
 * Copyright (c) 2019 Penoaks Publishing LLC <development@penoaks.com>
 * <p>
 * All Rights Reserved.
 */
package jpos;

/**
 * Bump bar control interface, version 1.4.
 *
 * <p>Declares no members of its own; it extends {@link BumpBarControl13} and
 * presumably serves as a version marker in the control hierarchy — confirm
 * against the JavaPOS/UnifiedPOS specification.</p>
 *
 * <p>The redundant {@code abstract} modifier was removed: interfaces are
 * implicitly abstract (JLS §9.1.1), so this is a source-compatible cleanup.</p>
 */
public interface BumpBarControl14 extends BumpBarControl13 {
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.lucene.luke.app.desktop.util; import java.util.Objects; /** Utilities for handling strings */ public class StringUtils { public static boolean isNullOrEmpty(String s) { return Objects.isNull(s) || s.equals(""); } private StringUtils() {} }
package com.xmbl.controller.ali; import com.alibaba.fastjson.JSON; import com.alibaba.fastjson.JSONObject; import com.alipay.api.AlipayApiException; import com.alipay.api.AlipayClient; import com.alipay.api.DefaultAlipayClient; import com.alipay.api.domain.AlipayFundTransToaccountTransferModel; import com.alipay.api.domain.AlipayTradeAppPayModel; import com.alipay.api.domain.ExtendParams; import com.alipay.api.request.AlipayFundTransOrderQueryRequest; import com.alipay.api.request.AlipayFundTransToaccountTransferRequest; import com.alipay.api.request.AlipayTradeWapPayRequest; import com.alipay.api.response.AlipayFundTransOrderQueryResponse; import com.alipay.api.response.AlipayFundTransToaccountTransferResponse; import com.xmbl.base.BaseController; import com.xmbl.constant.AlipayConfig; import com.xmbl.constant.CommonConstant; import com.xmbl.model.ThirdPayBean; import com.xmbl.service.pay.BankService; import com.xmbl.service.pay.ThirdPayService; import com.xmbl.service.pay.TransferRecordService; import com.xmbl.service.user.AppUserService; import com.xmbl.util.DateUtils; import com.xmbl.util.OrderGeneratedUtils; import com.xmbl.web.api.bean.Response; import com.xmbl.web.api.bean.Route; import lombok.extern.slf4j.Slf4j; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.util.Assert; import org.springframework.web.bind.annotation.*; import javax.servlet.http.HttpServletRequest; import java.util.Date; import java.util.HashMap; import java.util.Iterator; import java.util.Map; /** * Copyright © 2018 noseparte © BeiJing BoLuo Network Technology Co. Ltd. 
* * @Author Noseparte * @Compile 2018年7月2日 -- 下午4:07:36 * @Version 1.0 * @Description */ @Slf4j @RestController @RequestMapping(value = Route.PATH + Route.Payment.PATH) public class AliPayController extends BaseController { @Autowired private AppUserService appUserService; @Autowired private BankService bankService; @Autowired private ThirdPayService thirdPayService; @Autowired private TransferRecordService transferRecordService; /** * 支付宝请求交易 * * @param userId * @param amount * @return */ @RequestMapping(value = Route.Payment.ALI_PAY, method = RequestMethod.POST) public Response ali_pay_h(@RequestParam("userId") int userId, @RequestParam("amount") String amount) { log.info("infoMsg:--- 支付宝请求交易开始"); try { //----------------请求参数------------------// ThirdPayBean payBean = thirdPayService.findByPayId(CommonConstant.ALIPAY_WAYTD); // Assert.notNull(payBean); String notify_url = payBean.getReturn_url(); String APP_ID = payBean.getMer_no(); //支付宝商戶号 // String mer_key = payBean.getMer_key(); //支付宝公钥|商户私钥 ExtendParams extendParams = new ExtendParams(); extendParams.setSysServiceProviderId(userId + "|" + 18); //拓展参数 //实例化客户端 AlipayClient alipayClient = new DefaultAlipayClient(AlipayConfig.gatewayUrl, AlipayConfig.app_id, AlipayConfig.APP_PRIVATE_KEY, AlipayConfig.format, AlipayConfig.charset, AlipayConfig.ALIPAY_PUBLIC_KEY, AlipayConfig.sign_type); AlipayTradeWapPayRequest alipay_request = new AlipayTradeWapPayRequest(); AlipayTradeAppPayModel model = new AlipayTradeAppPayModel(); model.setSubject("循心币"); String orderNo = OrderGeneratedUtils.getOrderNo(); model.setOutTradeNo(orderNo); model.setTimeoutExpress("30m"); model.setTotalAmount(amount); model.setProductCode("QUICK_WAP_WAY"); model.setExtendParams(extendParams); alipay_request.setBizModel(model); // 设置异步通知地址 alipay_request.setNotifyUrl(notify_url); // 设置同步地址 alipay_request.setReturnUrl("www.xunxinkeji.cn://"); String form = alipayClient.pageExecute(alipay_request).getBody(); //生成账单 boolean result = 
transferRecordService.generatedBills(orderNo, extendParams); if (!result) { log.error("订单生成失败"); } log.info("infoMsg:--- 支付宝请求交易结束"); return new Response().success(form); } catch (AlipayApiException e) { log.error("errorMsg:--- 支付宝请求交易失败" + e.getMessage()); return new Response().failure(e.getMessage()); } } /** * 支付宝请求交易 * * @param userId * @param amount * @return */ @RequestMapping(value = "/ali_pay_h", method = RequestMethod.POST) public Response ali_pay_h5(@RequestParam("userId") int userId, @RequestParam("amount") String amount) { log.info("infoMsg:--- 支付宝请求交易开始"); try { //----------------请求参数------------------// // ThirdPayBean payBean = thirdPayService.findByPayId(18); // Assert.notNull(payBean); String notify_url = "www.xunxinkeji.cn://api/pay/ali_pay_notify"; String APP_ID = "wxb927d2d754073085"; //支付宝商戶号 // String mer_key = payBean.getMer_key(); //支付宝公钥|商户私钥 String ALIPAY_PUBLIC_KEY = new String("MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsbwAZ/90rKQ1MptCdlS96TqYOJa2ypyM4EOjmFYsb2wSgtxNpmWKip6d2Tcobz9jjZvSmalB2RfWBKO82VSyRBLdn3CKdnUtGRMNUN5o3ayElmhWKDzf3LiuP0RWjyXXhMe4ldlXrbYX6ZjnUxmXGLdD1B++yj1hRsZUyScZYdaXb64hxbq4e4GdezGmSE0aRI3ajqOe2DBgbwbJwMKdybp+5iodgc6fag86cYueQ67CpS4BqWyF8rclLvyJUd44VfP1xgxrWpLJVc7gpXsvXDMlTWwM4CPw3OuzwkYuUTDzVGKRenbZRJkFi4FDfKTaBGgiydm39NKs4pJKBQz2KQIDAQAB"); String APP_PRIVATE_KEY = new 
String("MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQCd9ISQRnsfWSWLvEMOsazWbP3J2qtwqm82PKbUggDP4upB+tliPJ1afaswrJRNL7rTW/WEGKbXUOWjfq3RuAJoj0qfxpFhb3HIpZqnkqhyx0u8KQut4DZI13nJ2nK+Ixxbg7zThKQAXfVAr3xx1PhKP5Pceyxz3AcZOzRUxJCzF2YR6TG1qbk8b0cOah9pTtM4tsmKXEo0Uldm+rEb7VBI8Fb7uXp8vunyEGkTgJK4eSZDVkEJ1rdydEy78cj8Ix7oQuO6/nNLfcEtYajiNicTsEOoM2Ane/7S3V1veTCqbVb1agSj7s+lleFP6lcgqReNgk0phf3BpVYnM2XY88epAgMBAAECggEAI5OUXA6T4q6ohz2i+OdJ343y54kJ/jlVDSlCBjE1z5zzWGMQnVC7vEr7yN3GFVB/yuU2ekc3JN4Cqv14VvkUCcrkavJFgmWgginSmJWuvRNoWnwANmx+rY9izfUWzP6Jf48/c4C3k6GWALjF1bm7JrYFLq7Lh1jyfFtaYRFY6g0riaAhPKS/RY1L1tNqzOoS2rge7z9qJJFyDpZM0Lu6pdI9/LvVHS5j6o7WIrqb7gsKgS267uAMFqXwRcyq/P/oTj0wbrlAtgDOfoHAmITbue2jE/oAK18vswefXud5IiWpIKFRRiDNz8UqAUPgmXdEUAVJqgzgTiMHlzKQt7zTZQKBgQDJOLJaLEpRtqGwEhTVzy2r6mnlG29TuapV7Esw3wSqHwx8vLdb0VtgTPq32i1W7BE0p6ZHPkDDOKURRHwCz2as5fh+Udt/YNbTj5nHhudFZ0KULJ0OsPDslx5pe5H1FBPBXPcekzDfwnGOFNOH75gcrAsTiM9XaktOtKRxelRXUwKBgQDI9I10i/UBrezIdo/pz5mjrRSNCRlNVUoMRBnXWeoHqAuAfxxy+TuYVdTelVn8ZgNy7lt+/w9j64XSHMq5S29eJg+FuOKV3HBtiGjxZOz1blEfF3b1pIxNJNAZJPOzpGJNmYQw+rt5s75wXA9n/q/us3HADClMVLFAczKJAt5xkwKBgCHvwvy8TYh8gcZ9NjBdMbm13kg6mUsInDbDlGbYpiO++s8q0M3WgE+8i+hoDo+DXt9/iuanFCsYqZZA851Rt2JfospDKf7QqUqjBG+HTAgDg1IUOCTbKLbuQb3Ojm5EBZTuBeuNLYf/dkFdN9PMT94+EdwojbeTgMH0a2uMEx9rAoGAYW2fv2ezq9LFQBOrhnJuTNq3YgGNUN8O/Y9u7+fZ/UhN+0ilZGDNsfe7Mwc6D5LuDSTfG11R+uHPiaUH7HpUTlMpp22R/ZJYt+Iw7wg9kmifz/EybboPg79bXTV7KheCyZiqbIzDpCevJw6bMZJbfeFmPvQmeal+Hn87ew33Bx0CgYEAkLJ/KSeRbco/jvgzg7G5PHaZbRgrXVmBpfHCsXt/bAqEb0ea2hAG/tJ8yw+8zGG0R8WORZoojWqKfDTjm+0bR+YQxa3HJ/1El9A9jixPgpS+yf9dwBRyCNr2hTepzoaCWAZy5Cialn5FPfSHPJHkNaHPjtxWPihb1s0BzZb3I1s="); ExtendParams extendParams = new ExtendParams(); extendParams.setSysServiceProviderId(userId + "|" + 18); //拓展参数 //实例化客户端 AlipayClient alipayClient = new DefaultAlipayClient(AlipayConfig.gatewayUrl, APP_ID, APP_PRIVATE_KEY, "JSON", "UTF-8", ALIPAY_PUBLIC_KEY, "RSA2"); AlipayTradeWapPayRequest alipay_request = new AlipayTradeWapPayRequest(); 
AlipayTradeAppPayModel model = new AlipayTradeAppPayModel(); model.setSubject("循心币"); String orderNo = OrderGeneratedUtils.getOrderNo(); model.setOutTradeNo(orderNo); model.setTimeoutExpress("30m"); model.setTotalAmount(amount); model.setProductCode("QUICK_WAP_WAY"); model.setExtendParams(extendParams); alipay_request.setBizModel(model); // 设置异步通知地址 alipay_request.setNotifyUrl(notify_url); // 设置同步地址 alipay_request.setReturnUrl("www.xunxinkeji.cn://"); String form = alipayClient.pageExecute(alipay_request).getBody(); //生成账单 // boolean result = rechargeRecordService.generatedBills(orderNo,extendParams); if (!false) { log.error("订单生成失败"); } log.info("infoMsg:--- 支付宝请求交易结束"); return new Response().success(form); } catch (AlipayApiException e) { log.error("errorMsg:--- 支付宝请求交易失败" + e.getMessage()); return new Response().failure(e.getMessage()); } } /** * 支付宝支付通知地址 * * @param request * @return */ @RequestMapping(value = Route.Payment.ALI_PAY_NOTIFY, method = RequestMethod.POST) public Response alipay_notify(HttpServletRequest request) { log.info("infoMsg:--- 支付宝验证异步通知信息开始"); //----------------请求参数------------------// ThirdPayBean payBean = thirdPayService.findByPayId(CommonConstant.ALIPAY_WAYTD); String mer_key = payBean.getMer_key(); //支付宝公钥|商户私钥 String public_key = mer_key.split("|")[0]; // String ALIPAY_PUBLIC_KEY = MD5_UTIL.convertMD5(MD5_UTIL.convertMD5(public_key)); //获取支付宝POST过来反馈信息 Map<String, String> params = new HashMap<String, String>(); Map requestParams = request.getParameterMap(); log.info("infoMsg:--- 支付返回的信息" + requestParams); for (Iterator iter = requestParams.keySet().iterator(); iter.hasNext(); ) { String name = (String) iter.next(); String[] values = (String[]) requestParams.get(name); String valueStr = ""; for (int i = 0; i < values.length; i++) { valueStr = (i == values.length - 1) ? 
valueStr + values[i] : valueStr + values[i] + ","; } //乱码解决,这段代码在出现乱码时使用。 //valueStr = new String(valueStr.getBytes("ISO-8859-1"), "utf-8"); params.put(name, valueStr); } // boolean flag = AlipaySignature.rsaCheckV1(params, ALIPAY_PUBLIC_KEY, "utf-8","RSA2"); log.info("infoMsg:--- 支付宝信息验证" + true); //TODO 账变,修改状态,到账提醒 Double amount = Double.parseDouble(params.get("total_amount")); log.info("infoMsg:--- 支付宝信息验证amount" + amount); // String passbackParams = params.get("passbackParams"); String order_no = params.get("out_trade_no"); log.info("infoMsg:--- 支付宝信息验证out_trade_no" + order_no); // boolean result = transferRecordService.updateBill(amount,order_no); // if(result) { // RechargeRecord rechargeRecord = rechargeRecordDao.findRechargeRecordByOrderNo(order_no); // UserAmountChangeRecord record = new UserAmountChangeRecord(); // record.setDirection(ExpConstants.INCOME); // record.setChangeType(""); // record.setTansferAmount(amount); // if(rechargeRecord != null && (Integer)rechargeRecord.getUserId() != null){ // UserEntity user = appUserService.findById(rechargeRecord.getUserId()); // Double order_before = user.getAmount(); //充值前余额 // Double order_end = order_before + amount; //充值后金额 // record.setChangeType(AmountConstants.RECHARGE); // record.setTansferBefore(order_before); // record.setTansferEnd(order_end); // record.setUserId(rechargeRecord.getUserId()); // userAmountChangeRecordService.save(record); // return resp.success("SUCCESS"); // } // } log.info("infoMsg:--- 支付宝验证异步通知信息结束"); return new Response().success("SUCCESS"); } /** * 支付宝单笔提现 * * @param //Id //流水ID * @param //PlayerId //用户ID * @param //AccountId //账号ID * @param //ServerId //服务器ID * @param //Amount //提现金额 * @param //PayeeType //提现类型 1:支付宝 2:微信 * @param //PayeeAccount //支付宝账号 * @param //PayeeRealName //真实姓名 * @param //PassWord //用户密码 * @return http://www.ugcapp.com:8028/paymentServer/api/pay/transfer */ @RequestMapping(value = Route.Payment.ALI_TRANSFER, method = RequestMethod.POST, produces = 
"text/plain;charset=UTF-8") public String transfer(@RequestBody String transferInfo) { log.info("infoMsg:--- 用户提现申请开始。======================" + DateUtils.formatDate(new Date(), "YYYY-mm-dd HH:mm:ss")); JSONObject transferResult = new JSONObject(); String order_id = ""; try { log.info("infoMsg:--- Game服的请求参数 transferInfo,{}======================", JSON.toJSONString(transferInfo)); //解析Game服的请求参数 transferInfo JSONObject gamePlayerWithDrawalObj = JSON.parseObject(transferInfo); Long Id = gamePlayerWithDrawalObj.getLong("Id"); //流水ID Long PlayerId = gamePlayerWithDrawalObj.getLong("PlayerId"); //用户ID String AccountId = gamePlayerWithDrawalObj.getString("AccountId"); //账号ID int ServerId = gamePlayerWithDrawalObj.getInteger("ServerId"); //服务器ID Float Amount = gamePlayerWithDrawalObj.getFloat("Amount"); //提现金额 int PayeeType = gamePlayerWithDrawalObj.getInteger("PayeeType"); //提现类型 1:支付宝 2:微信 String PayeeAccount = gamePlayerWithDrawalObj.getString("PayeeAccount"); //支付宝账号 String PayeeRealName = gamePlayerWithDrawalObj.getString("PayeeRealName"); //真实姓名 String PassWord = gamePlayerWithDrawalObj.getString("PassWord"); //用户密码 // 判断用户的支付密码是否正确 boolean validatePwd = appUserService.verifyPassword(AccountId, PassWord); if (!validatePwd) { // 发送服务器的参数 JSONObject Obj = new JSONObject(); Obj.put("Id", Id); Obj.put("PlayerId", PlayerId); Obj.put("AccountId", AccountId); Obj.put("ServerId", ServerId); Obj.put("Amount", Amount); Obj.put("PayeeType", PayeeType); Obj.put("PayeeAccount", PayeeAccount); Obj.put("PayeeRealName", PayeeRealName); Obj.put("PassWord", PassWord); Obj.put("Result", 11001); log.error("errorMsg:--- 提现用户输入的支付密码不匹配 Obj,{}======================", JSON.toJSONString(Obj)); return JSON.toJSONString(Obj); } //paymentServer端 保存提现流水 String remark = "方块创造于" + DateUtils.formatDate(new Date(), "YYYY-mm-dd HH:mm:ss") + ",申请提现" + Amount + "元" + "流水号为:" + Id; boolean result = transferRecordService.generatedBills(PlayerId, AccountId, Id, ServerId, Amount, 
CommonConstant.ALIPAY_WAYTD, PayeeAccount, PayeeRealName, CommonConstant.Unusual, remark); if (!result) { log.error("订单生成失败"); } //----------------请求参数------------------// ThirdPayBean payBean = thirdPayService.findByPayId(CommonConstant.ALIPAY_WAYTD); Assert.isTrue(payBean != null, "商户平台信息不能为空"); // String APP_ID = payBean.getMer_no(); //支付宝商戶号 // String mer_key = payBean.getMer_key(); //支付宝公钥|商户私钥 //实例化客户端 AlipayClient alipayClient = new DefaultAlipayClient(AlipayConfig.gatewayUrl, AlipayConfig.app_id, AlipayConfig.APP_PRIVATE_KEY, AlipayConfig.format, AlipayConfig.charset, AlipayConfig.ALIPAY_PUBLIC_KEY, AlipayConfig.sign_type); AlipayFundTransToaccountTransferRequest request = new AlipayFundTransToaccountTransferRequest(); AlipayFundTransToaccountTransferModel model = new AlipayFundTransToaccountTransferModel(); model.setOutBizNo(String.valueOf(Id)); model.setPayeeType("ALIPAY_LOGONID"); model.setPayeeAccount(PayeeAccount); model.setPayeeRealName(PayeeRealName); model.setAmount(String.valueOf(Amount)); model.setPayerShowName("方块创造"); model.setRemark(remark); request.setBizModel(model); log.info("=================此次提现申请的具体信息为:-------------" + JSON.toJSONString(model.toString())); AlipayFundTransToaccountTransferResponse response = alipayClient.execute(request); /** * 1. 如果商户重复请求转账,支付宝会幂等返回成功结果,商户必须对重复转账的业务做好幂等处理;如果不判断,存在潜在的风险,商户自行承担因此而产生的所有损失。 * 2. 如果调用alipay.fund.trans.toaccount.transfer掉单时, * 或返回结果code=20000时,或返回结果code=40004,sub_code= SYSTEM_ERROR时, * 请调用alipay.fund.trans.order.query发起查询,如果未查询到结果,请保持原请求不变再次请求alipay.fund.trans.toaccount.transfer接口。 * 3. 
商户处理转账结果时,对于错误码的处理,只能使用sub_code作为后续处理的判断依据,不可使用sub_msg作为后续处理的判断依据。 */ transferResult.put("Id", Id); transferResult.put("PlayerId", PlayerId); transferResult.put("AccountId", AccountId); transferResult.put("ServerId", ServerId); transferResult.put("Amount", Amount); transferResult.put("PayeeType", PayeeType); transferResult.put("PayeeAccount", PayeeAccount); transferResult.put("PayeeRealName", PayeeRealName); transferResult.put("PassWord", PassWord); if (response.isSuccess()) { order_id = response.getOrderId(); log.info("infoMsg:============== 支付宝单笔提现接口调用成功 ===============,order_id,{}", order_id); if (response.getCode().equals("10000")) { //paymentServer执行更改订单状态操作 log.info("infoMsg:============== paymentServer执行更改订单状态操作. ==============="); //生成账单 String transferRemark = "方块创造于" + DateUtils.formatDate(new Date(), "YYYY-mm-dd HH:mm:ss") + ",提现成功" + Amount + "元" + "流水号为:" + Id; boolean updateState = transferRecordService.updateBill(String.valueOf(Id), transferRemark); if (updateState) { // 发送服务器的参数 transferResult.put("Result", 1); log.info("infoMsg:--- 用户提现申请成功。 transferResult,{}======================", JSON.toJSONString(transferResult)); } } else if (response.getCode().equals("20000") || response.getCode().equals("40004") || response.getSubCode().equals("SYSTEM_ERROR")) { AlipayClient alipayQueryClient = new DefaultAlipayClient(AlipayConfig.gatewayUrl, AlipayConfig.app_id, AlipayConfig.APP_PRIVATE_KEY, AlipayConfig.format, AlipayConfig.charset, AlipayConfig.ALIPAY_PUBLIC_KEY, AlipayConfig.sign_type); AlipayFundTransOrderQueryRequest requestQuery = new AlipayFundTransOrderQueryRequest(); StringBuffer buffer = new StringBuffer(); buffer.append("{"); buffer.append("\"out_biz_no\":").append(Id).append(","); buffer.append("\"order_id\":").append(order_id); buffer.append("}"); requestQuery.setBizContent(buffer.toString()); AlipayFundTransOrderQueryResponse responseQuery = alipayQueryClient.execute(requestQuery); if (responseQuery.isSuccess()) { 
log.info("infoMsg:============== 支付宝单笔提现查询接口调用成功 ==============="); // 发送服务器的参数 if (responseQuery.getCode().equals("10000")) { //生成账单 String transferRemark = "方块创造于" + DateUtils.formatDate(new Date(), "YYYY-mm-dd HH:mm:ss") + ",提现失败。失败原因为:" + responseQuery.getFailReason() + "提现金额为:" + Amount + "元" + "流水号为:" + Id; boolean updateState = transferRecordService.updateBill(String.valueOf(Id), transferRemark); if (updateState) { transferResult.put("Result", 11003); log.error("errorMsg:--- 用户提现申请失败。 transferResult,{}======================", JSON.toJSONString(transferResult)); } } } else { log.info("infoMsg:============== 支付宝单笔提现查询接口调用失败 ==============="); } } } else { log.info("infoMsg:============== 支付宝单笔提现查询接口系统错误 ==============="); transferResult.put("Result", 11003); } } catch (Exception e) { log.error("errorMsg:{======== 用户申请提现发生异常:======================= " + e.getMessage() + "---}"); } log.info("infoMsg:--- 用户提现申请结果。 transferResult,{}======================", JSON.toJSONString(transferResult)); return JSON.toJSONString(transferResult); } // public static void main(String[] args) { // try { // AlipayClient alipayClient = new DefaultAlipayClient(AlipayConfig.gatewayUrl,AlipayConfig.app_id,AlipayConfig.APP_PRIVATE_KEY,AlipayConfig.format,AlipayConfig.charset,AlipayConfig.ALIPAY_PUBLIC_KEY,AlipayConfig.sign_type); // AlipayFundTransToaccountTransferRequest request = new AlipayFundTransToaccountTransferRequest(); // AlipayFundTransToaccountTransferModel model = new AlipayFundTransToaccountTransferModel(); // model.setOutBizNo("100000000000019"); // model.setPayeeType("ALIPAY_LOGONID"); // model.setPayeeAccount("17610173685"); // model.setAmount("0.1"); // request.setBizModel(model); // // AlipayFundTransToaccountTransferResponse response = alipayClient.execute(request); // String body = response.getBody(); // log.info("提现response=================body,{}",body); // } catch (AlipayApiException e) { // e.printStackTrace(); // } // } }
import java.util.*;

public class DS_Presentation {
    /**
     * Console tool that checks which desired courses have their prerequisite
     * already studied. A desired course matches when its code equals a studied
     * code plus 100 (e.g. studied 1101 unlocks 1201).
     * Reads from stdin: subject count, the studied codes, the desired codes;
     * prints each match and a final allocation summary.
     *
     * Fixes: removed the unused subjects x subjects matrix allocation and the
     * leaked Scanner (now closed via try-with-resources); the duplicated
     * epilogue prints of the two summary branches are consolidated.
     */
    public static void main(String[] args) {
        System.out.println("____________________________________________________");
        System.out.println("^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^");
        System.out.println("Students Prerequisite Subjects Managment System for Islamia University of Bahawal Pur ");
        System.out.println("****************************************************");
        System.out.println("____________________________________________________");
        System.out.println("Processing Please wait............\n\n");
        try (Scanner read = new Scanner(System.in)) {
            System.out.println("Enter Number of Subjects You want to Check Prerequisites :");
            int subjects = read.nextInt();
            int[] studied = new int[subjects];  // codes already passed
            int[] desired = new int[subjects];  // codes the student wants to take
            System.out.println("____________________________________________________");
            System.out.println("____________________________________________________");
            System.out.println("Enter Course Code which you are studied :-");
            System.out.println("Code look like 1101,1203,1304,1405 etc ");
            System.out.println("____________________________________________________");
            System.out.println("____________________________________________________");
            for (int i = 0; i < subjects; i++) {
                studied[i] = read.nextInt();
            }
            System.out.println("____________________________________________________");
            System.out.println("____________________________________________________");
            System.out.println("Enter Course Code which you want to study in this section :-");
            System.out.println("Code look like 1301,1203,1304,1405 etc ");
            System.out.println("____________________________________________________");
            System.out.println("____________________________________________________\n\n\n");
            for (int j = 0; j < subjects; j++) {
                desired[j] = read.nextInt();
            }
            System.out.println("____________________________________________________");
            System.out.println("____________________________________________________\n\n");
            int matched = 0;
            for (int a = 0; a < subjects; a++) {
                // Next-level course code unlocked by this studied subject.
                int next = studied[a] + 100;
                for (int b = 0; b < subjects; b++) {
                    if (next == desired[b]) {
                        System.out.println("Subject Matched " + studied[a] + (" and ") + next);
                        matched++;
                    }
                }
            }
            System.out.println("____________________________________________________");
            System.out.println("____________________________________________________\n\n\n");
            if (matched == 0) {
                System.out.println(":: Sorry We Can't Allocate You Subjects for this Semester ::\n");
            } else {
                System.out.println(":: We Allocate You " + matched + (" Subjects For this Semester Codes are Given Before ::\n"));
            }
            System.out.println("____________________________________________________\n\n");
            System.out.println("Processing Complete ");
        }
    }
}
package com.tecazuay.example.restapi.validations; import javax.validation.ConstraintValidator; import javax.validation.ConstraintValidatorContext; import org.springframework.beans.factory.annotation.Autowired; import com.tecazuay.example.restapi.repositories.UsuarioRepository; public class CorreoExistConstraitValidator implements ConstraintValidator<CorreoExistConstrait, String> { @Autowired UsuarioRepository usuarioRepository; @Override public boolean isValid(String value, ConstraintValidatorContext context) { if (value == null) return false; return !usuarioRepository.findByCorreo(value).isPresent(); } }
/* * Copyright (c) 2018, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * WSO2 Inc. licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except * in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.wso2.carbon.identity.application.authentication.framework.config.model.graph.js; import org.apache.commons.lang.ArrayUtils; import org.apache.commons.lang.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.wso2.carbon.identity.application.authentication.framework.context.AuthenticationContext; import org.wso2.carbon.identity.application.authentication.framework.internal.FrameworkServiceDataHolder; import org.wso2.carbon.identity.application.authentication.framework.model.AuthenticatedUser; import org.wso2.carbon.identity.application.authentication.framework.util.FrameworkConstants; import org.wso2.carbon.identity.core.util.IdentityTenantUtil; import org.wso2.carbon.user.api.UserRealm; import org.wso2.carbon.user.api.UserStoreException; import org.wso2.carbon.user.core.service.RealmService; import org.wso2.carbon.user.core.util.UserCoreUtil; /** * Javascript wrapper for Java level AuthenticatedUser. * This provides controlled access to AuthenticatedUser object via provided javascript native syntax. 
 * e.g
 * var userName = context.lastAuthenticatedUser.username
 * <p>
 * instead of
 * var userName = context.getLastAuthenticatedUser().getUserName()
 * <p>
 * Also it prevents writing an arbitrary values to the respective fields, keeping consistency on runtime
 * AuthenticatedUser.
 *
 * @see AuthenticatedUser
 */
public class JsAuthenticatedUser extends AbstractJSObjectWrapper<AuthenticatedUser> {

    private static final Log LOG = LogFactory.getLog(JsAuthenticatedUser.class);

    // Authentication step this wrapper is bound to; only meaningful when constructed with a step.
    private int step;

    // Identity provider that authenticated the user; null/blank for step-independent instances.
    private String idp;

    /**
     * Constructor to be used when required to access step specific user details.
     *
     * @param context Authentication context
     * @param wrappedUser Authenticated user
     * @param step Authentication step
     * @param idp Authenticated Idp
     */
    public JsAuthenticatedUser(AuthenticationContext context, AuthenticatedUser wrappedUser, int step, String idp) {

        this(wrappedUser, step, idp);
        initializeContext(context);
    }

    /**
     * Constructor to be used when required to access step specific user details.
     *
     * @param wrappedUser Authenticated user
     * @param step Authentication step
     * @param idp Authenticated Idp
     */
    public JsAuthenticatedUser(AuthenticatedUser wrappedUser, int step, String idp) {

        super(wrappedUser);
        this.step = step;
        this.idp = idp;
    }

    /**
     * Constructor to be used when required to access step independent user.
     *
     * @param wrappedUser Authenticated user
     */
    public JsAuthenticatedUser(AuthenticatedUser wrappedUser) {

        super(wrappedUser);
    }

    /**
     * Constructor to be used when required to access a step independent user with the context initialized.
     *
     * @param context Authentication context
     * @param wrappedUser Authenticated user
     */
    public JsAuthenticatedUser(AuthenticationContext context, AuthenticatedUser wrappedUser) {

        this(wrappedUser);
        initializeContext(context);
    }

    /**
     * Resolves the named JS property against the wrapped AuthenticatedUser.
     * Claim-related properties return step/idp-bound wrappers when an idp is set,
     * otherwise step-independent wrappers around the wrapped user.
     */
    @Override
    public Object getMember(String name) {

        switch (name) {
            case FrameworkConstants.JSAttributes.JS_AUTHENTICATED_SUBJECT_IDENTIFIER:
                return getWrapped().getAuthenticatedSubjectIdentifier();
            case FrameworkConstants.JSAttributes.JS_USERNAME:
                return getWrapped().getUserName();
            case FrameworkConstants.JSAttributes.JS_USER_STORE_DOMAIN:
                return getWrapped().getUserStoreDomain();
            case FrameworkConstants.JSAttributes.JS_TENANT_DOMAIN:
                return getWrapped().getTenantDomain();
            case FrameworkConstants.JSAttributes.JS_LOCAL_CLAIMS:
                if (StringUtils.isNotBlank(idp)) {
                    return new JsClaims(getContext(), step, idp, false);
                } else {
                    // Represent step independent user
                    return new JsClaims(getContext(), getWrapped(), false);
                }
            case FrameworkConstants.JSAttributes.JS_REMOTE_CLAIMS:
                if (StringUtils.isNotBlank(idp)) {
                    return new JsClaims(getContext(), step, idp, true);
                } else {
                    // Represent step independent user
                    return new JsClaims(getContext(), getWrapped(), true);
                }
            case FrameworkConstants.JSAttributes.JS_LOCAL_ROLES:
                return getLocalRoles();
            case FrameworkConstants.JSAttributes.JS_CLAIMS:
                if (StringUtils.isNotBlank(idp)) {
                    return new JsRuntimeClaims(getContext(), step, idp);
                } else {
                    // Represent step independent user
                    return new JsRuntimeClaims(getContext(), getWrapped());
                }
            default:
                return super.getMember(name);
        }
    }

    /**
     * Writes the named JS property through to the wrapped AuthenticatedUser.
     * Only username, user store domain and tenant domain are writable here;
     * everything else is delegated to the superclass.
     */
    @Override
    public void setMember(String name, Object value) {

        switch (name) {
            case FrameworkConstants.JSAttributes.JS_USERNAME:
                getWrapped().setUserName((String) value);
                break;
            case FrameworkConstants.JSAttributes.JS_USER_STORE_DOMAIN:
                getWrapped().setUserStoreDomain((String) value);
                break;
            case FrameworkConstants.JSAttributes.JS_TENANT_DOMAIN:
                getWrapped().setTenantDomain((String) value);
                break;
            default:
                super.setMember(name, value);
        }
    }

    @Override
    public boolean hasMember(String name) {

        switch (name) {
            case FrameworkConstants.JSAttributes.JS_AUTHENTICATED_SUBJECT_IDENTIFIER:
                return getWrapped().getAuthenticatedSubjectIdentifier() != null;
            case FrameworkConstants.JSAttributes.JS_USERNAME:
                return getWrapped().getUserName() != null;
            case FrameworkConstants.JSAttributes.JS_USER_STORE_DOMAIN:
                return getWrapped().getUserStoreDomain() != null;
            case FrameworkConstants.JSAttributes.JS_TENANT_DOMAIN:
                return getWrapped().getTenantDomain() != null;
            case FrameworkConstants.JSAttributes.JS_LOCAL_CLAIMS:
                // Local claims are exposed whenever an idp is set at all.
                return idp != null;
            case FrameworkConstants.JSAttributes.JS_REMOTE_CLAIMS:
                // Remote claims exist only for a non-local (federated) idp.
                return idp != null && !FrameworkConstants.LOCAL.equals(idp);
            default:
                return super.hasMember(name);
        }
    }

    /**
     * Fetches the local role list of the wrapped user from the user store.
     * Only applies to local users (idp null or LOCAL); federated users — and any
     * user-store failure (which is logged, not rethrown) — yield an empty array.
     */
    private String[] getLocalRoles() {

        if (idp == null || FrameworkConstants.LOCAL.equals(idp)) {
            RealmService realmService = FrameworkServiceDataHolder.getInstance().getRealmService();
            int usersTenantId = IdentityTenantUtil.getTenantId(getWrapped().getTenantDomain());

            try {
                String usernameWithDomain = UserCoreUtil.addDomainToName(getWrapped().getUserName(), getWrapped()
                        .getUserStoreDomain());
                UserRealm userRealm = realmService.getTenantUserRealm(usersTenantId);
                return userRealm.getUserStoreManager().getRoleListOfUser(usernameWithDomain);
            } catch (UserStoreException e) {
                LOG.error("Error when getting role list of user: " + getWrapped(), e);
            }
        }
        return ArrayUtils.EMPTY_STRING_ARRAY;
    }
}
/*
 * Copyright 2016 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.netflix.conductor.common.metadata.workflow;

import com.github.vmg.protogen.annotations.ProtoEnum;
import com.github.vmg.protogen.annotations.ProtoField;
import com.github.vmg.protogen.annotations.ProtoMessage;
import com.netflix.conductor.common.constraints.NoSemiColonConstraint;
import com.netflix.conductor.common.constraints.OwnerEmailMandatoryConstraint;
import com.netflix.conductor.common.constraints.TaskReferenceNameUniqueConstraint;
import com.netflix.conductor.common.metadata.Auditable;

import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Objects;

import javax.validation.Valid;
import javax.validation.constraints.Email;
import javax.validation.constraints.Max;
import javax.validation.constraints.Min;
import javax.validation.constraints.NotEmpty;
import javax.validation.constraints.NotNull;

/**
 * Definition of a workflow: a named, versioned list of tasks plus
 * input/output parameters, timeout policy and related metadata.
 *
 * @author Viren
 */
@ProtoMessage
@TaskReferenceNameUniqueConstraint
public class WorkflowDef extends Auditable {

    // Action taken when the workflow exceeds timeoutSeconds.
    @ProtoEnum
    public enum TimeoutPolicy {TIME_OUT_WF, ALERT_ONLY}

    @NotEmpty(message = "WorkflowDef name cannot be null or empty")
    @ProtoField(id = 1)
    @NoSemiColonConstraint(message = "Workflow name cannot contain the following set of characters: ':'")
    private String name;

    @ProtoField(id = 2)
    private String description;

    @ProtoField(id = 3)
    private int version = 1;

    @ProtoField(id = 4)
    @NotNull
    @NotEmpty(message = "WorkflowTask list cannot be empty")
    private List<@Valid WorkflowTask> tasks = new LinkedList<>();

    @ProtoField(id = 5)
    private List<String> inputParameters = new LinkedList<>();

    @ProtoField(id = 6)
    private Map<String, Object> outputParameters = new HashMap<>();

    @ProtoField(id = 7)
    private String failureWorkflow;

    // Only schema version 2 is accepted (Min and Max both pinned to 2).
    @ProtoField(id = 8)
    @Min(value = 2, message = "workflowDef schemaVersion: {value} is only supported")
    @Max(value = 2, message = "workflowDef schemaVersion: {value} is only supported")
    private int schemaVersion = 2;

    //By default a workflow is restartable
    @ProtoField(id = 9)
    private boolean restartable = true;

    @ProtoField(id = 10)
    private boolean workflowStatusListenerEnabled = false;

    @ProtoField(id = 11)
    @OwnerEmailMandatoryConstraint
    @Email(message = "ownerEmail should be valid email address")
    private String ownerEmail;

    @ProtoField(id = 12)
    private TimeoutPolicy timeoutPolicy = TimeoutPolicy.ALERT_ONLY;

    @ProtoField(id = 13)
    @NotNull
    private long timeoutSeconds;

    @ProtoField(id = 14)
    private Map<String, Object> variables = new HashMap<>();

    /**
     * @return the name
     */
    public String getName() {
        return name;
    }

    /**
     * @param name the name to set
     */
    public void setName(String name) {
        this.name = name;
    }

    /**
     * @return the description
     */
    public String getDescription() {
        return description;
    }

    /**
     * @param description the description to set
     */
    public void setDescription(String description) {
        this.description = description;
    }

    /**
     * @return the tasks
     */
    public List<WorkflowTask> getTasks() {
        return tasks;
    }

    /**
     * @param tasks the tasks to set
     */
    public void setTasks(List<@Valid WorkflowTask> tasks) {
        this.tasks = tasks;
    }

    /**
     * @return the inputParameters
     */
    public List<String> getInputParameters() {
        return inputParameters;
    }

    /**
     * @param inputParameters the inputParameters to set
     */
    public void setInputParameters(List<String> inputParameters) {
        this.inputParameters = inputParameters;
    }

    /**
     * @return the global workflow variables
     */
    public Map<String, Object> getVariables() {
        return variables;
    }

    /**
     * @param vars the set of global workflow variables to set
     */
    public void setVariables(Map<String, Object> vars) {
        this.variables = vars;
    }

    /**
     * @return the outputParameters
     */
    public Map<String, Object> getOutputParameters() {
        return outputParameters;
    }

    /**
     * @param outputParameters the outputParameters to set
     */
    public void setOutputParameters(Map<String, Object> outputParameters) {
        this.outputParameters = outputParameters;
    }

    /**
     * @return the version
     */
    public int getVersion() {
        return version;
    }

    /**
     * @return the failureWorkflow
     */
    public String getFailureWorkflow() {
        return failureWorkflow;
    }

    /**
     * @param failureWorkflow the failureWorkflow to set
     */
    public void setFailureWorkflow(String failureWorkflow) {
        this.failureWorkflow = failureWorkflow;
    }

    /**
     * @param version the version to set
     */
    public void setVersion(int version) {
        this.version = version;
    }

    /**
     * This method determines if the workflow is restartable or not
     *
     * @return true: if the workflow is restartable
     * false: if the workflow is non restartable
     */
    public boolean isRestartable() {
        return restartable;
    }

    /**
     * This method is called only when the workflow definition is created
     *
     * @param restartable true: if the workflow is restartable
     *                    false: if the workflow is non restartable
     */
    public void setRestartable(boolean restartable) {
        this.restartable = restartable;
    }

    /**
     * @return the schemaVersion
     */
    public int getSchemaVersion() {
        return schemaVersion;
    }

    /**
     * @param schemaVersion the schemaVersion to set
     */
    public void setSchemaVersion(int schemaVersion) {
        this.schemaVersion = schemaVersion;
    }

    /**
     *
     * @return true is workflow listener will be invoked when workflow gets into a terminal state
     */
    public boolean isWorkflowStatusListenerEnabled() {
        return workflowStatusListenerEnabled;
    }

    /**
     * Specify if workflow listener is enabled to invoke a callback for completed or terminated workflows
     * @param workflowStatusListenerEnabled
     */
    public void setWorkflowStatusListenerEnabled(boolean workflowStatusListenerEnabled) {
        this.workflowStatusListenerEnabled = workflowStatusListenerEnabled;
    }

    /**
     * @return the email of the owner of this workflow definition
     */
    public String getOwnerEmail() {
        return ownerEmail;
    }

    /**
     * @param ownerEmail the owner email to set
     */
    public void setOwnerEmail(String ownerEmail) {
        this.ownerEmail = ownerEmail;
    }

    /**
     * @return the timeoutPolicy
     */
    public TimeoutPolicy getTimeoutPolicy() {
        return timeoutPolicy;
    }

    /**
     * @param timeoutPolicy the timeoutPolicy to set
     */
    public void setTimeoutPolicy(TimeoutPolicy timeoutPolicy) {
        this.timeoutPolicy = timeoutPolicy;
    }

    /**
     * @return the time after which a workflow is deemed to have timed out
     */
    public long getTimeoutSeconds() {
        return timeoutSeconds;
    }

    /**
     * @param timeoutSeconds the timeout in seconds to set
     */
    public void setTimeoutSeconds(long timeoutSeconds) {
        this.timeoutSeconds = timeoutSeconds;
    }

    /**
     * @return unique key for this definition, of the form "name.version"
     */
    public String key(){
        return getKey(name, version);
    }

    /**
     * @return the "name.version" key for the given name and version
     */
    public static String getKey(String name, int version){
        return name + "." + version;
    }

    /**
     * Returns the task that follows the task identified by {@code taskReferenceName},
     * searching the top-level task list and delegating into composite tasks via
     * {@code WorkflowTask.next(...)}. Returns null when there is no following task
     * (including the DO_WHILE last-child case noted inline).
     */
    public WorkflowTask getNextTask(String taskReferenceName){
        Iterator<WorkflowTask> it = tasks.iterator();
        while(it.hasNext()){
            WorkflowTask task = it.next();
            if (task.getTaskReferenceName().equals(taskReferenceName)) {
                // If taskReferenceName matches, break out
                break;
            }
            // Let composite tasks (fork, decision, ...) resolve the successor internally.
            WorkflowTask nextTask = task.next(taskReferenceName, null);
            if(nextTask != null){
                return nextTask;
            } else if (TaskType.DO_WHILE.name().equals(task.getType()) && !task.getTaskReferenceName().equals(taskReferenceName) && task.has(taskReferenceName)) {
                // If the task is child of Loop Task and at last position, return null.
                return null;
            }
            // The reference lives inside this composite task; its successor is the next top-level task.
            if(task.has(taskReferenceName)){
                break;
            }
        }
        if(it.hasNext()){
            return it.next();
        }
        return null;
    }

    /**
     * @return the (possibly nested) task with the given reference name, or null if absent
     */
    public WorkflowTask getTaskByRefName(String taskReferenceName){
        return collectTasks().stream()
                .filter(workflowTask -> workflowTask.getTaskReferenceName().equals(taskReferenceName))
                .findFirst()
                .orElse(null);
    }

    /**
     * @return a flat list of all tasks, including those nested inside composite tasks
     */
    public List<WorkflowTask> collectTasks() {
        List<WorkflowTask> tasks = new LinkedList<>();
        for (WorkflowTask workflowTask : this.tasks) {
            tasks.addAll(workflowTask.collectTasks());
        }
        return tasks;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        WorkflowDef that = (WorkflowDef) o;
        return getVersion() == that.getVersion() &&
                getSchemaVersion() == that.getSchemaVersion() &&
                Objects.equals(getName(), that.getName()) &&
                Objects.equals(getDescription(), that.getDescription()) &&
                Objects.equals(getTasks(), that.getTasks()) &&
                Objects.equals(getInputParameters(), that.getInputParameters()) &&
                Objects.equals(getOutputParameters(), that.getOutputParameters()) &&
                Objects.equals(getVariables(), that.getVariables()) &&
                Objects.equals(getFailureWorkflow(), that.getFailureWorkflow()) &&
                Objects.equals(getOwnerEmail(), that.getOwnerEmail()) &&
                Objects.equals(getTimeoutSeconds(), that.getTimeoutSeconds());
    }

    @Override
    public int hashCode() {
        // Hashes the same fields equals() compares, keeping the two consistent.
        return Objects.hash(
                getName(),
                getDescription(),
                getVersion(),
                getTasks(),
                getInputParameters(),
                getOutputParameters(),
                getVariables(),
                getFailureWorkflow(),
                getSchemaVersion(),
                getOwnerEmail(),
                getTimeoutSeconds()
        );
    }

    @Override
    public String toString() {
        return "WorkflowDef{" +
                "name='" + name + '\'' +
                ", description='" + description + '\'' +
                ", version=" + version +
                ", tasks=" + tasks +
                ", inputParameters=" + inputParameters +
                ", outputParameters=" + outputParameters +
                ", variables=" + variables +
                ", failureWorkflow='" + failureWorkflow + '\'' +
                ", schemaVersion=" + schemaVersion +
                ", restartable=" + restartable +
                ", workflowStatusListenerEnabled=" + workflowStatusListenerEnabled +
                ", timeoutSeconds=" + timeoutSeconds +
                '}';
    }
}
public class star3 { public static void main(String[]args) { for(int i=1;i<=5;i++) { for(int j=1;j<=5-i;j++) { System.out.print(" "); } for(int k=1;k<=i;k++) { System.out.print("*"); } System.out.println(); } } }
/* * * Copyright (c) 2006-2020, Speedment, Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); You may not * use this file except in compliance with the License. You may obtain a copy of * the License at: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package com.speedment.runtime.field.method; import com.speedment.common.annotation.GeneratedCode; import java.util.function.Function; import java.util.function.ToLongFunction; /** * A short-cut functional reference to the {@code getXXX(value)} method for a * particular field in an entity. * <p> * A {@code LongGetter<ENTITY>} has the following signature: * {@code * interface ENTITY { * long getXXX(); * } * } * * @param <ENTITY> the entity * * @author Emil Forslund * @since 3.0.0 */ @GeneratedCode(value = "Speedment") @FunctionalInterface public interface LongGetter<ENTITY> extends Getter<ENTITY>, ToLongFunction<ENTITY> { /** * Returns the member represented by this getter in the specified instance. * * @param instance the instance to get from * @return the value */ @Override long applyAsLong(ENTITY instance); @Override default Long apply(ENTITY instance) { return applyAsLong(instance); } @Override default Function<ENTITY, Long> asFunction() { return this::apply; } }
// ***************************************************************************************************************************
// * Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements.  See the NOTICE file *
// * distributed with this work for additional information regarding copyright ownership.  The ASF licenses this file        *
// * to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance            *
// * with the License.  You may obtain a copy of the License at                                                              *
// *                                                                                                                         *
// *  http://www.apache.org/licenses/LICENSE-2.0                                                                             *
// *                                                                                                                         *
// * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an  *
// * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the License for the        *
// * specific language governing permissions and limitations under the License.                                              *
// ***************************************************************************************************************************
package org.apache.juneau.jena;

import static org.apache.juneau.jena.RdfCommon.*;
import static org.apache.juneau.jena.RdfSerializer.*;

import java.lang.reflect.*;
import java.nio.charset.*;
import java.util.*;

import org.apache.juneau.*;
import org.apache.juneau.http.*;
import org.apache.juneau.jena.annotation.*;
import org.apache.juneau.reflect.*;
import org.apache.juneau.serializer.*;
import org.apache.juneau.svl.*;
import org.apache.juneau.xml.*;
import org.apache.juneau.xml.annotation.*;

/**
 * Builder class for building instances of RDF serializers.
 *
 * <p>
 * Every fluent method stores a configuration value via {@code set(...)} and
 * returns {@code this} so calls can be chained before {@link #build()}.
 */
public class RdfSerializerBuilder extends WriterSerializerBuilder {

	/**
	 * Constructor, default settings.
	 */
	public RdfSerializerBuilder() {
		super();
	}

	/**
	 * Constructor.
	 *
	 * @param ps The initial configuration settings for this builder.
	 */
	public RdfSerializerBuilder(PropertyStore ps) {
		super(ps);
	}

	@Override /* ContextBuilder */
	public RdfSerializer build() {
		return build(RdfSerializer.class);
	}

	//-----------------------------------------------------------------------------------------------------------------
	// Properties
	//-----------------------------------------------------------------------------------------------------------------

	/**
	 * Configuration property:  XML namespace for Juneau properties.
	 *
	 * @param value The new value for this property.
	 * @return This object (for method chaining).
	 */
	public RdfSerializerBuilder juneauNs(String value) {
		return set(RDF_juneauNs, value);
	}

	/**
	 * Configuration property:  Default XML namespace for bean properties.
	 *
	 * @param value The new value for this property.
	 * @return This object (for method chaining).
	 */
	public RdfSerializerBuilder juneauBpNs(String value) {
		return set(RDF_juneauBpNs, value);
	}

	/**
	 * Configuration property:  RDF/XML property: <c>iri_rules</c>.
	 *
	 * <p>
	 * Sets the engine for checking and resolving IRIs.  Possible values:
	 * <js>"lax"</js> (RDF URI references only; permits spaces, though spaces are
	 * not good practice), <js>"strict"</js> (valid IRIs, XLink and RDF; no
	 * spaces), or <js>"iri"</js> (IRI rules per RFC 3986 / RFC 3987).
	 *
	 * @param value The new value for this property.
	 * @return This object (for method chaining).
	 */
	public RdfSerializerBuilder arp_iriRules(String value) {
		return set(RDF_arp_iriRules, value);
	}

	/**
	 * Configuration property:  RDF/XML ARP property: <c>error-mode</c>.
	 *
	 * <p>
	 * Coarse-grained control of ARP error handling.  Possible values:
	 * <js>"default"</js>, <js>"lax"</js>, <js>"strict"</js>,
	 * <js>"strict-ignore"</js>, <js>"strict-warning"</js>,
	 * <js>"strict-error"</js>, <js>"strict-fatal"</js>.
	 *
	 * @param value The new value for this property.
	 * @return This object (for method chaining).
	 */
	public RdfSerializerBuilder arp_errorMode(String value) {
		return set(RDF_arp_errorMode, value);
	}

	/**
	 * Configuration property:  RDF/XML ARP property: <c>embedding</c>.
	 * (The original Javadoc header said <c>error-mode</c> — a copy-paste error;
	 * the property set below is {@code RDF_arp_embedding}.)
	 *
	 * <p>
	 * Sets ARP to look for RDF embedded within an enclosing XML document.
	 *
	 * @param value The new value for this property.
	 * @return This object (for method chaining).
	 */
	public RdfSerializerBuilder arp_embedding(boolean value) {
		return set(RDF_arp_embedding, value);
	}

	/**
	 * Configuration property:  RDF/XML property: <c>xmlbase</c>.
	 *
	 * <p>
	 * The value to be included for an <xa>xml:base</xa> attribute on the root element in the file.
	 *
	 * @param value The new value for this property.
	 * @return This object (for method chaining).
	 */
	public RdfSerializerBuilder rdfxml_xmlBase(String value) {
		return set(RDF_rdfxml_xmlBase, value);
	}

	/**
	 * Configuration property:  RDF/XML property: <c>longId</c>.
	 *
	 * <p>
	 * Whether to use long IDs for anon resources.  Short IDs are easier to read,
	 * but can run out of memory on very large models.
	 *
	 * @param value The new value for this property.
	 * @return This object (for method chaining).
	 */
	public RdfSerializerBuilder rdfxml_longId(boolean value) {
		return set(RDF_rdfxml_longId, value);
	}

	/**
	 * Configuration property:  RDF/XML property: <c>allowBadURIs</c>.
	 *
	 * <p>
	 * URIs in the graph are, by default, checked prior to serialization.
	 *
	 * @param value The new value for this property.
	 * @return This object (for method chaining).
	 */
	public RdfSerializerBuilder rdfxml_allowBadUris(boolean value) {
		return set(RDF_rdfxml_allowBadUris, value);
	}

	/**
	 * Configuration property:  RDF/XML property: <c>relativeURIs</c>.
	 *
	 * <p>
	 * What sort of relative URIs should be used.  A comma-separated list of:
	 * <js>"same-document"</js>, <js>"network"</js>, <js>"absolute"</js>,
	 * <js>"relative"</js>, <js>"parent"</js>, <js>"grandparent"</js>.
	 * The default is <js>"same-document, absolute, relative, parent"</js>;
	 * use <js>""</js> to switch off relative URIs entirely.  Relative URIs of any
	 * of these types are output where possible if and only if the option has been
	 * specified.
	 *
	 * @param value The new value for this property.
	 * @return This object (for method chaining).
	 */
	public RdfSerializerBuilder rdfxml_relativeUris(String value) {
		return set(RDF_rdfxml_relativeUris, value);
	}

	/**
	 * Configuration property:  RDF/XML property: <c>showXmlDeclaration</c>.
	 *
	 * <p>
	 * Possible values: <js>"true"</js>, <js>"false"</js>, or <js>"default"</js>
	 * (only adds an XML declaration when writing to an <c>OutputStreamWriter</c>
	 * whose encoding is neither UTF-8 nor UTF-16, in which case the encoding is
	 * shown in the declaration).
	 *
	 * @param value The new value for this property.
	 * @return This object (for method chaining).
	 */
	public RdfSerializerBuilder rdfxml_showXmlDeclaration(String value) {
		return set(RDF_rdfxml_showXmlDeclaration, value);
	}

	/**
	 * Configuration property:  RDF/XML property: <c>showDoctypeDeclaration</c>.
	 *
	 * <p>
	 * If true, an XML doctype declaration is included in the output.  The
	 * declaration includes an <c>!ENTITY</c> for each prefix mapping in the
	 * model, and any attribute value starting with the URI of a mapping is
	 * written using the corresponding entity invocation.
	 *
	 * @param value The new value for this property.
	 * @return This object (for method chaining).
	 */
	public RdfSerializerBuilder rdfxml_showDoctypeDeclaration(boolean value) {
		return set(RDF_rdfxml_showDoctypeDeclaration, value);
	}

	/**
	 * Configuration property:  RDF/XML property: <c>tab</c>.
	 *
	 * <p>
	 * The number of spaces with which to indent XML child elements.
	 *
	 * @param value The new value for this property.
	 * @return This object (for method chaining).
	 */
	public RdfSerializerBuilder rdfxml_tab(int value) {
		return set(RDF_rdfxml_tab, value);
	}

	/**
	 * Configuration property:  RDF/XML property: <c>attributeQuoteChar</c>.
	 *
	 * <p>
	 * The XML attribute quote character.
	 *
	 * @param value The new value for this property.
	 * @return This object (for method chaining).
	 */
	public RdfSerializerBuilder rdfxml_attributeQuoteChar(String value) {
		return set(RDF_rdfxml_attributeQuoteChar, value);
	}

	/**
	 * Configuration property:  RDF/XML property: <c>blockRules</c>.
	 *
	 * <p>
	 * A comma-separated list of fragment IDs from the RDF Syntax Grammar
	 * indicating grammar rules that will not be used.
	 *
	 * @param value The new value for this property.
	 * @return This object (for method chaining).
	 */
	public RdfSerializerBuilder rdfxml_blockRules(String value) {
		return set(RDF_rdfxml_blockRules, value);
	}

	/**
	 * Configuration property:  N3/Turtle property: <c>minGap</c>.
	 *
	 * <p>
	 * Minimum gap between items on a line.
	 *
	 * @param value The new value for this property.
	 * @return This object (for method chaining).
	 */
	public RdfSerializerBuilder n3_minGap(int value) {
		return set(RDF_n3_minGap, value);
	}

	/**
	 * Configuration property:  N3/Turtle property: <c>objectLists</c>.
	 *
	 * <p>
	 * Print object lists as comma-separated lists.
	 *
	 * @param value The new value for this property.
	 * @return This object (for method chaining).
	 */
	public RdfSerializerBuilder n3_objectLists(boolean value) {
		return set(RDF_n3_objectLists, value);
	}

	/**
	 * Configuration property:  N3/Turtle property: <c>subjectColumn</c>.
	 *
	 * <p>
	 * If the subject is shorter than this value, the first property may go on the same line.
	 *
	 * @param value The new value for this property.
	 * @return This object (for method chaining).
	 */
	public RdfSerializerBuilder n3_subjectColumn(int value) {
		return set(RDF_n3_subjectColumn, value);
	}

	/**
	 * Configuration property:  N3/Turtle property: <c>propertyColumn</c>.
	 *
	 * <p>
	 * Width of the property column.
	 *
	 * @param value The new value for this property.
	 * @return This object (for method chaining).
	 */
	public RdfSerializerBuilder n3_propertyColumn(int value) {
		return set(RDF_n3_propertyColumn, value);
	}

	/**
	 * Configuration property:  N3/Turtle property: <c>indentProperty</c>.
	 *
	 * <p>
	 * Width to indent properties.
	 *
	 * @param value The new value for this property.
	 * @return This object (for method chaining).
	 */
	public RdfSerializerBuilder n3_indentProperty(int value) {
		return set(RDF_n3_indentProperty, value);
	}

	/**
	 * Configuration property:  N3/Turtle property: <c>widePropertyLen</c>.
	 *
	 * <p>
	 * Width of the wide property column.  Must be longer than <c>propertyColumn</c>.
	 *
	 * @param value The new value for this property.
	 * @return This object (for method chaining).
	 */
	public RdfSerializerBuilder n3_widePropertyLen(int value) {
		return set(RDF_n3_widePropertyLen, value);
	}

	/**
	 * Configuration property:  N3/Turtle property: <c>abbrevBaseURI</c>.
	 *
	 * <p>
	 * Control whether to use abbreviations <c>&lt;&gt;</c> or <c>&lt;#&gt;</c>.
	 *
	 * @param value The new value for this property.
* @return This object (for method chaining).
*/
	public RdfSerializerBuilder n3_abbrevBaseUri(boolean value) {
		return set(RDF_n3_abbrevBaseUri, value);
	}

	/**
	 * Configuration property:  N3/Turtle property: <c>usePropertySymbols</c>.
	 *
	 * <p>
	 * Control whether to use <c>a</c>, <c>=</c> and <c>=&gt;</c> in output.
	 *
	 * @param value The new value for this property.
	 * @return This object (for method chaining).
	 */
	public RdfSerializerBuilder n3_usePropertySymbols(boolean value) {
		return set(RDF_n3_usePropertySymbols, value);
	}

	/**
	 * Configuration property:  N3/Turtle property: <c>useTripleQuotedStrings</c>.
	 *
	 * <p>
	 * Allow the use of <c>"""</c> to delimit long strings.
	 *
	 * @param value The new value for this property.
	 * @return This object (for method chaining).
	 */
	public RdfSerializerBuilder n3_useTripleQuotedStrings(boolean value) {
		return set(RDF_n3_useTripleQuotedStrings, value);
	}

	/**
	 * Configuration property:  N3/Turtle property: <c>useDoubles</c>.
	 *
	 * <p>
	 * Allow the use of doubles as <c>123.456</c>.
	 *
	 * @param value The new value for this property.
	 * @return This object (for method chaining).
	 */
	public RdfSerializerBuilder n3_useDoubles(boolean value) {
		return set(RDF_n3_useDoubles, value);
	}

	/**
	 * Configuration property:  RDF format for representing collections and arrays.
	 *
	 * <p>
	 * Possible values: <js>"DEFAULT"</js> (an RDF Sequence container),
	 * <js>"SEQ"</js>, <js>"BAG"</js>, <js>"LIST"</js>, or <js>"MULTI_VALUED"</js>.
	 * Note: with <js>"BAG"</js> or <js>"MULTI_VALUED"</js> the order of the
	 * elements in the collection will get lost.
	 *
	 * @param value The new value for this property.
	 * @return This object (for method chaining).
	 */
	public RdfSerializerBuilder collectionFormat(String value) {
		return set(RDF_collectionFormat, value);
	}

	/**
	 * Configuration property:  Default namespaces.
	 *
	 * <p>
	 * The default list of namespaces associated with this serializer.
	 *
	 * @param value The new value for this property.
	 * @return This object (for method chaining).
	 */
	public RdfSerializerBuilder namespaces(String[] value) {
		return set(RDF_namespaces, value);
	}

	/**
	 * Configuration property:  Add XSI data types to non-<c>String</c> literals.
	 *
	 * <ul class='seealso'>
	 * 	<li class='jf'>{@link RdfSerializer#RDF_addLiteralTypes}
	 * </ul>
	 *
	 * @param value The new value for this property.  The default is <jk>false</jk>.
	 * @return This object (for method chaining).
	 */
	public RdfSerializerBuilder addLiteralTypes(boolean value) {
		return set(RDF_addLiteralTypes, value);
	}

	/**
	 * Configuration property:  Add XSI data types to non-<c>String</c> literals.
	 *
	 * <p>
	 * Shortcut for calling <code>addLiteralTypes(<jk>true</jk>)</code>.
	 *
	 * @return This object (for method chaining).
	 */
	public RdfSerializerBuilder addLiteralTypes() {
		return set(RDF_addLiteralTypes, true);
	}

	/**
	 * Configuration property:  Add RDF root identifier property to root node.
	 *
	 * <p>
	 * When enabled an RDF property <c>http://www.apache.org/juneau/root</c> is
	 * added with a value of <js>"true"</js> to identify the root node in the
	 * graph, which helps locate it during parsing.  If disabled, the parser has
	 * to search the model for resources without incoming predicates, which can
	 * introduce a considerable performance degradation.
	 *
	 * <ul class='seealso'>
	 * 	<li class='jf'>{@link RdfSerializer#RDF_addRootProperty}
	 * </ul>
	 *
	 * @param value The new value for this property.  The default is <jk>false</jk>.
	 * @return This object (for method chaining).
	 */
	public RdfSerializerBuilder addRootProperty(boolean value) {
		return set(RDF_addRootProperty, value);
	}

	/**
	 * Configuration property:  Add RDF root identifier property to root node.
	 *
	 * <p>
	 * Shortcut for calling <code>addRootProperty(<jk>true</jk>)</code>.
	 *
	 * @return This object (for method chaining).
	 */
	public RdfSerializerBuilder addRootProperty() {
		return set(RDF_addRootProperty, true);
	}

	/**
	 * Configuration property:  Auto-detect namespace usage.
	 *
	 * <p>
	 * Detect namespace usage before serialization.
	 *
	 * <ul class='seealso'>
	 * 	<li class='jf'>{@link RdfSerializer#RDF_autoDetectNamespaces}
	 * </ul>
	 *
	 * @param value The new value for this property.  The default is <jk>true</jk>.
	 * @return This object (for method chaining).
	 */
	public RdfSerializerBuilder autoDetectNamespaces(boolean value) {
		return set(RDF_autoDetectNamespaces, value);
	}

	/**
	 * Configuration property:  RDF format for representing collections and arrays.
	 *
	 * <p>
	 * Strongly-typed variant of {@link #collectionFormat(String)}; sets the same
	 * {@code RDF_collectionFormat} property.  Note: with <js>"BAG"</js> or
	 * <js>"MULTI_VALUED"</js> the order of the elements in the collection will
	 * get lost.
	 *
	 * @param value The new value for this property.
	 * @return This object (for method chaining).
	 */
	public RdfSerializerBuilder collectionFormat(RdfCollectionFormat value) {
		return set(RDF_collectionFormat, value);
	}

	/**
	 * Configuration property:  Default XML namespace for bean properties.
	 *
	 * @param value The new value for this property.
	 * 	The default is <code>{j:<js>'http://www.apache.org/juneaubp/'</js>}</code>.
	 * @return This object (for method chaining).
	 */
	public RdfSerializerBuilder juneauBpNs(Namespace value) {
		return set(RDF_juneauBpNs, value);
	}

	/**
	 * Configuration property:  XML namespace for Juneau properties.
	 *
	 * @param value The new value for this property.
	 * 	The default is <code>{j:<js>'http://www.apache.org/juneau/'</js>}</code>.
	 * @return This object (for method chaining).
	 */
	public RdfSerializerBuilder juneauNs(Namespace value) {
		return set(RDF_juneauNs, value);
	}

	/**
	 * Configuration property:  RDF language.
	 *
	 * <p>
	 * Possible values: <js>"RDF/XML"</js>, <js>"RDF/XML-ABBREV"</js> (default),
	 * <js>"N-TRIPLE"</js>, <js>"N3"</js> (general N3 writer; defaults to the
	 * pretty writer unless overridden with system property
	 * <c>org.apache.jena.n3.N3JenaWriter.writer</c>), <js>"N3-PP"</js> (pretty
	 * writer), <js>"N3-PLAIN"</js> (records by subject), <js>"N3-TRIPLES"</js>
	 * (one statement per line with N3-style prefixing), or <js>"TURTLE"</js>.
	 *
	 * <ul class='seealso'>
	 * 	<li class='jf'>{@link RdfSerializer#RDF_language}
	 * </ul>
	 *
	 * @param value The new value for this property.
	 * @return This object (for method chaining).
	 */
	public RdfSerializerBuilder language(String value) {
		return set(RDF_language, value);
	}

	/**
	 * Configuration property:  Collections should be serialized and parsed as loose collections.
	 *
	 * <p>
	 * When specified, collections of resources are handled as loose collections
	 * of resources in RDF instead of resources that are children of an RDF
	 * collection (e.g. Sequence, Bag).
	 *
	 * @param value The new value for this property.  The default is <jk>false</jk>.
	 * @return This object (for method chaining).
	 */
	public RdfSerializerBuilder looseCollections(boolean value) {
		return set(RDF_looseCollections, value);
	}

	/**
	 * Configuration property:  Collections should be serialized and parsed as loose collections.
	 *
	 * <p>
	 * Shortcut for <code>looseCollections(<jk>true</jk>)</code>.
	 *
	 * @return This object (for method chaining).
	 */
	public RdfSerializerBuilder looseCollections() {
		return set(RDF_looseCollections, true);
	}

	/**
	 * Configuration property:  RDF language.
	 *
	 * <p>
	 * Shortcut for calling <code>language(<jsf>LANG_N3</jsf>)</code>.
	 *
	 * @return This object (for method chaining).
	 */
	public RdfSerializerBuilder n3() {
		return language(Constants.LANG_N3);
	}

	/**
	 * Configuration property:  Default namespaces.
	 *
	 * <p>
	 * The default list of namespaces associated with this serializer.
	 *
	 * @param values The new value for this property.
	 * @return This object (for method chaining).
	 */
	public RdfSerializerBuilder namespaces(Namespace...values) {
		return set(RDF_namespaces, values);
	}

	/**
	 * Configuration property:  RDF language.
	 *
	 * <p>
	 * Shortcut for calling <code>language(<jsf>LANG_NTRIPLE</jsf>)</code>.
	 *
	 * @return This object (for method chaining).
	 */
	public RdfSerializerBuilder ntriple() {
		return language(Constants.LANG_NTRIPLE);
	}

	/**
	 * Configuration property:  RDF language.
	 *
	 * <p>
	 * Shortcut for calling <code>language(<jsf>LANG_TURTLE</jsf>)</code>.
	 *
	 * @return This object (for method chaining).
	 */
	public RdfSerializerBuilder turtle() {
		return language(Constants.LANG_TURTLE);
	}

	/**
	 * Configuration property:  Reuse XML namespaces when RDF namespaces not specified.
	 *
	 * <p>
	 * When specified, namespaces defined using {@link XmlNs @XmlNs} and
	 * {@link org.apache.juneau.xml.annotation.Xml Xml} will be inherited by the
	 * RDF serializers; otherwise namespaces will be defined using
	 * {@link RdfNs @RdfNs} and {@link Rdf @Rdf}.
	 *
	 * @param value The new value for this property.  The default is <jk>true</jk>.
	 * @return This object (for method chaining).
	 */
	public RdfSerializerBuilder useXmlNamespaces(boolean value) {
		return set(RDF_useXmlNamespaces, value);
	}

	/**
	 * Configuration property:  RDF language.
	 *
	 * <p>
	 * Shortcut for calling <code>language(<jsf>LANG_RDF_XML</jsf>)</code>.
	 *
	 * @return This object (for method chaining).
	 */
	public RdfSerializerBuilder xml() {
		return language(Constants.LANG_RDF_XML);
	}

	/**
	 * Configuration property:  RDF language.
	 *
	 * <p>
	 * Shortcut for calling <code>language(<jsf>LANG_RDF_XML_ABBREV</jsf>)</code>.
	 *
	 * @return This object (for method chaining).
*/
	public RdfSerializerBuilder xmlabbrev() {
		return language(Constants.LANG_RDF_XML_ABBREV);
	}

	// ---------------------------------------------------------------------------------------------------------
	// The methods below narrow the return type of inherited fluent setters to RdfSerializerBuilder so that
	// call chains stay on this builder type.  Each one delegates to super and returns this — no RDF-specific
	// behavior is added.  The /* ... */ marker after @Override names the class whose method is overridden.
	// ---------------------------------------------------------------------------------------------------------

	@Override /* WriterSerializerBuilder */ public RdfSerializerBuilder fileCharset(Charset value) { super.fileCharset(value); return this; }
	@Override /* WriterSerializerBuilder */ public RdfSerializerBuilder maxIndent(int value) { super.maxIndent(value); return this; }
	@Override /* WriterSerializerBuilder */ public RdfSerializerBuilder quoteChar(char value) { super.quoteChar(value); return this; }
	@Override /* WriterSerializerBuilder */ public RdfSerializerBuilder sq() { super.sq(); return this; }
	@Override /* WriterSerializerBuilder */ public RdfSerializerBuilder streamCharset(Charset value) { super.streamCharset(value); return this; }
	@Override /* WriterSerializerBuilder */ public RdfSerializerBuilder useWhitespace(boolean value) { super.useWhitespace(value); return this; }
	@Override /* WriterSerializerBuilder */ public RdfSerializerBuilder useWhitespace() { super.useWhitespace(); return this; }
	@Override /* WriterSerializerBuilder */ public RdfSerializerBuilder ws() { super.ws(); return this; }
	@Override /* SerializerBuilder */ public RdfSerializerBuilder addBeanTypes(boolean value) { super.addBeanTypes(value); return this; }
	@Override /* SerializerBuilder */ public RdfSerializerBuilder addBeanTypes() { super.addBeanTypes(); return this; }
	@Override /* SerializerBuilder */ public RdfSerializerBuilder addRootType(boolean value) { super.addRootType(value); return this; }
	@Override /* SerializerBuilder */ public RdfSerializerBuilder addRootType() { super.addRootType(); return this; }
	@Override /* SerializerBuilder */ public RdfSerializerBuilder detectRecursions(boolean value) { super.detectRecursions(value); return this; }
	@Override /* SerializerBuilder */ public RdfSerializerBuilder detectRecursions() { super.detectRecursions(); return this; }
	@Override /* SerializerBuilder */ public RdfSerializerBuilder ignoreRecursions(boolean value) { super.ignoreRecursions(value); return this; }
	@Override /* SerializerBuilder */ public RdfSerializerBuilder ignoreRecursions() { super.ignoreRecursions(); return this; }
	@Override /* SerializerBuilder */ public RdfSerializerBuilder initialDepth(int value) { super.initialDepth(value); return this; }
	@Override /* SerializerBuilder */ public RdfSerializerBuilder listener(Class<? extends SerializerListener> value) { super.listener(value); return this; }
	@Override /* SerializerBuilder */ public RdfSerializerBuilder maxDepth(int value) { super.maxDepth(value); return this; }
	@Override /* SerializerBuilder */ public RdfSerializerBuilder sortCollections(boolean value) { super.sortCollections(value); return this; }
	@Override /* SerializerBuilder */ public RdfSerializerBuilder sortCollections() { super.sortCollections(); return this; }
	@Override /* SerializerBuilder */ public RdfSerializerBuilder sortMaps(boolean value) { super.sortMaps(value); return this; }
	@Override /* SerializerBuilder */ public RdfSerializerBuilder sortMaps() { super.sortMaps(); return this; }
	@Override /* SerializerBuilder */ public RdfSerializerBuilder trimEmptyCollections(boolean value) { super.trimEmptyCollections(value); return this; }
	@Override /* SerializerBuilder */ public RdfSerializerBuilder trimEmptyCollections() { super.trimEmptyCollections(); return this; }
	@Override /* SerializerBuilder */ public RdfSerializerBuilder trimEmptyMaps(boolean value) { super.trimEmptyMaps(value); return this; }
	@Override /* SerializerBuilder */ public RdfSerializerBuilder trimEmptyMaps() { super.trimEmptyMaps(); return this; }
	@Override /* SerializerBuilder */ public RdfSerializerBuilder trimNullProperties(boolean value) { super.trimNullProperties(value); return this; }
	@Override /* SerializerBuilder */ public RdfSerializerBuilder trimStrings(boolean value) { super.trimStrings(value); return this; }
	@Override /* SerializerBuilder */ public RdfSerializerBuilder trimStrings() { super.trimStrings(); return this; }
	@Override /* SerializerBuilder */ public RdfSerializerBuilder uriContext(UriContext value) { super.uriContext(value); return this; }
	@Override /* SerializerBuilder */ public RdfSerializerBuilder uriRelativity(UriRelativity value) { super.uriRelativity(value); return this; }
	@Override /* SerializerBuilder */ public RdfSerializerBuilder uriResolution(UriResolution value) { super.uriResolution(value); return this; }
	@Override /* BeanContextBuilder */ public RdfSerializerBuilder beanClassVisibility(Visibility value) { super.beanClassVisibility(value); return this; }
	@Override /* BeanContextBuilder */ public RdfSerializerBuilder beanConstructorVisibility(Visibility value) { super.beanConstructorVisibility(value); return this; }
	@Override /* BeanContextBuilder */ public RdfSerializerBuilder beanDictionary(Class<?>...values) { super.beanDictionary(values); return this; }
	@Override /* BeanContextBuilder */ public RdfSerializerBuilder beanDictionary(Object...values) { super.beanDictionary(values); return this; }
	@Override /* BeanContextBuilder */ public RdfSerializerBuilder beanDictionaryReplace(Class<?>...values) { super.beanDictionaryReplace(values); return this; }
	@Override /* BeanContextBuilder */ public RdfSerializerBuilder beanDictionaryReplace(Object...values) { super.beanDictionaryReplace(values); return this; }
	@Override /* BeanContextBuilder */ public RdfSerializerBuilder beanDictionaryRemove(Class<?>...values) { super.beanDictionaryRemove(values); return this; }
	@Override /* BeanContextBuilder */ public RdfSerializerBuilder beanDictionaryRemove(Object...values) { super.beanDictionaryRemove(values); return this; }
	@Override /* BeanContextBuilder */ public RdfSerializerBuilder beanFieldVisibility(Visibility value) { super.beanFieldVisibility(value); return this; }
	@Override /* BeanContextBuilder */ public RdfSerializerBuilder beanFilters(Class<?>...values) { super.beanFilters(values); return this; }
	@Override /* BeanContextBuilder */ public RdfSerializerBuilder beanFilters(Object...values) { super.beanFilters(values); return this; }
	@Override /* BeanContextBuilder */ public RdfSerializerBuilder beanFiltersReplace(Class<?>...values) { super.beanFiltersReplace(values); return this; }
	@Override /* BeanContextBuilder */ public RdfSerializerBuilder beanFiltersReplace(Object...values) { super.beanFiltersReplace(values); return this; }
	@Override /* BeanContextBuilder */ public RdfSerializerBuilder beanFiltersRemove(Class<?>...values) { super.beanFiltersRemove(values); return this; }
	@Override /* BeanContextBuilder */ public RdfSerializerBuilder beanFiltersRemove(Object...values) { super.beanFiltersRemove(values); return this; }
	@Override /* BeanContextBuilder */ public RdfSerializerBuilder beanMapPutReturnsOldValue(boolean value) { super.beanMapPutReturnsOldValue(value); return this; }
	@Override /* BeanContextBuilder */ public RdfSerializerBuilder beanMapPutReturnsOldValue() { super.beanMapPutReturnsOldValue(); return this; }
	@Override /* BeanContextBuilder */ public RdfSerializerBuilder beanMethodVisibility(Visibility value) { super.beanMethodVisibility(value); return this; }
	@Override /* BeanContextBuilder */ public RdfSerializerBuilder beansRequireDefaultConstructor(boolean value) { super.beansRequireDefaultConstructor(value); return this; }
	@Override /* BeanContextBuilder */ public RdfSerializerBuilder beansRequireDefaultConstructor() { super.beansRequireDefaultConstructor(); return this; }
	@Override /* BeanContextBuilder */ public RdfSerializerBuilder beansRequireSerializable(boolean value) { super.beansRequireSerializable(value); return this; }
	@Override /* BeanContextBuilder */ public RdfSerializerBuilder beansRequireSerializable() { super.beansRequireSerializable(); return this; }
	@Override /* BeanContextBuilder */ public RdfSerializerBuilder beansRequireSettersForGetters(boolean value) { super.beansRequireSettersForGetters(value); return this; }
	@Override /* BeanContextBuilder */ public RdfSerializerBuilder beansRequireSettersForGetters() { super.beansRequireSettersForGetters(); return this; }
	@Override /* BeanContextBuilder */ public RdfSerializerBuilder beansRequireSomeProperties(boolean value) { super.beansRequireSomeProperties(value); return this; }
	@Override /* BeanContextBuilder */ public RdfSerializerBuilder beanTypePropertyName(String value) { super.beanTypePropertyName(value); return this; }
	@Override /* BeanContextBuilder */ public RdfSerializerBuilder debug() { super.debug(); return this; }
	@Override /* BeanContextBuilder */ public <T> RdfSerializerBuilder example(Class<T> c, T o) { super.example(c, o); return this; }
	@Override /* BeanContextBuilder */ public <T> RdfSerializerBuilder exampleJson(Class<T> c, String value) { super.exampleJson(c, value); return this; }
	@Override /* BeanContextBuilder */ public RdfSerializerBuilder ignoreInvocationExceptionsOnGetters(boolean value) { super.ignoreInvocationExceptionsOnGetters(value); return this; }
	@Override /* BeanContextBuilder */ public RdfSerializerBuilder ignoreInvocationExceptionsOnGetters() { super.ignoreInvocationExceptionsOnGetters(); return this; }
	@Override /* BeanContextBuilder */ public RdfSerializerBuilder ignoreInvocationExceptionsOnSetters(boolean value) { super.ignoreInvocationExceptionsOnSetters(value); return this; }
	@Override /* BeanContextBuilder */ public RdfSerializerBuilder ignoreInvocationExceptionsOnSetters() { super.ignoreInvocationExceptionsOnSetters(); return this; }
	@Override /* BeanContextBuilder */ public RdfSerializerBuilder ignorePropertiesWithoutSetters(boolean value) { super.ignorePropertiesWithoutSetters(value); return this; }
	@Override /* BeanContextBuilder */ public RdfSerializerBuilder ignoreUnknownBeanProperties(boolean value) { super.ignoreUnknownBeanProperties(value); return this; }
	@Override /* BeanContextBuilder */ public RdfSerializerBuilder ignoreUnknownBeanProperties() { super.ignoreUnknownBeanProperties(); return this; }
	@Override /* BeanContextBuilder */ public RdfSerializerBuilder ignoreUnknownNullBeanProperties(boolean value) { super.ignoreUnknownNullBeanProperties(value); return this; }
	@Override /* BeanContextBuilder */ public RdfSerializerBuilder implClass(Class<?> interfaceClass, Class<?> implClass) { super.implClass(interfaceClass, implClass); return this; }
	@Override /* BeanContextBuilder */ public RdfSerializerBuilder implClasses(Map<String,Class<?>> values) { super.implClasses(values); return this; }
	@Override /* BeanContextBuilder */ public RdfSerializerBuilder locale(Locale value) { super.locale(value); return this; }
	@Override /* BeanContextBuilder */ public RdfSerializerBuilder mediaType(MediaType value) { super.mediaType(value); return this; }
	@Override /* BeanContextBuilder */ public RdfSerializerBuilder notBeanClasses(Class<?>...values) { super.notBeanClasses(values); return this; }
	@Override /* BeanContextBuilder */ public RdfSerializerBuilder notBeanClasses(Object...values) { super.notBeanClasses(values); return this; }
	@Override /* BeanContextBuilder */ public RdfSerializerBuilder notBeanClassesReplace(Class<?>...values) { super.notBeanClassesReplace(values); return this; }
	@Override /* BeanContextBuilder */ public RdfSerializerBuilder notBeanClassesReplace(Object...values) { super.notBeanClassesReplace(values); return this; }
	@Override /* BeanContextBuilder */ public RdfSerializerBuilder notBeanClassesRemove(Class<?>...values) { super.notBeanClassesRemove(values); return this; }
	@Override /* BeanContextBuilder */ public RdfSerializerBuilder notBeanClassesRemove(Object...values) { super.notBeanClassesRemove(values); return this; }
	@Override /* BeanContextBuilder */ public RdfSerializerBuilder notBeanPackages(Object...values) { super.notBeanPackages(values); return this; }
	@Override /* BeanContextBuilder */ public RdfSerializerBuilder notBeanPackages(String...values) { super.notBeanPackages(values); return this; }
	@Override /* BeanContextBuilder */ public RdfSerializerBuilder notBeanPackagesReplace(String...values) { super.notBeanPackagesReplace(values); return this; }
	@Override /* BeanContextBuilder */ public RdfSerializerBuilder notBeanPackagesReplace(Object...values) { super.notBeanPackagesReplace(values); return this; }
	@Override /* BeanContextBuilder */ public RdfSerializerBuilder notBeanPackagesRemove(String...values) { super.notBeanPackagesRemove(values); return this; }
	@Override /* BeanContextBuilder */ public RdfSerializerBuilder notBeanPackagesRemove(Object...values) { super.notBeanPackagesRemove(values); return this; }
	@Override /* BeanContextBuilder */ public RdfSerializerBuilder pojoSwaps(Class<?>...values) { super.pojoSwaps(values); return this; }
	@Override /* BeanContextBuilder */ public RdfSerializerBuilder pojoSwaps(Object...values) { super.pojoSwaps(values); return this; }
	@Override /* BeanContextBuilder */ public RdfSerializerBuilder pojoSwapsReplace(Class<?>...values) { super.pojoSwapsReplace(values); return this; }
	@Override /* BeanContextBuilder */ public RdfSerializerBuilder pojoSwapsReplace(Object...values) { super.pojoSwapsReplace(values); return this; }
	@Override /* BeanContextBuilder */ public RdfSerializerBuilder pojoSwapsRemove(Class<?>...values) { super.pojoSwapsRemove(values); return this; }
	@Override /* BeanContextBuilder */ public RdfSerializerBuilder pojoSwapsRemove(Object...values) { super.pojoSwapsRemove(values); return this; }
	@Override /* BeanContextBuilder */ public RdfSerializerBuilder sortProperties(boolean value) { super.sortProperties(value); return this; }
	@Override /* BeanContextBuilder */ public RdfSerializerBuilder sortProperties() { super.sortProperties(); return this; }
	@Override /* BeanContextBuilder */ public RdfSerializerBuilder timeZone(TimeZone value) { super.timeZone(value); return this; }
	@Override /* BeanContextBuilder */ public RdfSerializerBuilder useEnumNames(boolean value) { super.useEnumNames(value); return this; }
	@Override /* BeanContextBuilder */ public RdfSerializerBuilder useEnumNames() { super.useEnumNames();
return this; } @Override /* BeanContextBuilder */ public RdfSerializerBuilder useInterfaceProxies(boolean value) { super.useInterfaceProxies(value); return this; } @Override /* BeanContextBuilder */ public RdfSerializerBuilder useJavaBeanIntrospector(boolean value) { super.useJavaBeanIntrospector(value); return this; } @Override /* BeanContextBuilder */ public RdfSerializerBuilder useJavaBeanIntrospector() { super.useJavaBeanIntrospector(); return this; } @Override /* ContextBuilder */ public RdfSerializerBuilder set(String name, Object value) { super.set(name, value); return this; } @Override /* ContextBuilder */ public RdfSerializerBuilder set(Map<String,Object> properties) { super.set(properties); return this; } @Override /* ContextBuilder */ public RdfSerializerBuilder add(Map<String,Object> properties) { super.add(properties); return this; } @Override /* ContextBuilder */ public RdfSerializerBuilder addTo(String name, Object value) { super.addTo(name, value); return this; } @Override /* ContextBuilder */ public RdfSerializerBuilder addTo(String name, String key, Object value) { super.addTo(name, key, value); return this; } @Override /* ContextBuilder */ public RdfSerializerBuilder removeFrom(String name, Object value) { super.removeFrom(name, value); return this; } @Override /* ContextBuilder */ public RdfSerializerBuilder apply(PropertyStore copyFrom) { super.apply(copyFrom); return this; } @Override /* ContextBuilder */ public RdfSerializerBuilder applyAnnotations(AnnotationList al, VarResolverSession vrs) { super.applyAnnotations(al, vrs); return this; } @Override /* ContextBuilder */ public RdfSerializerBuilder applyAnnotations(Class<?> fromClass) { super.applyAnnotations(fromClass); return this; } @Override /* ContextBuilder */ public RdfSerializerBuilder applyAnnotations(Method fromMethod) { super.applyAnnotations(fromMethod); return this; } }
/* * Prime Developer Trial * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) * * The version of the OpenAPI document: v1 * * * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). * https://openapi-generator.tech * Do not edit the class manually. */ package com.factset.sdk.TimeSeriesAPIforDigitalPortals.models; import java.util.Objects; import java.util.Arrays; import java.util.Map; import java.util.HashMap; import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonTypeName; import com.fasterxml.jackson.annotation.JsonValue; import io.swagger.annotations.ApiModel; import io.swagger.annotations.ApiModelProperty; import java.io.Serializable; import com.fasterxml.jackson.annotation.JsonPropertyOrder; import com.factset.sdk.TimeSeriesAPIforDigitalPortals.JSON; /** * Time range for the returned time series. 
 */
@ApiModel(description = "Time range for the returned time series.")
@JsonPropertyOrder({
  InlineResponse2004DataRange.JSON_PROPERTY_START,
  InlineResponse2004DataRange.JSON_PROPERTY_END
})
@javax.annotation.Generated(value = "org.openapitools.codegen.languages.JavaClientCodegen")
public class InlineResponse2004DataRange implements Serializable {
  private static final long serialVersionUID = 1L;

  // NOTE: auto-generated by OpenAPI Generator (see file header) — regenerate from the
  // spec rather than hand-editing this class.

  public static final String JSON_PROPERTY_START = "start";
  // Inclusive start of the range; plain String, format not constrained at this level.
  private String start;

  public static final String JSON_PROPERTY_END = "end";
  // Exclusive end of the range.
  private String end;

  public InlineResponse2004DataRange() {
  }

  // Jackson deserialization entry point; both properties are required in the payload.
  @JsonCreator
  public InlineResponse2004DataRange(
    @JsonProperty(value=JSON_PROPERTY_START, required=true) String start,
    @JsonProperty(value=JSON_PROPERTY_END, required=true) String end
  ) {
    this();
    this.start = start;
    this.end = end;
  }

  // Fluent setter for builder-style call sites.
  public InlineResponse2004DataRange start(String start) {
    this.start = start;
    return this;
  }

  /**
   * The starting point of the time range (inclusive).
   * @return start
   **/
  @javax.annotation.Nonnull
  @ApiModelProperty(required = true, value = "The starting point of the time range (inclusive).")
  @JsonProperty(JSON_PROPERTY_START)
  @JsonInclude(value = JsonInclude.Include.ALWAYS)
  public String getStart() {
    return start;
  }

  @JsonProperty(JSON_PROPERTY_START)
  @JsonInclude(value = JsonInclude.Include.ALWAYS)
  public void setStart(String start) {
    this.start = start;
  }

  // Fluent setter for builder-style call sites.
  public InlineResponse2004DataRange end(String end) {
    this.end = end;
    return this;
  }

  /**
   * The ending point of the time range (exclusive).
   * @return end
   **/
  @javax.annotation.Nonnull
  @ApiModelProperty(required = true, value = "The ending point of the time range (exclusive).")
  @JsonProperty(JSON_PROPERTY_END)
  @JsonInclude(value = JsonInclude.Include.ALWAYS)
  public String getEnd() {
    return end;
  }

  @JsonProperty(JSON_PROPERTY_END)
  @JsonInclude(value = JsonInclude.Include.ALWAYS)
  public void setEnd(String end) {
    this.end = end;
  }

  /**
   * Return true if this inline_response_200_4_data_range object is equal to o.
   */
  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    InlineResponse2004DataRange inlineResponse2004DataRange = (InlineResponse2004DataRange) o;
    // Value equality over both fields; consistent with hashCode() below.
    return Objects.equals(this.start, inlineResponse2004DataRange.start) &&
        Objects.equals(this.end, inlineResponse2004DataRange.end);
  }

  @Override
  public int hashCode() {
    return Objects.hash(start, end);
  }

  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder();
    sb.append("class InlineResponse2004DataRange {\n");
    sb.append(" start: ").append(toIndentedString(start)).append("\n");
    sb.append(" end: ").append(toIndentedString(end)).append("\n");
    sb.append("}");
    return sb.toString();
  }

  /**
   * Convert the given object to string with each line indented by 4 spaces
   * (except the first line).
   */
  private String toIndentedString(Object o) {
    if (o == null) {
      return "null";
    }
    return o.toString().replace("\n", "\n ");
  }

}
/* * Copyright (c) 2008-2019, Hazelcast, Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.hazelcast.client.impl.protocol.codec; import com.hazelcast.client.impl.protocol.ClientMessage; import com.hazelcast.client.impl.protocol.Generated; import com.hazelcast.client.impl.protocol.codec.builtin.*; import com.hazelcast.client.impl.protocol.codec.custom.*; import javax.annotation.Nullable; import static com.hazelcast.client.impl.protocol.ClientMessage.*; import static com.hazelcast.client.impl.protocol.codec.builtin.FixedSizeTypesCodec.*; /* * This file is auto-generated by the Hazelcast Client Protocol Code Generator. * To change this file, edit the templates or the protocol * definitions on the https://github.com/hazelcast/hazelcast-client-protocol * and regenerate it. */ /** * Loads all keys into the store. This is a batch load operation so that an implementation can optimize the multiple loads. 
 */
@Generated("85c315543548949a1ba7a0ee4feaf849")
public final class MapLoadAllCodec {
    //hex: 0x012100
    public static final int REQUEST_MESSAGE_TYPE = 73984;
    //hex: 0x012101
    public static final int RESPONSE_MESSAGE_TYPE = 73985;
    // Fixed-size request fields are laid out immediately after the partition id
    // within the initial frame of the client message.
    private static final int REQUEST_REPLACE_EXISTING_VALUES_FIELD_OFFSET = PARTITION_ID_FIELD_OFFSET + INT_SIZE_IN_BYTES;
    private static final int REQUEST_INITIAL_FRAME_SIZE = REQUEST_REPLACE_EXISTING_VALUES_FIELD_OFFSET + BOOLEAN_SIZE_IN_BYTES;
    private static final int RESPONSE_INITIAL_FRAME_SIZE = RESPONSE_BACKUP_ACKS_FIELD_OFFSET + INT_SIZE_IN_BYTES;

    // Static codec utility; never instantiated.
    private MapLoadAllCodec() {
    }

    @edu.umd.cs.findbugs.annotations.SuppressFBWarnings({"URF_UNREAD_PUBLIC_OR_PROTECTED_FIELD"})
    public static class RequestParameters {

        /**
         * name of map
         */
        public java.lang.String name;

        /**
         * when <code>true</code>, existing values in the Map will
         * be replaced by those loaded from the MapLoader
         */
        public boolean replaceExistingValues;
    }

    /**
     * Encodes a Map.LoadAll request: a fixed-size initial frame (message type +
     * replaceExistingValues flag) followed by the variable-size map name.
     */
    public static ClientMessage encodeRequest(java.lang.String name, boolean replaceExistingValues) {
        ClientMessage clientMessage = ClientMessage.createForEncode();
        clientMessage.setRetryable(false);
        clientMessage.setAcquiresResource(false);
        clientMessage.setOperationName("Map.LoadAll");
        ClientMessage.Frame initialFrame = new ClientMessage.Frame(new byte[REQUEST_INITIAL_FRAME_SIZE], UNFRAGMENTED_MESSAGE);
        encodeInt(initialFrame.content, TYPE_FIELD_OFFSET, REQUEST_MESSAGE_TYPE);
        encodeBoolean(initialFrame.content, REQUEST_REPLACE_EXISTING_VALUES_FIELD_OFFSET, replaceExistingValues);
        clientMessage.add(initialFrame);
        StringCodec.encode(clientMessage, name);
        return clientMessage;
    }

    /**
     * Decodes a Map.LoadAll request; frame order must mirror encodeRequest:
     * fixed-size fields from the initial frame, then the map name.
     */
    public static MapLoadAllCodec.RequestParameters decodeRequest(ClientMessage clientMessage) {
        ClientMessage.ForwardFrameIterator iterator = clientMessage.frameIterator();
        RequestParameters request = new RequestParameters();
        ClientMessage.Frame initialFrame = iterator.next();
        request.replaceExistingValues = decodeBoolean(initialFrame.content, REQUEST_REPLACE_EXISTING_VALUES_FIELD_OFFSET);
        request.name = StringCodec.decode(iterator);
        return request;
    }

    // The response carries no parameters; the class exists for protocol symmetry.
    @edu.umd.cs.findbugs.annotations.SuppressFBWarnings({"URF_UNREAD_PUBLIC_OR_PROTECTED_FIELD"})
    public static class ResponseParameters {
    }

    /**
     * Encodes the (empty) Map.LoadAll response: just the fixed-size initial frame.
     */
    public static ClientMessage encodeResponse() {
        ClientMessage clientMessage = ClientMessage.createForEncode();
        ClientMessage.Frame initialFrame = new ClientMessage.Frame(new byte[RESPONSE_INITIAL_FRAME_SIZE], UNFRAGMENTED_MESSAGE);
        encodeInt(initialFrame.content, TYPE_FIELD_OFFSET, RESPONSE_MESSAGE_TYPE);
        clientMessage.add(initialFrame);
        return clientMessage;
    }

    /**
     * Decodes the (empty) Map.LoadAll response; consumes the initial frame only.
     */
    public static MapLoadAllCodec.ResponseParameters decodeResponse(ClientMessage clientMessage) {
        ClientMessage.ForwardFrameIterator iterator = clientMessage.frameIterator();
        ResponseParameters response = new ResponseParameters();
        //empty initial frame
        iterator.next();
        return response;
    }
}
package org.apache.fulcrum.intake; /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ /** * Base exception thrown by the Intake service. * * @author <a href="mailto:quintonm@bellsouth.net">Quinton McCombs</a> * @version $Id$ */ public class IntakeException extends Exception { /** * Serial version id */ private static final long serialVersionUID = 7078617074679759156L; /** * Constructs a new <code>IntakeException</code> without specified * detail message. */ public IntakeException() { super(); } /** * Constructs a new <code>IntakeException</code> with specified * detail message. * * @param msg The error message. */ public IntakeException(String msg) { super(msg); } /** * Constructs a new <code>IntakeException</code> with specified * nested <code>Throwable</code>. * * @param nested The exception or error that caused this exception * to be thrown. */ public IntakeException(Throwable nested) { super(nested); } /** * Constructs a new <code>IntakeException</code> with specified * detail message and nested <code>Throwable</code>. * * @param msg The error message. * @param nested The exception or error that caused this exception * to be thrown. */ public IntakeException(String msg, Throwable nested) { super(msg, nested); } }
// Copyright 2011 Google Inc. All Rights Reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.enterprise.adaptor; /** * Listener for configuration modification events. */ interface ConfigModificationListener { public void configModified(ConfigModificationEvent ev); }
package com.cy4.betterdungeons.core.network.message; import java.util.function.Supplier; import com.cy4.betterdungeons.client.screen.UpgradeTreeScreen; import net.minecraft.client.Minecraft; import net.minecraft.network.PacketBuffer; import net.minecraftforge.fml.network.NetworkEvent; public class SyncTreeMessage { public SyncTreeMessage() { } public static void encode(SyncTreeMessage message, PacketBuffer buffer) { } public static SyncTreeMessage decode(PacketBuffer buffer) { return new SyncTreeMessage(); } @SuppressWarnings("resource") public static void handle(SyncTreeMessage message, Supplier<NetworkEvent.Context> contextSupplier) { NetworkEvent.Context context = contextSupplier.get(); context.enqueueWork(() -> { if (Minecraft.getInstance().currentScreen instanceof UpgradeTreeScreen) { ((UpgradeTreeScreen)(Minecraft.getInstance().currentScreen)).getUpgradeDialog().refreshWidgets(); } }); context.setPacketHandled(true); } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package org.apache.streampipes.rest.impl.datalake.model; import java.util.List; import java.util.Map; public class PageResult extends DataResult { private int page; private int pageSum; public PageResult(int total, List<String> headers, List<List<Object>> rows, int page, int pageSum) { super(total, headers, rows); this.page = page; this.pageSum = pageSum; } public int getPage() { return page; } public void setPage(int page) { this.page = page; } public int getPageSum() { return pageSum; } public void setPageSum(int pageSum) { this.pageSum = pageSum; } }
/* Copyright 2022 Noah McLean * * Redistribution and use in source and binary forms, with * or without modification, are permitted provided that the * following conditions are met: * * 1. Redistributions of source code must retain the above * copyright notice, this list of conditions and the * following disclaimer. * * 2. Redistributions in binary form must reproduce the * above copyright notice, this list of conditions and * the following disclaimer in the documentation and/or * other materials provided with the distribution. * * 3. Neither the name of the copyright holder nor the * names of its contributors may be used to endorse or * promote products derived from this software without * specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A * PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE * COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, * INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ package mtools.apps.litemessage.control.logic; /** * The different types of commands that will be determined * by the {@link CommandParseModule} * @author Noah * */ public enum CommandType { DO_NOTHING, EXIT; }
/* * To change this license header, choose License Headers in Project Properties. * To change this template file, choose Tools | Templates * and open the template in the editor. */ package mage.cards.g; import java.util.UUID; import mage.MageInt; import mage.abilities.common.EntersBattlefieldTriggeredAbility; import mage.abilities.effects.common.ReturnToHandChosenControlledPermanentEffect; import mage.cards.CardImpl; import mage.cards.CardSetInfo; import mage.constants.CardType; import mage.constants.SubType; import mage.filter.common.FilterControlledPermanent; import mage.filter.predicate.mageobject.HistoricPredicate; import mage.filter.predicate.permanent.AnotherPredicate; /** * * @author Rystan */ public final class GuardiansOfKoilos extends CardImpl { private static final FilterControlledPermanent filter = new FilterControlledPermanent("another historic permanent you control"); static { filter.add(new AnotherPredicate()); filter.add(new HistoricPredicate()); } public GuardiansOfKoilos(UUID ownerId, CardSetInfo setInfo) { super(ownerId, setInfo, new CardType[]{CardType.ARTIFACT, CardType.CREATURE}, "{5}"); this.subtype.add(SubType.CONSTRUCT); this.power = new MageInt(4); this.toughness = new MageInt(4); // When Guardians of Koilos enters the battlefield, you may return another target historic permanent you control to its owner's hand. this.addAbility(new EntersBattlefieldTriggeredAbility(new ReturnToHandChosenControlledPermanentEffect(filter) .setText("you may return another target historic permanent you control to its owner's hand. <i>(Artifacts, legendaries, and Sagas are historic.)</i>"), true)); } public GuardiansOfKoilos(final GuardiansOfKoilos card) { super(card); } @Override public GuardiansOfKoilos copy() { return new GuardiansOfKoilos(this); } }
package com.epam.jdi.light.ui.html.asserts; import com.epam.jdi.light.asserts.generic.UIAssert; import com.epam.jdi.light.common.JDIAction; import com.epam.jdi.light.ui.html.elements.common.ColorPicker; import static com.epam.jdi.light.asserts.core.SoftAssert.jdiAssert; import static org.hamcrest.Matchers.is; /** * Created by Roman Iovlev on 14.02.2018 * Email: roman.iovlev.jdi@gmail.com; Skype: roman.iovlev */ public class ColorAssert extends UIAssert<ColorAssert, ColorPicker> { @JDIAction("Assert that '{name}' color is {0}") public ColorAssert color(String color) { jdiAssert(element.color(), is(color)); return this; } }
package com.capitalone.dashboard.builder;

import com.capitalone.dashboard.enums.Clazz;
import com.capitalone.dashboard.enums.HelmStatus;
import com.capitalone.dashboard.model.BaseModel;
import com.capitalone.dashboard.model.Chart;
import com.capitalone.dashboard.model.Release;
import com.capitalone.dashboard.model.Repo;

import java.time.LocalDateTime;
import java.time.ZoneId;
import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
import java.time.format.DateTimeFormatterBuilder;
import java.time.temporal.ChronoField;
import java.util.Locale;
import java.util.Map;

/**
 * Builds Helm-related model objects ({@link Chart}, {@link Repo},
 * {@link Release}) from a map of CLI output columns keyed by column header
 * (e.g. "NAME", "UPDATED", "STATUS").
 */
public class ModelBuilder {

    // DateTimeFormatter is immutable and thread-safe; build these once
    // instead of on every createModelObject call.
    private static final DateTimeFormatter CHART_DATE_FORMAT = new DateTimeFormatterBuilder()
            .parseCaseInsensitive()
            .appendPattern("EEE MMM d HH:mm:ss yyyy")
            .toFormatter(Locale.US);

    private static final DateTimeFormatter RELEASE_DATE_FORMAT = new DateTimeFormatterBuilder()
            .parseCaseInsensitive()
            .appendPattern("yyyy-MM-dd HH:mm:ss")
            .appendFraction(ChronoField.NANO_OF_SECOND, 0, 9, true)
            .appendPattern(" Z zzz")
            .toFormatter(Locale.US);

    /**
     * Creates the model object corresponding to {@code clazz} from the parsed
     * column values in {@code args}.
     *
     * @param clazz model class whose simple name selects a {@link Clazz} constant
     * @param args  column-header-to-value map for one CLI output row
     * @return the populated model object
     * @throws IllegalArgumentException if the simple class name is not a {@link Clazz} constant
     * @throws IllegalStateException    if the constant is not handled below
     */
    public static BaseModel createModelObject(Class<?> clazz, Map<String, String> args) {
        // Resolve the enum constant once; reused by the switch and the error message.
        final Clazz type = Clazz.valueOf(clazz.getSimpleName());
        switch (type) {
            case Chart:
                // Collapse double spaces before parsing — presumably introduced by
                // column padding around single-digit days; TODO confirm against the CLI output.
                final LocalDateTime chartDateTime = LocalDateTime.parse(
                        args.get("UPDATED").replaceAll(" {2}", " "), CHART_DATE_FORMAT);
                // Was ZoneOffset.systemDefault(): that invoked the static method inherited
                // from ZoneId through a subclass reference — same behavior, but misleading.
                return new Chart(chartDateTime.atZone(ZoneId.systemDefault()).toEpochSecond(),
                        args.get("STATUS").toUpperCase(Locale.ROOT),
                        args.get("CHART"),
                        args.get("APP VERSION"),
                        args.get("DESCRIPTION"));
            case Repo:
                return new Repo(args.get("NAME"), args.get("URL"));
            case Release:
                final ZonedDateTime releaseDateTime =
                        ZonedDateTime.parse(args.get("UPDATED"), RELEASE_DATE_FORMAT);
                return new Release(args.get("NAME"),
                        args.get("APP VERSION"),
                        releaseDateTime.toEpochSecond(),
                        // Locale.ROOT keeps the upper-casing locale-independent so enum
                        // lookup cannot break under e.g. the Turkish default locale.
                        HelmStatus.valueOf(args.get("STATUS").toUpperCase(Locale.ROOT)),
                        args.get("CHART"),
                        args.get("NAMESPACE"));
            default:
                throw new IllegalStateException("Unexpected value: " + type);
        }
    }
}
package com.example.xyzreader.ui; import android.app.Fragment; import android.app.LoaderManager; import android.content.Intent; import android.content.Loader; import android.database.Cursor; import android.graphics.Bitmap; import android.graphics.Color; import android.graphics.Rect; import android.graphics.Typeface; import android.graphics.drawable.ColorDrawable; import android.os.Bundle; import android.support.v4.app.ShareCompat; import android.support.v7.graphics.Palette; import android.text.Html; import android.text.format.DateUtils; import android.text.method.LinkMovementMethod; import android.util.Log; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.ImageView; import android.widget.TextView; import com.android.volley.VolleyError; import com.android.volley.toolbox.ImageLoader; import com.example.xyzreader.R; import com.example.xyzreader.data.ArticleLoader; /** * A fragment representing a single Article detail screen. This fragment is * either contained in a {@link ArticleListActivity} in two-pane mode (on * tablets) or a {@link ArticleDetailActivity} on handsets. */ public class ArticleDetailFragment2 extends Fragment implements LoaderManager.LoaderCallbacks<Cursor> { private static final String TAG = "ArticleDetailFragment"; public static final String ARG_ITEM_ID = "item_id"; private static final float PARALLAX_FACTOR = 1.25f; private Cursor mCursor; private long mItemId; private View mRootView; private int mMutedColor = 0xFF333333; private ObservableScrollView mScrollView; private DrawInsetsFrameLayout mDrawInsetsFrameLayout; private ColorDrawable mStatusBarColorDrawable; private int mTopInset; private View mPhotoContainerView; private ImageView mPhotoView; private int mScrollY; private boolean mIsCard = false; private int mStatusBarFullOpacityBottom; /** * Mandatory empty constructor for the fragment manager to instantiate the * fragment (e.g. upon screen orientation changes). 
*/ public ArticleDetailFragment2() { } public static ArticleDetailFragment newInstance(long itemId) { Bundle arguments = new Bundle(); arguments.putLong(ARG_ITEM_ID, itemId); ArticleDetailFragment fragment = new ArticleDetailFragment(); fragment.setArguments(arguments); return fragment; } @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); if (getArguments().containsKey(ARG_ITEM_ID)) { mItemId = getArguments().getLong(ARG_ITEM_ID); } mIsCard = getResources().getBoolean(R.bool.detail_is_card); mStatusBarFullOpacityBottom = getResources().getDimensionPixelSize( R.dimen.detail_card_top_margin); setHasOptionsMenu(true); } public ArticleDetailActivity getActivityCast() { return (ArticleDetailActivity) getActivity(); } @Override public void onActivityCreated(Bundle savedInstanceState) { super.onActivityCreated(savedInstanceState); // In support library r8, calling initLoader for a fragment in a FragmentPagerAdapter in // the fragment's onCreate may cause the same LoaderManager to be dealt to multiple // fragments because their mIndex is -1 (haven't been added to the activity yet). Thus, // we do this in onActivityCreated. 
// NOTE(review): this chunk is the interior of a detail Fragment; the class header,
// field declarations and the start of onCreate() are outside this view.
        getLoaderManager().initLoader(0, null, this);
    }

    /**
     * Inflates the article-detail layout and wires up the window-inset callback,
     * the parallax scroll behaviour, the photo views and the share FAB, then
     * binds whatever data is currently loaded.
     */
    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
            Bundle savedInstanceState) {
        mRootView = inflater.inflate(R.layout.fragment_article_detail, container, false);
        mDrawInsetsFrameLayout = (DrawInsetsFrameLayout)
                mRootView.findViewById(R.id.draw_insets_frame_layout);
        mDrawInsetsFrameLayout.setOnInsetsCallback(new DrawInsetsFrameLayout.OnInsetsCallback() {
            @Override
            public void onInsetsChanged(Rect insets) {
                // Remember the status-bar inset so updateStatusBar() can fade a scrim into it.
                mTopInset = insets.top;
            }
        });
        mScrollView = (ObservableScrollView) mRootView.findViewById(R.id.scrollview);
        mScrollView.setCallbacks(new ObservableScrollView.Callbacks() {
            @Override
            public void onScrollChanged() {
                mScrollY = mScrollView.getScrollY();
                // Parallax: the photo container scrolls slower than the content.
                mPhotoContainerView.setTranslationY((int) (mScrollY - mScrollY / PARALLAX_FACTOR));
                updateStatusBar();
            }
        });
        mPhotoView = (ImageView) mRootView.findViewById(R.id.photo);
        mPhotoContainerView = mRootView.findViewById(R.id.photo_container);
        mStatusBarColorDrawable = new ColorDrawable(0);
        mRootView.findViewById(R.id.share_fab).setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                // NOTE(review): shares placeholder text — presumably this should share
                // the article title/URL instead; confirm the intended payload.
                startActivity(Intent.createChooser(ShareCompat.IntentBuilder.from(getActivity())
                        .setType("text/plain")
                        .setText("Some sample text")
                        .getIntent(), getString(R.string.action_share)));
            }
        });
        bindViews();
        updateStatusBar();
        return mRootView;
    }

    // Fades a darkened variant of the photo's muted color behind the status bar
    // once the content has been scrolled far enough underneath it.
    private void updateStatusBar() {
        int color = 0;
        if (mPhotoView != null && mTopInset != 0 && mScrollY > 0) {
            // 0..1 ramp between "three insets above" and "one inset above" the
            // full-opacity threshold.
            float f = progress(mScrollY,
                    mStatusBarFullOpacityBottom - mTopInset * 3,
                    mStatusBarFullOpacityBottom - mTopInset);
            color = Color.argb((int) (255 * f),
                    (int) (Color.red(mMutedColor) * 0.9),
                    (int) (Color.green(mMutedColor) * 0.9),
                    (int) (Color.blue(mMutedColor) * 0.9));
        }
        mStatusBarColorDrawable.setColor(color);
        mDrawInsetsFrameLayout.setInsetBackground(mStatusBarColorDrawable);
    }

    // Linear position of v within [min, max], clamped to [0, 1].
    static float progress(float v, float min, float max) {
        return constrain((v - min) / (max - min), 0, 1);
    }

    // Clamps val to the inclusive range [min, max].
    static float constrain(float val, float min, float max) {
        if (val < min) {
            return min;
        } else if (val > max) {
            return max;
        } else {
            return val;
        }
    }

    // Populates title/byline/body and the photo from the current cursor, or
    // shows "N/A" placeholders when no article is loaded.
    private void bindViews() {
        if (mRootView == null) {
            return;
        }
        TextView titleView = (TextView) mRootView.findViewById(R.id.article_title);
        TextView bylineView = (TextView) mRootView.findViewById(R.id.article_byline);
        bylineView.setMovementMethod(new LinkMovementMethod());
        TextView bodyView = (TextView) mRootView.findViewById(R.id.article_body);
        // TODO: Remove
        // bodyView.setTypeface(Typeface.createFromAsset(getResources().getAssets(), "Rosario-Regular.ttf"));
        if (mCursor != null) {
            // Fade the whole view in now that data is available.
            mRootView.setAlpha(0);
            mRootView.setVisibility(View.VISIBLE);
            mRootView.animate().alpha(1);
            titleView.setText(mCursor.getString(ArticleLoader.Query.TITLE));
            bylineView.setText(Html.fromHtml(
                    DateUtils.getRelativeTimeSpanString(
                            mCursor.getLong(ArticleLoader.Query.PUBLISHED_DATE),
                            System.currentTimeMillis(), DateUtils.HOUR_IN_MILLIS,
                            DateUtils.FORMAT_ABBREV_ALL).toString()
                            + " by <font color='#ffffff'>"
                            + mCursor.getString(ArticleLoader.Query.AUTHOR)
                            + "</font>"));
            bodyView.setText(Html.fromHtml(mCursor.getString(ArticleLoader.Query.BODY)));
            ImageLoaderHelper.getInstance(getActivity()).getImageLoader()
                    .get(mCursor.getString(ArticleLoader.Query.PHOTO_URL),
                            new ImageLoader.ImageListener() {
                        @Override
                        public void onResponse(ImageLoader.ImageContainer imageContainer, boolean b) {
                            Bitmap bitmap = imageContainer.getBitmap();
                            if (bitmap != null) {
                                // Derive a dark muted color from the photo for the
                                // meta bar and the status-bar scrim.
                                Palette p = Palette.generate(bitmap, 12);
                                mMutedColor = p.getDarkMutedColor(0xFF333333);
                                mPhotoView.setImageBitmap(imageContainer.getBitmap());
                                mRootView.findViewById(R.id.meta_bar)
                                        .setBackgroundColor(mMutedColor);
                                updateStatusBar();
                            }
                        }

                        @Override
                        public void onErrorResponse(VolleyError volleyError) {
                            // Intentionally ignored: the placeholder background
                            // simply remains if the photo fails to load.
                        }
                    });
        } else {
            mRootView.setVisibility(View.GONE);
            titleView.setText("N/A");
            bylineView.setText("N/A");
            bodyView.setText("N/A");
        }
    }

    @Override
    public Loader<Cursor> onCreateLoader(int i, Bundle bundle) {
        // One-shot cursor loader for the single article identified by mItemId.
        return ArticleLoader.newInstanceForItemId(getActivity(), mItemId);
    }

    @Override
    public void onLoadFinished(Loader<Cursor> cursorLoader, Cursor cursor) {
        if (!isAdded()) {
            // Fragment was detached while loading; release the cursor and bail out.
            if (cursor != null) {
                cursor.close();
            }
            return;
        }
        mCursor = cursor;
        if (mCursor != null && !mCursor.moveToFirst()) {
            Log.e(TAG, "Error reading item detail cursor");
            mCursor.close();
            mCursor = null;
        }
        bindViews();
    }

    @Override
    public void onLoaderReset(Loader<Cursor> cursorLoader) {
        mCursor = null;
        bindViews();
    }

    /**
     * Returns the Y position (in pixels) of the lowest point the Up button may
     * occupy, accounting for the photo's parallax offset, or
     * {@code Integer.MAX_VALUE} while the layout is not measured yet.
     */
    public int getUpButtonFloor() {
        if (mPhotoContainerView == null || mPhotoView.getHeight() == 0) {
            return Integer.MAX_VALUE;
        }
        // account for parallax
        return mIsCard
                ? (int) mPhotoContainerView.getTranslationY() + mPhotoView.getHeight() - mScrollY
                : mPhotoView.getHeight() - mScrollY;
    }
}
package globalDefinition;

/**
 * Shared HTTP constants used when composing raw request/response header lines.
 *
 * <p>Pure constants holder — declared {@code final} with a private constructor
 * so it cannot be instantiated or subclassed.
 */
public final class HttpDefinition {

    /** Complete {@code Content-Type} header line for JSON payloads. */
    public static final String CONTENT_TYPE_JSON = "Content-Type: application/json";

    private HttpDefinition() {
        // Utility class; prevent instantiation.
    }
}
/*
 * Copyright (c) 2018, 7u83 <7u83@mail.ru>
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * * Redistributions of source code must retain the above copyright notice, this
 *   list of conditions and the following disclaimer.
 * * Redistributions in binary form must reproduce the above copyright notice,
 *   this list of conditions and the following disclaimer in the documentation
 *   and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */
package opensesim.world;

import opensesim.sesim.interfaces.FeeModel;

/**
 * Default {@link FeeModel} implementation.
 *
 * <p>NOTE(review): {@link #getTakerFee(double)} — the only {@code FeeModel}
 * method actually implemented here — always throws; this class is effectively
 * a stub.
 *
 * @author 7u83 <7u83@mail.ru>
 */
public class DefaultFeeModel implements FeeModel {

    // NOTE(review): looks like a disabled earlier implementation of the taker-fee
    // calculation (prefixed with 'x' so it no longer overrides FeeModel). It
    // returns a flat fee of 5 units of the pair's currency regardless of the
    // pair — confirm both the amount and the intent before re-enabling.
    public AssetPack xgetTakerFee(AssetPair pair) {
        double vol = 5;
        return new AssetPack(pair.getCurrency(), vol);
    }

    /**
     * {@inheritDoc}
     *
     * @throws UnsupportedOperationException always — not implemented yet.
     */
    @Override
    public double getTakerFee(double x) {
        throw new UnsupportedOperationException("Not supported yet.");
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.beam.sdk.io.gcp.spanner; import static java.util.stream.Collectors.toList; import static org.apache.beam.sdk.io.gcp.spanner.MutationUtils.isPointDelete; import static org.apache.beam.sdk.io.gcp.spanner.changestreams.ChangeStreamsConstants.DEFAULT_CHANGE_STREAM_NAME; import static org.apache.beam.sdk.io.gcp.spanner.changestreams.ChangeStreamsConstants.DEFAULT_INCLUSIVE_END_AT; import static org.apache.beam.sdk.io.gcp.spanner.changestreams.ChangeStreamsConstants.DEFAULT_INCLUSIVE_START_AT; import static org.apache.beam.sdk.io.gcp.spanner.changestreams.ChangeStreamsConstants.DEFAULT_RPC_PRIORITY; import static org.apache.beam.sdk.io.gcp.spanner.changestreams.ChangeStreamsConstants.MAX_INCLUSIVE_END_AT; import static org.apache.beam.sdk.io.gcp.spanner.changestreams.NameGenerator.generatePartitionMetadataTableName; import static org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Preconditions.checkArgument; import static org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Preconditions.checkNotNull; import com.google.api.gax.retrying.RetrySettings; import com.google.api.gax.rpc.StatusCode.Code; import com.google.auto.value.AutoValue; import 
com.google.cloud.ServiceFactory; import com.google.cloud.Timestamp; import com.google.cloud.spanner.AbortedException; import com.google.cloud.spanner.DatabaseId; import com.google.cloud.spanner.Dialect; import com.google.cloud.spanner.ErrorCode; import com.google.cloud.spanner.KeySet; import com.google.cloud.spanner.Mutation; import com.google.cloud.spanner.Mutation.Op; import com.google.cloud.spanner.Options; import com.google.cloud.spanner.Options.RpcPriority; import com.google.cloud.spanner.PartitionOptions; import com.google.cloud.spanner.Spanner; import com.google.cloud.spanner.SpannerException; import com.google.cloud.spanner.SpannerOptions; import com.google.cloud.spanner.Statement; import com.google.cloud.spanner.Struct; import com.google.cloud.spanner.TimestampBound; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Comparator; import java.util.HashMap; import java.util.List; import java.util.OptionalInt; import java.util.Set; import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; import org.apache.beam.runners.core.metrics.GcpResourceIdentifiers; import org.apache.beam.runners.core.metrics.MonitoringInfoConstants; import org.apache.beam.runners.core.metrics.ServiceCallMetric; import org.apache.beam.sdk.annotations.Experimental; import org.apache.beam.sdk.annotations.Experimental.Kind; import org.apache.beam.sdk.coders.SerializableCoder; import org.apache.beam.sdk.io.gcp.spanner.changestreams.ChangeStreamMetrics; import org.apache.beam.sdk.io.gcp.spanner.changestreams.action.ActionFactory; import org.apache.beam.sdk.io.gcp.spanner.changestreams.dao.DaoFactory; import org.apache.beam.sdk.io.gcp.spanner.changestreams.dofn.CleanUpReadChangeStreamDoFn; import org.apache.beam.sdk.io.gcp.spanner.changestreams.dofn.DetectNewPartitionsDoFn; import org.apache.beam.sdk.io.gcp.spanner.changestreams.dofn.InitializeDoFn; import 
org.apache.beam.sdk.io.gcp.spanner.changestreams.dofn.PostProcessingMetricsDoFn; import org.apache.beam.sdk.io.gcp.spanner.changestreams.dofn.ReadChangeStreamPartitionDoFn; import org.apache.beam.sdk.io.gcp.spanner.changestreams.mapper.MapperFactory; import org.apache.beam.sdk.io.gcp.spanner.changestreams.model.DataChangeRecord; import org.apache.beam.sdk.io.gcp.spanner.changestreams.restriction.ThroughputEstimator; import org.apache.beam.sdk.metrics.Counter; import org.apache.beam.sdk.metrics.Distribution; import org.apache.beam.sdk.metrics.Metrics; import org.apache.beam.sdk.options.StreamingOptions; import org.apache.beam.sdk.options.ValueProvider; import org.apache.beam.sdk.options.ValueProvider.StaticValueProvider; import org.apache.beam.sdk.schemas.Schema; import org.apache.beam.sdk.transforms.Create; import org.apache.beam.sdk.transforms.DoFn; import org.apache.beam.sdk.transforms.Flatten; import org.apache.beam.sdk.transforms.Impulse; import org.apache.beam.sdk.transforms.MapElements; import org.apache.beam.sdk.transforms.PTransform; import org.apache.beam.sdk.transforms.ParDo; import org.apache.beam.sdk.transforms.Reshuffle; import org.apache.beam.sdk.transforms.SerializableFunction; import org.apache.beam.sdk.transforms.View; import org.apache.beam.sdk.transforms.Wait; import org.apache.beam.sdk.transforms.WithTimestamps; import org.apache.beam.sdk.transforms.display.DisplayData; import org.apache.beam.sdk.transforms.windowing.DefaultTrigger; import org.apache.beam.sdk.transforms.windowing.GlobalWindow; import org.apache.beam.sdk.transforms.windowing.GlobalWindows; import org.apache.beam.sdk.transforms.windowing.Window; import org.apache.beam.sdk.util.BackOff; import org.apache.beam.sdk.util.FluentBackoff; import org.apache.beam.sdk.util.Sleeper; import org.apache.beam.sdk.values.PBegin; import org.apache.beam.sdk.values.PCollection; import org.apache.beam.sdk.values.PCollection.IsBounded; import org.apache.beam.sdk.values.PCollectionList; import 
org.apache.beam.sdk.values.PCollectionTuple; import org.apache.beam.sdk.values.PCollectionView; import org.apache.beam.sdk.values.PDone; import org.apache.beam.sdk.values.PInput; import org.apache.beam.sdk.values.Row; import org.apache.beam.sdk.values.TupleTag; import org.apache.beam.sdk.values.TupleTagList; import org.apache.beam.sdk.values.TypeDescriptor; import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.annotations.VisibleForTesting; import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.MoreObjects; import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Stopwatch; import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.cache.CacheBuilder; import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.cache.CacheLoader; import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.cache.LoadingCache; import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableList; import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableSet; import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.Iterables; import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.primitives.UnsignedBytes; import org.checkerframework.checker.nullness.qual.Nullable; import org.joda.time.Duration; import org.joda.time.Instant; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Experimental {@link PTransform Transforms} for reading from and writing to <a * href="https://cloud.google.com/spanner">Google Cloud Spanner</a>. * * <h3>Reading from Cloud Spanner</h3> * * <p>To read from Cloud Spanner, apply {@link Read} transformation. It will return a {@link * PCollection} of {@link Struct Structs}, where each element represents an individual row returned * from the read operation. Both Query and Read APIs are supported. 
See more information about <a * href="https://cloud.google.com/spanner/docs/reads">reading from Cloud Spanner</a> * * <p>To execute a <strong>query</strong>, specify a {@link Read#withQuery(Statement)} or {@link * Read#withQuery(String)} during the construction of the transform. * * <pre>{@code * PCollection<Struct> rows = p.apply( * SpannerIO.read() * .withInstanceId(instanceId) * .withDatabaseId(dbId) * .withQuery("SELECT id, name, email FROM users")); * }</pre> * * <p>To use the Read API, specify a {@link Read#withTable(String) table name} and a {@link * Read#withColumns(List) list of columns}. * * <pre>{@code * PCollection<Struct> rows = p.apply( * SpannerIO.read() * .withInstanceId(instanceId) * .withDatabaseId(dbId) * .withTable("users") * .withColumns("id", "name", "email")); * }</pre> * * <p>To optimally read using index, specify the index name using {@link Read#withIndex}. * * <p>The transform is guaranteed to be executed on a consistent snapshot of data, utilizing the * power of read only transactions. Staleness of data can be controlled using {@link * Read#withTimestampBound} or {@link Read#withTimestamp(Timestamp)} methods. <a * href="https://cloud.google.com/spanner/docs/transactions">Read more</a> about transactions in * Cloud Spanner. * * <p>It is possible to read several {@link PCollection PCollections} within a single transaction. * Apply {@link SpannerIO#createTransaction()} transform, that lazily creates a transaction. The * result of this transformation can be passed to read operation using {@link * Read#withTransaction(PCollectionView)}. * * <pre>{@code * SpannerConfig spannerConfig = ... 
* * PCollectionView<Transaction> tx = p.apply( * SpannerIO.createTransaction() * .withSpannerConfig(spannerConfig) * .withTimestampBound(TimestampBound.strong())); * * PCollection<Struct> users = p.apply( * SpannerIO.read() * .withSpannerConfig(spannerConfig) * .withQuery("SELECT name, email FROM users") * .withTransaction(tx)); * * PCollection<Struct> tweets = p.apply( * SpannerIO.read() * .withSpannerConfig(spannerConfig) * .withQuery("SELECT user, tweet, date FROM tweets") * .withTransaction(tx)); * }</pre> * * <h3>Writing to Cloud Spanner</h3> * * <p>The Cloud Spanner {@link Write} transform writes to Cloud Spanner by executing a collection of * input row {@link Mutation Mutations}. The mutations are grouped into batches for efficiency. * * <p>To configure the write transform, create an instance using {@link #write()} and then specify * the destination Cloud Spanner instance ({@link Write#withInstanceId(String)} and destination * database ({@link Write#withDatabaseId(String)}). For example: * * <pre>{@code * // Earlier in the pipeline, create a PCollection of Mutations to be written to Cloud Spanner. * PCollection<Mutation> mutations = ...; * // Write mutations. * SpannerWriteResult result = mutations.apply( * "Write", SpannerIO.write().withInstanceId("instance").withDatabaseId("database")); * }</pre> * * <h3>SpannerWriteResult</h3> * * <p>The {@link SpannerWriteResult SpannerWriteResult} object contains the results of the * transform, including a {@link PCollection} of MutationGroups that failed to write, and a {@link * PCollection} that can be used in batch pipelines as a completion signal to {@link Wait * Wait.OnSignal} to indicate when all input has been written. Note that in streaming pipelines, * this signal will never be triggered as the input is unbounded and this {@link PCollection} is * using the {@link GlobalWindow}. 
 *
 * <h3>Batching and Grouping</h3>
 *
 * <p>To reduce the number of transactions sent to Spanner, the {@link Mutation Mutations} are
 * grouped into batches. The default maximum size of the batch is set to 1MB or 5000 mutated cells,
 * or 500 rows (whichever is reached first). To override this use {@link
 * Write#withBatchSizeBytes(long) withBatchSizeBytes()}, {@link Write#withMaxNumMutations(long)
 * withMaxNumMutations()} or {@link Write#withMaxNumRows(long) withMaxNumRows()}. Setting
 * either to a small value or zero disables batching.
 *
 * <p>Note that the <a
 * href="https://cloud.google.com/spanner/quotas#limits_for_creating_reading_updating_and_deleting_data">maximum
 * size of a single transaction</a> is 20,000 mutated cells - including cells in indexes. If you
 * have a large number of indexes and are getting exceptions with message: <tt>INVALID_ARGUMENT: The
 * transaction contains too many mutations</tt> you will need to specify a smaller number of {@code
 * MaxNumMutations}.
 *
 * <p>The batches written are obtained by grouping enough {@link Mutation Mutations} from the
 * Bundle provided by Beam to form several batches. This group of {@link Mutation Mutations} is then
 * sorted by table and primary key, and the batches are created from the sorted group. Each batch
 * will then have rows for the same table, with keys that are 'close' to each other, thus optimising
 * write efficiency by each batch affecting as few table splits as possible.
 *
 * <p>This grouping factor (number of batches) is controlled by the parameter {@link
 * Write#withGroupingFactor(int) withGroupingFactor()}.
 *
 * <p>Note that each worker will need enough memory to hold {@code GroupingFactor x
 * MaxBatchSizeBytes} Mutations, so if you have a large {@code MaxBatchSize} you may need to reduce
 * {@code GroupingFactor}.
 *
 * <p>While Grouping and Batching increase write efficiency, they dramatically increase the latency
 * between when a Mutation is received by the transform, and when it is actually written to the
 * database. This is because enough Mutations need to be received to fill the grouped batches. In
 * Batch pipelines (bounded sources), this is not normally an issue, but in Streaming (unbounded)
 * pipelines, this latency is often seen as unacceptable.
 *
 * <p>There are therefore 3 different ways that this transform can be configured:
 *
 * <ul>
 *   <li>With Grouping and Batching. <br>
 *       This is the default for Batch pipelines, where sorted batches of Mutations are created and
 *       written. This is the most efficient way to ingest large amounts of data, but the highest
 *       latency before writing
 *   <li>With Batching but no Grouping <br>
 *       If {@link Write#withGroupingFactor(int) .withGroupingFactor(1)} is set, grouping is
 *       disabled. This is the default for Streaming pipelines. Unsorted batches are created and
 *       written as soon as enough mutations to fill a batch are received. This reflects a
 *       compromise where a small amount of additional latency enables more efficient writes
 *   <li>Without any Batching <br>
 *       If {@link Write#withBatchSizeBytes(long) .withBatchSizeBytes(0)} is set, no batching is
 *       performed and the Mutations are written to the database as soon as they are received,
 *       ensuring the lowest latency before Mutations are written.
 * </ul>
 *
 * <h3>Monitoring</h3>
 *
 * <p>Several counters are provided for monitoring purposes:
 *
 * <ul>
 *   <li><tt>batchable_mutation_groups</tt><br>
 *       Counts the mutations that are batched for writing to Spanner.
* <li><tt>unbatchable_mutation_groups</tt><br> * Counts the mutations that can not be batched and are applied individually - either because * they are too large to fit into a batch, or they are ranged deletes. * <li><tt>mutation_group_batches_received, mutation_group_batches_write_success, * mutation_group_batches_write_failed</tt><br> * Count the number of batches that are processed. If Failure Mode is set to {@link * FailureMode#REPORT_FAILURES REPORT_FAILURES}, then failed batches will be split up and the * individual mutation groups retried separately. * <li><tt>mutation_groups_received, mutation_groups_write_success, * mutation_groups_write_fail</tt><br> * Count the number of individual MutationGroups that are processed. * <li><tt>spanner_write_success, spanner_write_fail</tt><br> * The number of writes to Spanner that have occurred. * <li><tt>spanner_write_retries</tt><br> * The number of times a write is retried after a failure - either due to a timeout, or when * batches fail and {@link FailureMode#REPORT_FAILURES REPORT_FAILURES} is set so that * individual Mutation Groups are retried. * <li><tt>spanner_write_timeouts</tt><br> * The number of timeouts that occur when writing to Spanner. Writes that timed out are * retried after a backoff. Large numbers of timeouts suggest an overloaded Spanner instance. * <li><tt>spanner_write_total_latency_ms</tt><br> * The total amount of time spent writing to Spanner, in milliseconds. * </ul> * * <h3>Database Schema Preparation</h3> * * <p>The Write transform reads the database schema on pipeline start to know which columns are used * as primary keys of the tables and indexes. This is so that the transform knows how to sort the * grouped Mutations by table name and primary key as described above. 
 *
 * <p>If the database schema, or any additional tables or indexes, are created in the same pipeline
 * then there will be a race condition: if the schema is read before the table is created, its
 * primary key will not be known. This will mean that the sorting/batching will not
 * be optimal and performance will be reduced (warnings will be logged for rows using unknown
 * tables).
 *
 * <p>To prevent this race condition, use {@link Write#withSchemaReadySignal(PCollection)} to pass a
 * signal {@link PCollection} (for example the output of the transform that creates the table(s))
 * which will be used with {@link Wait.OnSignal} to prevent the schema from being read until it is
 * ready. The Write transform will be paused until this signal {@link PCollection} is closed.
 *
 * <h3>Transactions</h3>
 *
 * <p>The transform does not provide the same transactional guarantees as Cloud Spanner. In
 * particular,
 *
 * <ul>
 *   <li>Individual Mutations are submitted atomically, but all Mutations are not submitted in the
 *       same transaction.
 *   <li>A Mutation is applied at least once;
 *   <li>If the pipeline was unexpectedly stopped, mutations that were already applied will not get
 *       rolled back.
 * </ul>
 *
 * <p>Use {@link MutationGroup MutationGroups} with the {@link WriteGrouped} transform to ensure
 * that a small set of mutations is bundled together. It is guaranteed that mutations in a {@link
 * MutationGroup} are submitted in the same transaction. Note that a MutationGroup must not exceed
 * the Spanner transaction limits.
 *
 * <pre>{@code
 * // Earlier in the pipeline, create a PCollection of MutationGroups to be written to Cloud Spanner.
 * PCollection<MutationGroup> mutationGroups = ...;
 * // Write mutation groups.
* SpannerWriteResult result = mutationGroups.apply( * "Write", * SpannerIO.write().withInstanceId("instance").withDatabaseId("database").grouped()); * }</pre> * * <h3>Streaming Support</h3> * * <p>{@link Write} can be used as a streaming sink, however as with batch mode note that the write * order of individual {@link Mutation}/{@link MutationGroup} objects is not guaranteed. * * <h3>Updates to the I/O connector code</h3> * * For any significant significant updates to this I/O connector, please consider involving * corresponding code reviewers mentioned <a * href="https://github.com/apache/beam/blob/master/sdks/java/io/google-cloud-platform/OWNERS"> * here</a>. */ @Experimental(Kind.SOURCE_SINK) @SuppressWarnings({ "nullness" // TODO(https://issues.apache.org/jira/browse/BEAM-10402) }) public class SpannerIO { private static final Logger LOG = LoggerFactory.getLogger(SpannerIO.class); private static final long DEFAULT_BATCH_SIZE_BYTES = 1024L * 1024L; // 1 MB // Max number of mutations to batch together. private static final int DEFAULT_MAX_NUM_MUTATIONS = 5000; // Max number of mutations to batch together. private static final int DEFAULT_MAX_NUM_ROWS = 500; // Multiple of mutation size to use to gather and sort mutations private static final int DEFAULT_GROUPING_FACTOR = 1000; // Size of caches for read/write ServiceCallMetric objects . // This is a reasonable limit, as for reads, each worker will process very few different table // read requests, and for writes, batching will ensure that write operations for the same // table occur at the same time (within a bundle). static final int METRICS_CACHE_SIZE = 100; /** * Creates an uninitialized instance of {@link Read}. Before use, the {@link Read} must be * configured with a {@link Read#withInstanceId} and {@link Read#withDatabaseId} that identify the * Cloud Spanner database. 
*/ public static Read read() { return new AutoValue_SpannerIO_Read.Builder() .setSpannerConfig(SpannerConfig.create()) .setTimestampBound(TimestampBound.strong()) .setReadOperation(ReadOperation.create()) .setBatching(true) .build(); } /** * A {@link PTransform} that works like {@link #read}, but executes read operations coming from a * {@link PCollection}. */ public static ReadAll readAll() { return new AutoValue_SpannerIO_ReadAll.Builder() .setSpannerConfig(SpannerConfig.create()) .setTimestampBound(TimestampBound.strong()) .setBatching(true) .build(); } /** * Returns a transform that creates a batch transaction. By default, {@link * TimestampBound#strong()} transaction is created, to override this use {@link * CreateTransaction#withTimestampBound(TimestampBound)}. */ @Experimental public static CreateTransaction createTransaction() { return new AutoValue_SpannerIO_CreateTransaction.Builder() .setSpannerConfig(SpannerConfig.create()) .setTimestampBound(TimestampBound.strong()) .build(); } /** * Creates an uninitialized instance of {@link Write}. Before use, the {@link Write} must be * configured with a {@link Write#withInstanceId} and {@link Write#withDatabaseId} that identify * the Cloud Spanner database being written. */ @Experimental public static Write write() { return new AutoValue_SpannerIO_Write.Builder() .setSpannerConfig(SpannerConfig.create()) .setBatchSizeBytes(DEFAULT_BATCH_SIZE_BYTES) .setMaxNumMutations(DEFAULT_MAX_NUM_MUTATIONS) .setMaxNumRows(DEFAULT_MAX_NUM_ROWS) .setFailureMode(FailureMode.FAIL_FAST) .build(); } /** * Creates an uninitialized instance of {@link ReadChangeStream}. Before use, the {@link * ReadChangeStream} must be configured with a {@link ReadChangeStream#withProjectId}, {@link * ReadChangeStream#withInstanceId}, and {@link ReadChangeStream#withDatabaseId} that identify the * Cloud Spanner database being written. It must also be configured with the start time and the * change stream name. 
*/ @Experimental public static ReadChangeStream readChangeStream() { return new AutoValue_SpannerIO_ReadChangeStream.Builder() .setSpannerConfig(SpannerConfig.create()) .setChangeStreamName(DEFAULT_CHANGE_STREAM_NAME) .setRpcPriority(DEFAULT_RPC_PRIORITY) .setInclusiveStartAt(DEFAULT_INCLUSIVE_START_AT) .setInclusiveEndAt(DEFAULT_INCLUSIVE_END_AT) .build(); } /** Implementation of {@link #readAll}. */ @AutoValue public abstract static class ReadAll extends PTransform<PCollection<ReadOperation>, PCollection<Struct>> { abstract SpannerConfig getSpannerConfig(); abstract @Nullable PCollectionView<Transaction> getTransaction(); abstract @Nullable TimestampBound getTimestampBound(); abstract Builder toBuilder(); @AutoValue.Builder abstract static class Builder { abstract Builder setSpannerConfig(SpannerConfig spannerConfig); abstract Builder setTransaction(PCollectionView<Transaction> transaction); abstract Builder setTimestampBound(TimestampBound timestampBound); abstract Builder setBatching(Boolean batching); abstract ReadAll build(); } /** Specifies the Cloud Spanner configuration. */ public ReadAll withSpannerConfig(SpannerConfig spannerConfig) { return toBuilder().setSpannerConfig(spannerConfig).build(); } /** Specifies the Cloud Spanner project. */ public ReadAll withProjectId(String projectId) { return withProjectId(ValueProvider.StaticValueProvider.of(projectId)); } /** Specifies the Cloud Spanner project. */ public ReadAll withProjectId(ValueProvider<String> projectId) { SpannerConfig config = getSpannerConfig(); return withSpannerConfig(config.withProjectId(projectId)); } /** Specifies the Cloud Spanner instance. */ public ReadAll withInstanceId(String instanceId) { return withInstanceId(ValueProvider.StaticValueProvider.of(instanceId)); } /** Specifies the Cloud Spanner instance. 
*/
    public ReadAll withInstanceId(ValueProvider<String> instanceId) {
      SpannerConfig config = getSpannerConfig();
      return withSpannerConfig(config.withInstanceId(instanceId));
    }

    /** Specifies the Cloud Spanner database. */
    public ReadAll withDatabaseId(String databaseId) {
      return withDatabaseId(ValueProvider.StaticValueProvider.of(databaseId));
    }

    /** Specifies the Cloud Spanner host. */
    public ReadAll withHost(ValueProvider<String> host) {
      SpannerConfig config = getSpannerConfig();
      return withSpannerConfig(config.withHost(host));
    }

    /** Specifies the Cloud Spanner host. */
    public ReadAll withHost(String host) {
      return withHost(ValueProvider.StaticValueProvider.of(host));
    }

    /** Specifies the Cloud Spanner emulator host. */
    public ReadAll withEmulatorHost(ValueProvider<String> emulatorHost) {
      SpannerConfig config = getSpannerConfig();
      return withSpannerConfig(config.withEmulatorHost(emulatorHost));
    }

    /** Specifies the Cloud Spanner emulator host. */
    public ReadAll withEmulatorHost(String emulatorHost) {
      return withEmulatorHost(ValueProvider.StaticValueProvider.of(emulatorHost));
    }

    /** Specifies the Cloud Spanner database. */
    public ReadAll withDatabaseId(ValueProvider<String> databaseId) {
      SpannerConfig config = getSpannerConfig();
      return withSpannerConfig(config.withDatabaseId(databaseId));
    }

    // Test-only hook for injecting a fake/mock Spanner service.
    @VisibleForTesting
    ReadAll withServiceFactory(ServiceFactory<Spanner, SpannerOptions> serviceFactory) {
      SpannerConfig config = getSpannerConfig();
      return withSpannerConfig(config.withServiceFactory(serviceFactory));
    }

    /** Specifies the transaction (snapshot) to read from, supplied as a side-input view. */
    public ReadAll withTransaction(PCollectionView<Transaction> transaction) {
      return toBuilder().setTransaction(transaction).build();
    }

    /** Reads at the given timestamp; shorthand for a read-timestamp {@link TimestampBound}. */
    public ReadAll withTimestamp(Timestamp timestamp) {
      return withTimestampBound(TimestampBound.ofReadTimestamp(timestamp));
    }

    /** Specifies the {@link TimestampBound} used for the read-only transaction. */
    public ReadAll withTimestampBound(TimestampBound timestampBound) {
      return toBuilder().setTimestampBound(timestampBound).build();
    }

    /**
     * By default Batch API is used to read data from Cloud Spanner. It is useful to disable
     * batching when the underlying query is not root-partitionable.
     */
    public ReadAll withBatching(boolean batching) {
      return toBuilder().setBatching(batching).build();
    }

    /** Issues the read requests at Spanner {@code LOW} RPC priority. */
    public ReadAll withLowPriority() {
      SpannerConfig config = getSpannerConfig();
      return withSpannerConfig(config.withRpcPriority(RpcPriority.LOW));
    }

    /** Issues the read requests at Spanner {@code HIGH} RPC priority. */
    public ReadAll withHighPriority() {
      SpannerConfig config = getSpannerConfig();
      return withSpannerConfig(config.withRpcPriority(RpcPriority.HIGH));
    }

    // Whether the Batch (partitioned) read path is used; set via withBatching().
    abstract Boolean getBatching();

    @Override
    public PCollection<Struct> expand(PCollection<ReadOperation> input) {
      // Select the batched (partitioned) or naive (single-transaction) read implementation.
      PTransform<PCollection<ReadOperation>, PCollection<Struct>> readTransform;
      if (getBatching()) {
        readTransform =
            BatchSpannerRead.create(getSpannerConfig(), getTransaction(), getTimestampBound());
      } else {
        readTransform =
            NaiveSpannerRead.create(getSpannerConfig(), getTransaction(), getTimestampBound());
      }
      return input
          // Reshuffle breaks fusion so the read operations are distributed across workers.
          .apply("Reshuffle", Reshuffle.viaRandomKey())
          .apply("Read from Cloud Spanner", readTransform);
    }

    /** Helper function to create ServiceCallMetrics. */
    static ServiceCallMetric buildServiceCallMetricForReadOp(
        SpannerConfig config, ReadOperation op) {
      HashMap<String, String> baseLabels = buildServiceCallMetricLabels(config);
      baseLabels.put(MonitoringInfoConstants.Labels.METHOD, "Read");
      if (op.getQuery() != null) {
        // Query-based read: label the metric with a query resource.
        String queryName = op.getQueryName();
        if (queryName == null || queryName.isEmpty()) {
          // if queryName is not specified, use a hash of the SQL statement string.
          queryName = String.format("UNNAMED_QUERY#%08x", op.getQuery().getSql().hashCode());
        }
        baseLabels.put(
            MonitoringInfoConstants.Labels.RESOURCE,
            GcpResourceIdentifiers.spannerQuery(
                baseLabels.get(MonitoringInfoConstants.Labels.SPANNER_PROJECT_ID),
                config.getInstanceId().get(),
                config.getDatabaseId().get(),
                queryName));
        baseLabels.put(MonitoringInfoConstants.Labels.SPANNER_QUERY_NAME, queryName);
      } else {
        // Table-based read: label the metric with the table resource instead.
        baseLabels.put(
            MonitoringInfoConstants.Labels.RESOURCE,
            GcpResourceIdentifiers.spannerTable(
                baseLabels.get(MonitoringInfoConstants.Labels.SPANNER_PROJECT_ID),
                config.getInstanceId().get(),
                config.getDatabaseId().get(),
                op.getTable()));
        baseLabels.put(MonitoringInfoConstants.Labels.TABLE_ID, op.getTable());
      }
      return new ServiceCallMetric(MonitoringInfoConstants.Urns.API_REQUEST_COUNT, baseLabels);
    }
  } // end of ReadAll

  /** Implementation of {@link #read}. */
  @AutoValue
  public abstract static class Read extends PTransform<PBegin, PCollection<Struct>> {

    abstract SpannerConfig getSpannerConfig();

    abstract ReadOperation getReadOperation();

    abstract @Nullable TimestampBound getTimestampBound();

    abstract @Nullable PCollectionView<Transaction> getTransaction();

    abstract @Nullable PartitionOptions getPartitionOptions();

    abstract Boolean getBatching();

    abstract Builder toBuilder();

    @AutoValue.Builder
    abstract static class Builder {

      abstract Builder setSpannerConfig(SpannerConfig spannerConfig);

      abstract Builder setReadOperation(ReadOperation readOperation);

      abstract Builder setTimestampBound(TimestampBound timestampBound);

      abstract Builder setTransaction(PCollectionView<Transaction> transaction);

      abstract Builder setPartitionOptions(PartitionOptions partitionOptions);

      abstract Builder setBatching(Boolean batching);

      abstract Read build();
    }

    /** Specifies the Cloud Spanner configuration. */
    public Read withSpannerConfig(SpannerConfig spannerConfig) {
      return toBuilder().setSpannerConfig(spannerConfig).build();
    }

    /** Specifies the Cloud Spanner project.
*/
    public Read withProjectId(String projectId) {
      return withProjectId(ValueProvider.StaticValueProvider.of(projectId));
    }

    /** Specifies the Cloud Spanner project. */
    public Read withProjectId(ValueProvider<String> projectId) {
      SpannerConfig config = getSpannerConfig();
      return withSpannerConfig(config.withProjectId(projectId));
    }

    /** Specifies the Cloud Spanner instance. */
    public Read withInstanceId(String instanceId) {
      return withInstanceId(ValueProvider.StaticValueProvider.of(instanceId));
    }

    /** Specifies the Cloud Spanner instance. */
    public Read withInstanceId(ValueProvider<String> instanceId) {
      SpannerConfig config = getSpannerConfig();
      return withSpannerConfig(config.withInstanceId(instanceId));
    }

    /** Specifies the Cloud Spanner database. */
    public Read withDatabaseId(String databaseId) {
      return withDatabaseId(ValueProvider.StaticValueProvider.of(databaseId));
    }

    /** Specifies the Cloud Spanner database. */
    public Read withDatabaseId(ValueProvider<String> databaseId) {
      SpannerConfig config = getSpannerConfig();
      return withSpannerConfig(config.withDatabaseId(databaseId));
    }

    /** Specifies the Cloud Spanner host. */
    public Read withHost(ValueProvider<String> host) {
      SpannerConfig config = getSpannerConfig();
      return withSpannerConfig(config.withHost(host));
    }

    /** Specifies the Cloud Spanner host. */
    public Read withHost(String host) {
      return withHost(ValueProvider.StaticValueProvider.of(host));
    }

    /** Specifies the Cloud Spanner emulator host. */
    public Read withEmulatorHost(ValueProvider<String> emulatorHost) {
      SpannerConfig config = getSpannerConfig();
      return withSpannerConfig(config.withEmulatorHost(emulatorHost));
    }

    /** Specifies the Cloud Spanner emulator host. */
    public Read withEmulatorHost(String emulatorHost) {
      return withEmulatorHost(ValueProvider.StaticValueProvider.of(emulatorHost));
    }

    /** If true the uses Cloud Spanner batch API. */
    public Read withBatching(boolean batching) {
      return toBuilder().setBatching(batching).build();
    }

    // Test-only hook for injecting a fake/mock Spanner service.
    @VisibleForTesting
    Read withServiceFactory(ServiceFactory<Spanner, SpannerOptions> serviceFactory) {
      SpannerConfig config = getSpannerConfig();
      return withSpannerConfig(config.withServiceFactory(serviceFactory));
    }

    /** Specifies the transaction (snapshot) to read from, supplied as a side-input view. */
    public Read withTransaction(PCollectionView<Transaction> transaction) {
      return toBuilder().setTransaction(transaction).build();
    }

    /** Reads at the given timestamp; shorthand for a read-timestamp {@link TimestampBound}. */
    public Read withTimestamp(Timestamp timestamp) {
      return withTimestampBound(TimestampBound.ofReadTimestamp(timestamp));
    }

    /** Specifies the {@link TimestampBound} used for the read-only transaction. */
    public Read withTimestampBound(TimestampBound timestampBound) {
      return toBuilder().setTimestampBound(timestampBound).build();
    }

    /** Specifies the table to read from (table-based read, as opposed to a query). */
    public Read withTable(String table) {
      return withReadOperation(getReadOperation().withTable(table));
    }

    /** Replaces the whole {@link ReadOperation} of this transform. */
    public Read withReadOperation(ReadOperation operation) {
      return toBuilder().setReadOperation(operation).build();
    }

    /** Specifies the columns to read (required for a table-based read). */
    public Read withColumns(String... columns) {
      return withColumns(Arrays.asList(columns));
    }

    /** Specifies the columns to read (required for a table-based read). */
    public Read withColumns(List<String> columns) {
      return withReadOperation(getReadOperation().withColumns(columns));
    }

    /** Specifies the SQL statement to execute. */
    public Read withQuery(Statement statement) {
      return withReadOperation(getReadOperation().withQuery(statement));
    }

    /** Specifies the SQL query string to execute. */
    public Read withQuery(String sql) {
      return withQuery(Statement.of(sql));
    }

    /** Specifies a name for the query, used for metrics labeling. */
    public Read withQueryName(String queryName) {
      return withReadOperation(getReadOperation().withQueryName(queryName));
    }

    /** Specifies the key set to read (table-based read). */
    public Read withKeySet(KeySet keySet) {
      return withReadOperation(getReadOperation().withKeySet(keySet));
    }

    /** Specifies the secondary index to read from (table-based read). */
    public Read withIndex(String index) {
      return withReadOperation(getReadOperation().withIndex(index));
    }

    /** Specifies the {@link PartitionOptions} used by the Batch read API. */
    public Read withPartitionOptions(PartitionOptions partitionOptions) {
      return withReadOperation(getReadOperation().withPartitionOptions(partitionOptions));
    }

    /** Issues the read requests at Spanner {@code LOW} RPC priority. */
    public Read withLowPriority() {
      SpannerConfig config = getSpannerConfig();
      return withSpannerConfig(config.withRpcPriority(RpcPriority.LOW));
    }

    /** Issues the read requests at Spanner {@code HIGH} RPC priority. */
    public Read withHighPriority() {
      SpannerConfig config = getSpannerConfig();
      return withSpannerConfig(config.withRpcPriority(RpcPriority.HIGH));
    }

    @Override
    public PCollection<Struct> expand(PBegin input) {
      getSpannerConfig().validate();
      // A timestamp bound is mandatory: the read runs in a read-only transaction.
      checkArgument(
          getTimestampBound() != null,
          "SpannerIO.read() runs in a read only transaction and requires timestamp to be set "
              + "with withTimestampBound or withTimestamp method");

      if (getReadOperation().getQuery() != null) {
        // TODO: validate query?
      } else if (getReadOperation().getTable() != null) {
        // Assume read
        checkNotNull(
            getReadOperation().getColumns(),
            "For a read operation SpannerIO.read() requires a list of "
                + "columns to set with withColumns method");
        checkArgument(
            !getReadOperation().getColumns().isEmpty(),
            "For a read operation SpannerIO.read() requires a non-empty"
                + " list of columns to set with withColumns method");
      } else {
        throw new IllegalArgumentException(
            "SpannerIO.read() requires configuring query or read operation.");
      }

      // Delegate to ReadAll with a single-element PCollection of the configured ReadOperation.
      ReadAll readAll =
          readAll()
              .withSpannerConfig(getSpannerConfig())
              .withTimestampBound(getTimestampBound())
              .withBatching(getBatching())
              .withTransaction(getTransaction());
      return input.apply(Create.of(getReadOperation())).apply("Execute query", readAll);
    }

    // NOTE(review): returns a constant placeholder row (Key=3L) regardless of the input Struct —
    // this looks like stub/scaffolding; confirm intended behavior with the callers of getFormatFn.
    SerializableFunction<Struct, Row> getFormatFn() {
      return (SerializableFunction<Struct, Row>)
          input ->
              Row.withSchema(Schema.builder().addInt64Field("Key").build())
                  .withFieldValue("Key", 3L)
                  .build();
    }
  } // end of Read

  /** Reads Spanner {@link Struct}s via {@link Read} and converts them to Beam {@link Row}s. */
  static class ReadRows extends PTransform<PBegin, PCollection<Row>> {
    Read read;
    Schema schema;

    public ReadRows(Read read, Schema schema) {
      super("Read rows");
      this.read = read;
      this.schema = schema;
    }

    @Override
    public PCollection<Row> expand(PBegin input) {
      return input
          .apply(read)
          .apply(
              MapElements.into(TypeDescriptor.of(Row.class))
                  .via(
                      (SerializableFunction<Struct, Row>)
                          struct -> StructUtils.structToBeamRow(struct, schema)))
          .setRowSchema(schema);
    }
  }

  /**
   * A {@link PTransform} that creates a transaction.
If applied to a {@link PCollection}, it will
   * create a transaction after the {@link PCollection} is closed.
   *
   * @see SpannerIO
   * @see Wait
   */
  @AutoValue
  public abstract static class CreateTransaction
      extends PTransform<PInput, PCollectionView<Transaction>> {

    abstract SpannerConfig getSpannerConfig();

    abstract @Nullable TimestampBound getTimestampBound();

    abstract Builder toBuilder();

    @Override
    public PCollectionView<Transaction> expand(PInput input) {
      getSpannerConfig().validate();

      // Seed with a single element; if the input is a PCollection, wait for it to finish
      // (Wait.on) so the transaction is created only after that PCollection is closed.
      PCollection<?> collection = input.getPipeline().apply(Create.of(1));
      if (input instanceof PCollection) {
        collection = collection.apply(Wait.on((PCollection<?>) input));
      } else if (!(input instanceof PBegin)) {
        throw new RuntimeException("input must be PBegin or PCollection");
      }

      return collection
          .apply(
              "Create transaction",
              ParDo.of(new CreateTransactionFn(this.getSpannerConfig(), this.getTimestampBound())))
          .apply("As PCollectionView", View.asSingleton());
    }

    /** Specifies the Cloud Spanner configuration. */
    public CreateTransaction withSpannerConfig(SpannerConfig spannerConfig) {
      return toBuilder().setSpannerConfig(spannerConfig).build();
    }

    /** Specifies the Cloud Spanner project. */
    public CreateTransaction withProjectId(String projectId) {
      return withProjectId(ValueProvider.StaticValueProvider.of(projectId));
    }

    /** Specifies the Cloud Spanner project. */
    public CreateTransaction withProjectId(ValueProvider<String> projectId) {
      SpannerConfig config = getSpannerConfig();
      return withSpannerConfig(config.withProjectId(projectId));
    }

    /** Specifies the Cloud Spanner instance. */
    public CreateTransaction withInstanceId(String instanceId) {
      return withInstanceId(ValueProvider.StaticValueProvider.of(instanceId));
    }

    /** Specifies the Cloud Spanner instance. */
    public CreateTransaction withInstanceId(ValueProvider<String> instanceId) {
      SpannerConfig config = getSpannerConfig();
      return withSpannerConfig(config.withInstanceId(instanceId));
    }

    /** Specifies the Cloud Spanner database. */
    public CreateTransaction withDatabaseId(String databaseId) {
      return withDatabaseId(ValueProvider.StaticValueProvider.of(databaseId));
    }

    /** Specifies the Cloud Spanner database. */
    public CreateTransaction withDatabaseId(ValueProvider<String> databaseId) {
      SpannerConfig config = getSpannerConfig();
      return withSpannerConfig(config.withDatabaseId(databaseId));
    }

    /** Specifies the Cloud Spanner host. */
    public CreateTransaction withHost(ValueProvider<String> host) {
      SpannerConfig config = getSpannerConfig();
      return withSpannerConfig(config.withHost(host));
    }

    /** Specifies the Cloud Spanner host. */
    public CreateTransaction withHost(String host) {
      return withHost(ValueProvider.StaticValueProvider.of(host));
    }

    /** Specifies the Cloud Spanner emulator host. */
    public CreateTransaction withEmulatorHost(ValueProvider<String> emulatorHost) {
      SpannerConfig config = getSpannerConfig();
      return withSpannerConfig(config.withEmulatorHost(emulatorHost));
    }

    /** Specifies the Cloud Spanner emulator host. */
    public CreateTransaction withEmulatorHost(String emulatorHost) {
      return withEmulatorHost(ValueProvider.StaticValueProvider.of(emulatorHost));
    }

    // Test-only hook for injecting a fake/mock Spanner service.
    @VisibleForTesting
    CreateTransaction withServiceFactory(ServiceFactory<Spanner, SpannerOptions> serviceFactory) {
      SpannerConfig config = getSpannerConfig();
      return withSpannerConfig(config.withServiceFactory(serviceFactory));
    }

    /** Specifies the {@link TimestampBound} at which the transaction is created. */
    public CreateTransaction withTimestampBound(TimestampBound timestampBound) {
      return toBuilder().setTimestampBound(timestampBound).build();
    }

    /** A builder for {@link CreateTransaction}. */
    @AutoValue.Builder
    public abstract static class Builder {

      public abstract Builder setSpannerConfig(SpannerConfig spannerConfig);

      public abstract Builder setTimestampBound(TimestampBound newTimestampBound);

      public abstract CreateTransaction build();
    }
  }

  /** A failure handling strategy. */
  public enum FailureMode {
    /** Invalid write to Spanner will cause the pipeline to fail. A default strategy. */
    FAIL_FAST,
    /** Invalid mutations will be returned as part of the result of the write transform.
*/
    REPORT_FAILURES
  }

  /**
   * A {@link PTransform} that writes {@link Mutation} objects to Google Cloud Spanner.
   *
   * @see SpannerIO
   */
  @AutoValue
  public abstract static class Write extends PTransform<PCollection<Mutation>, SpannerWriteResult> {

    abstract SpannerConfig getSpannerConfig();

    abstract long getBatchSizeBytes();

    abstract long getMaxNumMutations();

    abstract long getMaxNumRows();

    abstract FailureMode getFailureMode();

    abstract @Nullable PCollection<?> getSchemaReadySignal();

    abstract OptionalInt getGroupingFactor();

    abstract @Nullable PCollectionView<Dialect> getDialectView();

    abstract Builder toBuilder();

    @AutoValue.Builder
    abstract static class Builder {

      abstract Builder setSpannerConfig(SpannerConfig spannerConfig);

      abstract Builder setBatchSizeBytes(long batchSizeBytes);

      abstract Builder setMaxNumMutations(long maxNumMutations);

      abstract Builder setMaxNumRows(long maxNumRows);

      abstract Builder setFailureMode(FailureMode failureMode);

      abstract Builder setSchemaReadySignal(PCollection<?> schemaReadySignal);

      abstract Builder setGroupingFactor(int groupingFactor);

      abstract Builder setDialectView(PCollectionView<Dialect> dialect);

      abstract Write build();
    }

    /** Specifies the Cloud Spanner configuration. */
    public Write withSpannerConfig(SpannerConfig spannerConfig) {
      return toBuilder().setSpannerConfig(spannerConfig).build();
    }

    /** Specifies the Cloud Spanner project. */
    public Write withProjectId(String projectId) {
      return withProjectId(ValueProvider.StaticValueProvider.of(projectId));
    }

    /** Specifies the Cloud Spanner project. */
    public Write withProjectId(ValueProvider<String> projectId) {
      SpannerConfig config = getSpannerConfig();
      return withSpannerConfig(config.withProjectId(projectId));
    }

    /** Specifies the Cloud Spanner instance. */
    public Write withInstanceId(String instanceId) {
      return withInstanceId(ValueProvider.StaticValueProvider.of(instanceId));
    }

    /** Specifies the Cloud Spanner instance. */
    public Write withInstanceId(ValueProvider<String> instanceId) {
      SpannerConfig config = getSpannerConfig();
      return withSpannerConfig(config.withInstanceId(instanceId));
    }

    /** Specifies the Cloud Spanner database. */
    public Write withDatabaseId(String databaseId) {
      return withDatabaseId(ValueProvider.StaticValueProvider.of(databaseId));
    }

    /** Specifies the Cloud Spanner database. */
    public Write withDatabaseId(ValueProvider<String> databaseId) {
      SpannerConfig config = getSpannerConfig();
      return withSpannerConfig(config.withDatabaseId(databaseId));
    }

    /** Specifies the Cloud Spanner host. */
    public Write withHost(ValueProvider<String> host) {
      SpannerConfig config = getSpannerConfig();
      return withSpannerConfig(config.withHost(host));
    }

    /** Specifies the Cloud Spanner host. */
    public Write withHost(String host) {
      return withHost(ValueProvider.StaticValueProvider.of(host));
    }

    /** Specifies the Cloud Spanner emulator host. */
    public Write withEmulatorHost(ValueProvider<String> emulatorHost) {
      SpannerConfig config = getSpannerConfig();
      return withSpannerConfig(config.withEmulatorHost(emulatorHost));
    }

    /** Specifies the Cloud Spanner emulator host. */
    public Write withEmulatorHost(String emulatorHost) {
      return withEmulatorHost(ValueProvider.StaticValueProvider.of(emulatorHost));
    }

    /** Specifies the database {@link Dialect} as a side-input view. */
    public Write withDialectView(PCollectionView<Dialect> dialect) {
      return toBuilder().setDialectView(dialect).build();
    }

    /**
     * Specifies the deadline for the Commit API call. Default is 15 secs. DEADLINE_EXCEEDED errors
     * will prompt a backoff/retry until the value of {@link #withMaxCumulativeBackoff(Duration)} is
     * reached. DEADLINE_EXCEEDED errors are reported with logging and counters.
     */
    public Write withCommitDeadline(Duration commitDeadline) {
      SpannerConfig config = getSpannerConfig();
      return withSpannerConfig(config.withCommitDeadline(commitDeadline));
    }

    /**
     * Specifies the maximum cumulative backoff time when retrying after DEADLINE_EXCEEDED errors.
     * Default is 15 mins.
     *
     * <p>If the mutations still have not been written after this time, they are treated as a
     * failure, and handled according to the setting of {@link #withFailureMode(FailureMode)}.
     */
    public Write withMaxCumulativeBackoff(Duration maxCumulativeBackoff) {
      SpannerConfig config = getSpannerConfig();
      return withSpannerConfig(config.withMaxCumulativeBackoff(maxCumulativeBackoff));
    }

    // Test-only hook for injecting a fake/mock Spanner service.
    @VisibleForTesting
    Write withServiceFactory(ServiceFactory<Spanner, SpannerOptions> serviceFactory) {
      SpannerConfig config = getSpannerConfig();
      return withSpannerConfig(config.withServiceFactory(serviceFactory));
    }

    /** Same transform but can be applied to {@link PCollection} of {@link MutationGroup}. */
    public WriteGrouped grouped() {
      return new WriteGrouped(this);
    }

    /**
     * Specifies the batch size limit (max number of bytes mutated per batch). Default value is 1MB
     */
    public Write withBatchSizeBytes(long batchSizeBytes) {
      return toBuilder().setBatchSizeBytes(batchSizeBytes).build();
    }

    /** Specifies failure mode. {@link FailureMode#FAIL_FAST} mode is selected by default. */
    public Write withFailureMode(FailureMode failureMode) {
      return toBuilder().setFailureMode(failureMode).build();
    }

    /**
     * Specifies the cell mutation limit (maximum number of mutated cells per batch). Default value
     * is 5000
     */
    public Write withMaxNumMutations(long maxNumMutations) {
      return toBuilder().setMaxNumMutations(maxNumMutations).build();
    }

    /**
     * Specifies the row mutation limit (maximum number of mutated rows per batch). Default value is
     * 1000
     */
    public Write withMaxNumRows(long maxNumRows) {
      return toBuilder().setMaxNumRows(maxNumRows).build();
    }

    /**
     * Specifies an optional input PCollection that can be used as the signal for {@link
     * Wait.OnSignal} to indicate when the database schema is ready to be read.
     *
     * <p>To be used when the database schema is created by another section of the pipeline, this
     * causes this transform to wait until the {@code signal PCollection} has been closed before
     * reading the schema from the database.
     *
     * @see Wait.OnSignal
     */
    public Write withSchemaReadySignal(PCollection<?> signal) {
      return toBuilder().setSchemaReadySignal(signal).build();
    }

    /**
     * Specifies the multiple of max mutation (in terms of both bytes per batch and cells per batch)
     * that is used to select a set of mutations to sort by key for batching. This sort uses local
     * memory on the workers, so using large values can cause out of memory errors. Default value is
     * 1000.
     */
    public Write withGroupingFactor(int groupingFactor) {
      return toBuilder().setGroupingFactor(groupingFactor).build();
    }

    /** Issues the commit requests at Spanner {@code LOW} RPC priority. */
    public Write withLowPriority() {
      SpannerConfig config = getSpannerConfig();
      return withSpannerConfig(config.withRpcPriority(RpcPriority.LOW));
    }

    /** Issues the commit requests at Spanner {@code HIGH} RPC priority. */
    public Write withHighPriority() {
      SpannerConfig config = getSpannerConfig();
      return withSpannerConfig(config.withRpcPriority(RpcPriority.HIGH));
    }

    @Override
    public SpannerWriteResult expand(PCollection<Mutation> input) {
      getSpannerConfig().validate();

      // Wrap each Mutation in a singleton MutationGroup and delegate to WriteGrouped.
      return input
          .apply("To mutation group", ParDo.of(new ToMutationGroupFn()))
          .apply("Write mutations to Cloud Spanner", new WriteGrouped(this));
    }

    @Override
    public void populateDisplayData(DisplayData.Builder builder) {
      super.populateDisplayData(builder);
      populateDisplayDataWithParamaters(builder);
    }

    // NOTE(review): "Paramaters" is misspelled; renaming this private method would also touch its
    // caller in WriteGrouped, so it is intentionally left as-is here.
    private void populateDisplayDataWithParamaters(DisplayData.Builder builder) {
      getSpannerConfig().populateDisplayData(builder);
      builder.add(
          DisplayData.item("batchSizeBytes", getBatchSizeBytes())
              .withLabel("Max batch size in bytes"));
      builder.add(
          DisplayData.item("maxNumMutations", getMaxNumMutations())
              .withLabel("Max number of mutated cells in each batch"));
      builder.add(
          DisplayData.item("maxNumRows", getMaxNumRows())
              .withLabel("Max number of rows in each batch"));
      // Grouping factor default value depends on whether it is a batch or streaming pipeline.
      // This function is not aware of that state, so use 'DEFAULT' if unset.
      builder.add(
          DisplayData.item(
                  "groupingFactor",
                  (getGroupingFactor().isPresent()
                      ? Integer.toString(getGroupingFactor().getAsInt())
                      : "DEFAULT"))
              .withLabel("Number of batches to sort over"));
    }
  } // end of Write

  /** Converts Beam {@link Row}s to Spanner {@link Mutation}s and writes them via {@link Write}. */
  static class WriteRows extends PTransform<PCollection<Row>, PDone> {

    private final Write write;
    private final Op operation;
    private final String table;

    private WriteRows(Write write, Op operation, String table) {
      this.write = write;
      this.operation = operation;
      this.table = table;
    }

    public static WriteRows of(Write write, Op operation, String table) {
      return new WriteRows(write, operation, table);
    }

    @Override
    public PDone expand(PCollection<Row> input) {
      input
          .apply(
              MapElements.into(TypeDescriptor.of(Mutation.class))
                  .via(MutationUtils.beamRowToMutationFn(operation, table)))
          .apply(write);
      return PDone.in(input.getPipeline());
    }
  }

  /** Same as {@link Write} but supports grouped mutations.
*/
  public static class WriteGrouped
      extends PTransform<PCollection<MutationGroup>, SpannerWriteResult> {

    private final Write spec;

    // Output tags for routing mutation groups through the batching/write stages.
    private static final TupleTag<MutationGroup> BATCHABLE_MUTATIONS_TAG =
        new TupleTag<MutationGroup>("batchableMutations") {};
    private static final TupleTag<Iterable<MutationGroup>> UNBATCHABLE_MUTATIONS_TAG =
        new TupleTag<Iterable<MutationGroup>>("unbatchableMutations") {};
    private static final TupleTag<Void> MAIN_OUT_TAG = new TupleTag<Void>("mainOut") {};
    private static final TupleTag<MutationGroup> FAILED_MUTATIONS_TAG =
        new TupleTag<MutationGroup>("failedMutations") {};
    private static final SerializableCoder<MutationGroup> CODER =
        SerializableCoder.of(MutationGroup.class);

    public WriteGrouped(Write spec) {
      this.spec = spec;
    }

    @Override
    public void populateDisplayData(DisplayData.Builder builder) {
      super.populateDisplayData(builder);
      spec.populateDisplayDataWithParamaters(builder);
    }

    @Override
    public SpannerWriteResult expand(PCollection<MutationGroup> input) {
      PCollection<Iterable<MutationGroup>> batches;
      PCollectionView<Dialect> dialectView = spec.getDialectView();

      // Default the dialect to GOOGLE_STANDARD_SQL when no dialect view was provided.
      if (dialectView == null) {
        dialectView =
            input
                .getPipeline()
                .apply("CreateSingleton", Create.of(Dialect.GOOGLE_STANDARD_SQL))
                .apply("As PCollectionView", View.asSingleton());
      }

      if (spec.getBatchSizeBytes() <= 1
          || spec.getMaxNumMutations() <= 1
          || spec.getMaxNumRows() <= 1) {
        // Any limit <= 1 effectively disables batching: wrap each group as a singleton batch.
        LOG.info("Batching of mutationGroups is disabled");
        TypeDescriptor<Iterable<MutationGroup>> descriptor =
            new TypeDescriptor<Iterable<MutationGroup>>() {};
        batches = input.apply(MapElements.into(descriptor).via(ImmutableList::of));
      } else {

        // First, read the Cloud Spanner schema.
        PCollection<Void> schemaSeed =
            input.getPipeline().apply("Create Seed", Create.of((Void) null));
        if (spec.getSchemaReadySignal() != null) {
          // Wait for external signal before reading schema.
          schemaSeed = schemaSeed.apply("Wait for schema", Wait.on(spec.getSchemaReadySignal()));
        }
        final PCollectionView<SpannerSchema> schemaView =
            schemaSeed
                .apply(
                    "Read information schema",
                    ParDo.of(new ReadSpannerSchema(spec.getSpannerConfig(), dialectView))
                        .withSideInputs(dialectView))
                .apply("Schema View", View.asSingleton());

        // Split the mutations into batchable and unbatchable mutations.
        // Filter out mutation groups too big to be batched.
        PCollectionTuple filteredMutations =
            input
                .apply(
                    "RewindowIntoGlobal",
                    Window.<MutationGroup>into(new GlobalWindows())
                        .triggering(DefaultTrigger.of())
                        .discardingFiredPanes())
                .apply(
                    "Filter Unbatchable Mutations",
                    ParDo.of(
                            new BatchableMutationFilterFn(
                                schemaView,
                                UNBATCHABLE_MUTATIONS_TAG,
                                spec.getBatchSizeBytes(),
                                spec.getMaxNumMutations(),
                                spec.getMaxNumRows()))
                        .withSideInputs(schemaView)
                        .withOutputTags(
                            BATCHABLE_MUTATIONS_TAG, TupleTagList.of(UNBATCHABLE_MUTATIONS_TAG)));

        // Build a set of Mutation groups from the current bundle,
        // sort them by table/key then split into batches.
        PCollection<Iterable<MutationGroup>> batchedMutations =
            filteredMutations
                .get(BATCHABLE_MUTATIONS_TAG)
                .apply(
                    "Gather Sort And Create Batches",
                    ParDo.of(
                            new GatherSortCreateBatchesFn(
                                spec.getBatchSizeBytes(),
                                spec.getMaxNumMutations(),
                                spec.getMaxNumRows(),
                                // Do not group on streaming unless explicitly set.
                                spec.getGroupingFactor()
                                    .orElse(
                                        input.isBounded() == IsBounded.BOUNDED
                                            ? DEFAULT_GROUPING_FACTOR
                                            : 1),
                                schemaView))
                        .withSideInputs(schemaView));

        // Merge the batched and unbatchable mutation PCollections and write to Spanner.
        batches =
            PCollectionList.of(filteredMutations.get(UNBATCHABLE_MUTATIONS_TAG))
                .and(batchedMutations)
                .apply("Merge", Flatten.pCollections());
      }

      PCollectionTuple result =
          batches.apply(
              "Write batches to Spanner",
              ParDo.of(
                      new WriteToSpannerFn(
                          spec.getSpannerConfig(), spec.getFailureMode(), FAILED_MUTATIONS_TAG))
                  .withOutputTags(MAIN_OUT_TAG, TupleTagList.of(FAILED_MUTATIONS_TAG)));

      return new SpannerWriteResult(
          input.getPipeline(),
          result.get(MAIN_OUT_TAG),
          result.get(FAILED_MUTATIONS_TAG),
          FAILED_MUTATIONS_TAG);
    }

    /** Deserializes a {@link MutationGroup} previously produced by {@link #encode}. */
    @VisibleForTesting
    static MutationGroup decode(byte[] bytes) {
      ByteArrayInputStream bis = new ByteArrayInputStream(bytes);
      try {
        return CODER.decode(bis);
      } catch (IOException e) {
        throw new RuntimeException(e);
      }
    }

    /** Serializes a {@link MutationGroup} using the {@link SerializableCoder}. */
    @VisibleForTesting
    static byte[] encode(MutationGroup g) {
      ByteArrayOutputStream bos = new ByteArrayOutputStream();
      try {
        CODER.encode(g, bos);
      } catch (IOException e) {
        throw new RuntimeException(e);
      }
      return bos.toByteArray();
    }
  } // end of WriteGrouped

  /** A {@link PTransform} that reads a Cloud Spanner change stream. */
  @AutoValue
  public abstract static class ReadChangeStream
      extends PTransform<PBegin, PCollection<DataChangeRecord>> {

    abstract SpannerConfig getSpannerConfig();

    abstract String getChangeStreamName();

    abstract @Nullable String getMetadataInstance();

    abstract @Nullable String getMetadataDatabase();

    abstract @Nullable String getMetadataTable();

    abstract Timestamp getInclusiveStartAt();

    abstract @Nullable Timestamp getInclusiveEndAt();

    abstract @Nullable RpcPriority getRpcPriority();

    /** @deprecated This configuration has no effect, as tracing is not available */
    @Deprecated
    abstract @Nullable Double getTraceSampleProbability();

    abstract Builder toBuilder();

    @AutoValue.Builder
    abstract static class Builder {

      abstract Builder setSpannerConfig(SpannerConfig spannerConfig);

      abstract Builder setChangeStreamName(String changeStreamName);

      abstract Builder setMetadataInstance(String metadataInstance);

      abstract Builder setMetadataDatabase(String metadataDatabase);

      abstract Builder setMetadataTable(String metadataTable);
abstract Builder setInclusiveStartAt(Timestamp inclusiveStartAt);

      abstract Builder setInclusiveEndAt(Timestamp inclusiveEndAt);

      abstract Builder setRpcPriority(RpcPriority rpcPriority);

      abstract Builder setTraceSampleProbability(Double probability);

      abstract ReadChangeStream build();
    }

    /** Specifies the Cloud Spanner configuration. */
    public ReadChangeStream withSpannerConfig(SpannerConfig spannerConfig) {
      return toBuilder().setSpannerConfig(spannerConfig).build();
    }

    /** Specifies the Cloud Spanner project. */
    public ReadChangeStream withProjectId(String projectId) {
      return withProjectId(ValueProvider.StaticValueProvider.of(projectId));
    }

    /** Specifies the Cloud Spanner project. */
    public ReadChangeStream withProjectId(ValueProvider<String> projectId) {
      SpannerConfig config = getSpannerConfig();
      return withSpannerConfig(config.withProjectId(projectId));
    }

    /** Specifies the Cloud Spanner instance. */
    public ReadChangeStream withInstanceId(String instanceId) {
      return withInstanceId(ValueProvider.StaticValueProvider.of(instanceId));
    }

    /** Specifies the Cloud Spanner instance. */
    public ReadChangeStream withInstanceId(ValueProvider<String> instanceId) {
      SpannerConfig config = getSpannerConfig();
      return withSpannerConfig(config.withInstanceId(instanceId));
    }

    /** Specifies the Cloud Spanner database. */
    public ReadChangeStream withDatabaseId(String databaseId) {
      return withDatabaseId(ValueProvider.StaticValueProvider.of(databaseId));
    }

    /** Specifies the Cloud Spanner database. */
    public ReadChangeStream withDatabaseId(ValueProvider<String> databaseId) {
      SpannerConfig config = getSpannerConfig();
      return withSpannerConfig(config.withDatabaseId(databaseId));
    }

    /** Specifies the change stream name. */
    public ReadChangeStream withChangeStreamName(String changeStreamName) {
      return toBuilder().setChangeStreamName(changeStreamName).build();
    }

    /** Specifies the metadata instance. */
    public ReadChangeStream withMetadataInstance(String metadataInstance) {
      return toBuilder().setMetadataInstance(metadataInstance).build();
    }

    /** Specifies the metadata database. */
    public ReadChangeStream withMetadataDatabase(String metadataDatabase) {
      return toBuilder().setMetadataDatabase(metadataDatabase).build();
    }

    /** Specifies the metadata table name. */
    public ReadChangeStream withMetadataTable(String metadataTable) {
      return toBuilder().setMetadataTable(metadataTable).build();
    }

    /** Specifies the time that the change stream should be read from. */
    public ReadChangeStream withInclusiveStartAt(Timestamp timestamp) {
      return toBuilder().setInclusiveStartAt(timestamp).build();
    }

    /** Specifies the end time of the change stream. */
    public ReadChangeStream withInclusiveEndAt(Timestamp timestamp) {
      return toBuilder().setInclusiveEndAt(timestamp).build();
    }

    /** Specifies the priority of the change stream queries. */
    public ReadChangeStream withRpcPriority(RpcPriority rpcPriority) {
      return toBuilder().setRpcPriority(rpcPriority).build();
    }

    /**
     * Specifies the sample probability of tracing requests.
     *
     * @deprecated This configuration has no effect, as tracing is not available.
     */
    @Deprecated
    public ReadChangeStream withTraceSampleProbability(Double probability) {
      return toBuilder().setTraceSampleProbability(probability).build();
    }

    @Override
    public PCollection<DataChangeRecord> expand(PBegin input) {
      checkArgument(
          getSpannerConfig() != null,
          "SpannerIO.readChangeStream() requires the spanner config to be set.");
      checkArgument(
          getSpannerConfig().getProjectId() != null,
          "SpannerIO.readChangeStream() requires the project ID to be set.");
      checkArgument(
          getSpannerConfig().getInstanceId() != null,
          "SpannerIO.readChangeStream() requires the instance ID to be set.");
      checkArgument(
          getSpannerConfig().getDatabaseId() != null,
          "SpannerIO.readChangeStream() requires the database ID to be set.");
      checkArgument(
          getChangeStreamName() != null,
          "SpannerIO.readChangeStream() requires the name of the change stream to be set.");
      checkArgument(
          getInclusiveStartAt() != null,
          "SpannerIO.readChangeStream() requires the start time to be set.");
      // Inclusive end at is defaulted to ChangeStreamsContants.MAX_INCLUSIVE_END_AT
      checkArgument(
          getInclusiveEndAt() != null,
          "SpannerIO.readChangeStream() requires the end time to be set. If you'd like to process the stream without an end time, you can omit this parameter.");
      if (getMetadataInstance() != null) {
        checkArgument(
            getMetadataDatabase() != null,
            "SpannerIO.readChangeStream() requires the metadata database to be set if metadata instance is set.");
      }

      // Start time must be before end time
      // (the null re-check below is defensive; the checkArgument above already guarantees non-null)
      if (getInclusiveEndAt() != null
          && getInclusiveStartAt().toSqlTimestamp().after(getInclusiveEndAt().toSqlTimestamp())) {
        throw new IllegalArgumentException("Start time cannot be after end time.");
      }

      final DatabaseId changeStreamDatabaseId =
          DatabaseId.of(
              getSpannerConfig().getProjectId().get(),
              getSpannerConfig().getInstanceId().get(),
              getSpannerConfig().getDatabaseId().get());
      // The partition-metadata instance/database default to the change-stream ones when unset.
      final String partitionMetadataInstanceId =
          MoreObjects.firstNonNull(
              getMetadataInstance(), changeStreamDatabaseId.getInstanceId().getInstance());
      final String partitionMetadataDatabaseId =
          MoreObjects.firstNonNull(getMetadataDatabase(), changeStreamDatabaseId.getDatabase());
      final String partitionMetadataTableName =
          MoreObjects.firstNonNull(
              getMetadataTable(), generatePartitionMetadataTableName(partitionMetadataDatabaseId));

      SpannerConfig changeStreamSpannerConfig = getSpannerConfig();
      // Set default retryable errors for ReadChangeStream
      if (changeStreamSpannerConfig.getRetryableCodes() == null) {
        ImmutableSet<Code> defaultRetryableCodes = ImmutableSet.of(Code.UNAVAILABLE, Code.ABORTED);
        changeStreamSpannerConfig =
            changeStreamSpannerConfig.toBuilder().setRetryableCodes(defaultRetryableCodes).build();
      }
      // Set default retry timeouts for ReadChangeStream
      if (changeStreamSpannerConfig.getExecuteStreamingSqlRetrySettings() == null) {
        changeStreamSpannerConfig =
            changeStreamSpannerConfig
                .toBuilder()
                .setExecuteStreamingSqlRetrySettings(
                    RetrySettings.newBuilder()
                        .setTotalTimeout(org.threeten.bp.Duration.ofMinutes(5))
                        .setInitialRpcTimeout(org.threeten.bp.Duration.ofMinutes(1))
                        .setMaxRpcTimeout(org.threeten.bp.Duration.ofMinutes(1))
                        .build())
                .build();
      }

      final SpannerConfig partitionMetadataSpannerConfig =
          changeStreamSpannerConfig
              .toBuilder()
              .setInstanceId(StaticValueProvider.of(partitionMetadataInstanceId))
              .setDatabaseId(StaticValueProvider.of(partitionMetadataDatabaseId))
              .build();
      final String changeStreamName = getChangeStreamName();
      final Timestamp startTimestamp = getInclusiveStartAt();
      // Uses (Timestamp.MAX - 1ns) at max for end timestamp, because we add 1ns to transform the
      // interval into a closed-open in the read change stream restriction (prevents overflow)
      final Timestamp endTimestamp =
          getInclusiveEndAt().compareTo(MAX_INCLUSIVE_END_AT) > 0
              ? MAX_INCLUSIVE_END_AT
              : getInclusiveEndAt();
      final MapperFactory mapperFactory = new MapperFactory();
      final ChangeStreamMetrics metrics = new ChangeStreamMetrics();
      final ThroughputEstimator throughputEstimator = new ThroughputEstimator();
      final RpcPriority rpcPriority = MoreObjects.firstNonNull(getRpcPriority(), RpcPriority.HIGH);
      final DaoFactory daoFactory =
          new DaoFactory(
              changeStreamSpannerConfig,
              changeStreamName,
              partitionMetadataSpannerConfig,
              partitionMetadataTableName,
              rpcPriority,
              input.getPipeline().getOptions().getJobName());
      final ActionFactory actionFactory = new ActionFactory();

      // The SDF pipeline: initialize metadata, detect partitions, read each partition, and
      // finally gather per-record metrics.
      final InitializeDoFn initializeDoFn =
          new InitializeDoFn(daoFactory, mapperFactory, startTimestamp, endTimestamp);
      final DetectNewPartitionsDoFn detectNewPartitionsDoFn =
          new DetectNewPartitionsDoFn(daoFactory, mapperFactory, actionFactory, metrics);
      final ReadChangeStreamPartitionDoFn readChangeStreamPartitionDoFn =
          new ReadChangeStreamPartitionDoFn(
              daoFactory, mapperFactory, actionFactory, metrics, throughputEstimator);
      final PostProcessingMetricsDoFn postProcessingMetricsDoFn =
          new PostProcessingMetricsDoFn(metrics);

      LOG.info("Partition metadata table that will be used is " + partitionMetadataTableName);
      // Expose the metadata table name through pipeline options (display-only; see
      // SpannerChangeStreamOptions).
      input
          .getPipeline()
          .getOptions()
          .as(SpannerChangeStreamOptions.class)
          .setMetadataTable(partitionMetadataTableName);

      PCollection<byte[]> impulseOut = input.apply(Impulse.create());
      PCollection<DataChangeRecord> results =
          impulseOut
              .apply("Initialize the connector", ParDo.of(initializeDoFn))
              .apply("Detect new partitions", ParDo.of(detectNewPartitionsDoFn))
              .apply("Read change stream partition", ParDo.of(readChangeStreamPartitionDoFn))
              .apply("Gather metrics", ParDo.of(postProcessingMetricsDoFn));
      // After all results are emitted, clean up the partition metadata table.
      impulseOut
          .apply(WithTimestamps.of(e -> GlobalWindow.INSTANCE.maxTimestamp()))
          .apply(Wait.on(results))
          .apply(ParDo.of(new CleanUpReadChangeStreamDoFn(daoFactory)));
      return results;
    }
  } // end of ReadChangeStream

  /**
   * Interface to display the name of the metadata table on Dataflow UI. This is only used for
   * internal purpose. This should not be used to pass the name of the metadata table.
   */
  public interface SpannerChangeStreamOptions extends StreamingOptions {

    /** Returns the name of the metadata table. */
    String getMetadataTable();

    /** Specifies the name of the metadata table. */
    void setMetadataTable(String table);
  }

  /** Wraps each incoming {@link Mutation} into a singleton {@link MutationGroup}. */
  private static class ToMutationGroupFn extends DoFn<Mutation, MutationGroup> {
    @ProcessElement
    public void processElement(ProcessContext c) {
      Mutation value = c.element();
      c.output(MutationGroup.create(value));
    }
  }

  /**
   * Gathers a set of mutations together, gets the keys, encodes them to byte[], sorts them and then
   * outputs the encoded sorted list.
   *
   * <p>Testing notes: With very small amounts of data, each mutation group is in a separate bundle,
   * and as batching and sorting is over the bundle, this effectively means that no batching will
   * occur, Therefore this DoFn has to be tested in isolation.
   */
  @VisibleForTesting
  static class GatherSortCreateBatchesFn extends DoFn<MutationGroup, Iterable<MutationGroup>> {

    // Per-batch limits applied when splitting the sorted buffer into output batches.
    private final long maxBatchSizeBytes;
    private final long maxBatchNumMutations;
    private final long maxBatchNumRows;
    // Buffer ("sortable") limits: batch limits multiplied by the grouping factor. When the
    // buffer exceeds any of these, it is sorted and flushed.
    private final long maxSortableSizeBytes;
    private final long maxSortableNumMutations;
    private final long maxSortableNumRows;
    private final PCollectionView<SpannerSchema> schemaView;

    // Buffer of mutation groups awaiting sort-and-flush.
    private final ArrayList<MutationGroupContainer> mutationsToSort = new ArrayList<>();

    // total size of MutationGroups in mutationsToSort.
    private long sortableSizeBytes = 0;
    // total number of mutated cells in mutationsToSort
    private long sortableNumCells = 0;
    // total number of rows mutated in mutationsToSort
    private long sortableNumRows = 0;

    GatherSortCreateBatchesFn(
        long maxBatchSizeBytes,
        long maxNumMutations,
        long maxNumRows,
        long groupingFactor,
        PCollectionView<SpannerSchema> schemaView) {
      this.maxBatchSizeBytes = maxBatchSizeBytes;
      this.maxBatchNumMutations = maxNumMutations;
      this.maxBatchNumRows = maxNumRows;
      // A non-positive grouping factor is normalized to 1 (no grouping beyond one batch).
      if (groupingFactor <= 0) {
        groupingFactor = 1;
      }
      this.maxSortableSizeBytes = maxBatchSizeBytes * groupingFactor;
      this.maxSortableNumMutations = maxNumMutations * groupingFactor;
      this.maxSortableNumRows = maxNumRows * groupingFactor;
      this.schemaView = schemaView;
      initSorter();
    }

    /** Resets the sort buffer and its accumulated statistics. */
    private synchronized void initSorter() {
      mutationsToSort.clear();
      sortableSizeBytes = 0;
      sortableNumCells = 0;
      sortableNumRows = 0;
    }

    /** Flushes whatever is buffered when the bundle finishes. */
    @FinishBundle
    public synchronized void finishBundle(FinishBundleContext c) throws Exception {
      sortAndOutputBatches(new OutputReceiverForFinishBundle(c));
    }

    /**
     * Sorts the buffered mutation groups by their encoded keys and emits them as batches bounded
     * by byte size, cell count and row count. The buffer is always reset afterwards, even if
     * output fails.
     */
    private synchronized void sortAndOutputBatches(OutputReceiver<Iterable<MutationGroup>> out)
        throws IOException {
      try {
        if (mutationsToSort.isEmpty()) {
          // nothing to output.
          return;
        }

        if (maxSortableNumMutations == maxBatchNumMutations) {
          // no grouping is occurring, no need to sort and make batches, just output what we have.
          outputBatch(out, 0, mutationsToSort.size());
          return;
        }

        // Sort then split the sorted mutations into batches.
        mutationsToSort.sort(Comparator.naturalOrder());

        int batchStart = 0;
        int batchEnd = 0;
        // total size of the current batch.
        long batchSizeBytes = 0;
        // total number of mutated cells.
        long batchCells = 0;
        // total number of rows mutated.
        long batchRows = 0;

        // collect and output batches.
        while (batchEnd < mutationsToSort.size()) {
          MutationGroupContainer mg = mutationsToSort.get(batchEnd);

          if (((batchCells + mg.numCells) > maxBatchNumMutations)
              || ((batchSizeBytes + mg.sizeBytes) > maxBatchSizeBytes
                  || (batchRows + mg.numRows > maxBatchNumRows))) {
            // Cannot add new element, current batch is full; output.
            outputBatch(out, batchStart, batchEnd);
            batchStart = batchEnd;
            batchSizeBytes = 0;
            batchCells = 0;
            batchRows = 0;
          }
          batchEnd++;
          batchSizeBytes += mg.sizeBytes;
          batchCells += mg.numCells;
          batchRows += mg.numRows;
        }

        if (batchStart < batchEnd) {
          // output remaining elements
          outputBatch(out, batchStart, mutationsToSort.size());
        }
      } finally {
        initSorter();
      }
    }

    /** Emits the mutation groups in buffer positions [batchStart, batchEnd) as one batch. */
    private void outputBatch(
        OutputReceiver<Iterable<MutationGroup>> out, int batchStart, int batchEnd) {
      out.output(
          mutationsToSort.subList(batchStart, batchEnd).stream()
              .map(o -> o.mutationGroup)
              .collect(toList()));
    }

    /**
     * Buffers the incoming mutation group with its size/cell/row statistics and encoded sort key;
     * flushes the buffer first if adding the group would exceed any sortable limit.
     */
    @ProcessElement
    public synchronized void processElement(
        ProcessContext c, OutputReceiverForIterableUnused out) throws Exception {
      SpannerSchema spannerSchema = c.sideInput(schemaView);
      MutationKeyEncoder encoder = new MutationKeyEncoder(spannerSchema);
      MutationGroup mg = c.element();
      long groupSize = MutationSizeEstimator.sizeOf(mg);
      long groupCells = MutationCellCounter.countOf(spannerSchema, mg);
      long groupRows = mg.size();

      synchronized (this) {
        if (((sortableNumCells + groupCells) > maxSortableNumMutations)
            || (sortableSizeBytes + groupSize) > maxSortableSizeBytes
            || (sortableNumRows + groupRows) > maxSortableNumRows) {
          sortAndOutputBatches(out);
        }

        mutationsToSort.add(
            new MutationGroupContainer(
                mg, groupSize, groupCells, groupRows, encoder.encodeTableNameAndKey(mg.primary())));
        sortableSizeBytes += groupSize;
        sortableNumCells += groupCells;
        sortableNumRows += groupRows;
      }
    }

    // Container class to store a MutationGroup, its sortable encoded key and its statistics.
    private static final class MutationGroupContainer
        implements Comparable<MutationGroupContainer> {

      final MutationGroup mutationGroup;
      final long sizeBytes;
      final long numCells;
      final long numRows;
      final byte[] encodedKey;

      MutationGroupContainer(
          MutationGroup mutationGroup,
          long sizeBytes,
          long numCells,
          long numRows,
          byte[] encodedKey) {
        this.mutationGroup = mutationGroup;
        this.sizeBytes = sizeBytes;
        this.numCells = numCells;
        this.numRows = numRows;
        this.encodedKey = encodedKey;
      }

      @Override
      public int compareTo(MutationGroupContainer o) {
        // Lexicographic unsigned-byte comparison of the encoded table-name-and-key.
        return UnsignedBytes.lexicographicalComparator().compare(this.encodedKey, o.encodedKey);
      }
    }

    // TODO(BEAM-1287): Remove this when FinishBundle has added support for an {@link
    // OutputReceiver}
    private static class OutputReceiverForFinishBundle
        implements OutputReceiver<Iterable<MutationGroup>> {

      private final FinishBundleContext c;

      OutputReceiverForFinishBundle(FinishBundleContext c) {
        this.c = c;
      }

      @Override
      public void output(Iterable<MutationGroup> output) {
        outputWithTimestamp(output, Instant.now());
      }

      @Override
      public void outputWithTimestamp(Iterable<MutationGroup> output, Instant timestamp) {
        c.output(output, timestamp, GlobalWindow.INSTANCE);
      }
    }
  }

  /**
   * Filters MutationGroups larger than the batch size to the output tagged with {@code
   * UNBATCHABLE_MUTATIONS_TAG}.
   *
   * <p>Testing notes: As batching does not occur during full pipeline testing, this DoFn must be
   * tested in isolation.
   */
  @VisibleForTesting
  static class BatchableMutationFilterFn extends DoFn<MutationGroup, MutationGroup> {

    private final PCollectionView<SpannerSchema> schemaView;
    // Side output tag for groups that must be written individually rather than batched.
    private final TupleTag<Iterable<MutationGroup>> unbatchableMutationsTag;
    // Thresholds above which a single group is considered unbatchable.
    private final long batchSizeBytes;
    private final long maxNumMutations;
    private final long maxNumRows;
    private final Counter batchableMutationGroupsCounter =
        Metrics.counter(WriteGrouped.class, "batchable_mutation_groups");
    private final Counter unBatchableMutationGroupsCounter =
        Metrics.counter(WriteGrouped.class, "unbatchable_mutation_groups");

    BatchableMutationFilterFn(
        PCollectionView<SpannerSchema> schemaView,
        TupleTag<Iterable<MutationGroup>> unbatchableMutationsTag,
        long batchSizeBytes,
        long maxNumMutations,
        long maxNumRows) {
      this.schemaView = schemaView;
      this.unbatchableMutationsTag = unbatchableMutationsTag;
      this.batchSizeBytes = batchSizeBytes;
      this.maxNumMutations = maxNumMutations;
      this.maxNumRows = maxNumRows;
    }

    /**
     * Routes each mutation group: ranged deletes and groups at or over any size/cell/row
     * threshold go to the unbatchable side output; everything else passes through for batching.
     */
    @ProcessElement
    public void processElement(ProcessContext c) {
      MutationGroup mg = c.element();
      if (mg.primary().getOperation() == Op.DELETE && !isPointDelete(mg.primary())) {
        // Ranged deletes are not batchable.
        c.output(unbatchableMutationsTag, Arrays.asList(mg));
        unBatchableMutationGroupsCounter.inc();
        return;
      }

      SpannerSchema spannerSchema = c.sideInput(schemaView);
      long groupSize = MutationSizeEstimator.sizeOf(mg);
      long groupCells = MutationCellCounter.countOf(spannerSchema, mg);
      long groupRows = Iterables.size(mg);

      if (groupSize >= batchSizeBytes || groupCells >= maxNumMutations || groupRows >= maxNumRows) {
        c.output(unbatchableMutationsTag, Arrays.asList(mg));
        unBatchableMutationGroupsCounter.inc();
      } else {
        c.output(mg);
        batchableMutationGroupsCounter.inc();
      }
    }
  }

  /**
   * Writes batches of mutation groups to Cloud Spanner, with DEADLINE_EXCEEDED backoff/retries,
   * silent retries on schema-change aborts, per-table service call metrics, and (depending on
   * {@link FailureMode}) either fail-fast behavior or per-group retry with a failure side output.
   */
  @VisibleForTesting
  static class WriteToSpannerFn extends DoFn<Iterable<MutationGroup>, Void> {

    private final SpannerConfig spannerConfig;
    private final FailureMode failureMode;

    // SpannerAccessor can not be serialized so must be initialized at runtime in setup().
    private transient SpannerAccessor spannerAccessor;

    /* Number of times an aborted write to spanner could be retried */
    private static final int ABORTED_RETRY_ATTEMPTS = 5;
    /* Error string in Aborted exception during schema change */
    private final String errString =
        "Transaction aborted. "
            + "Database schema probably changed during transaction, retry may succeed.";

    @VisibleForTesting static Sleeper sleeper = Sleeper.DEFAULT;

    private final Counter mutationGroupBatchesReceived =
        Metrics.counter(WriteGrouped.class, "mutation_group_batches_received");
    private final Counter mutationGroupBatchesWriteSuccess =
        Metrics.counter(WriteGrouped.class, "mutation_group_batches_write_success");
    private final Counter mutationGroupBatchesWriteFail =
        Metrics.counter(WriteGrouped.class, "mutation_group_batches_write_fail");
    private final Counter mutationGroupsReceived =
        Metrics.counter(WriteGrouped.class, "mutation_groups_received");
    private final Counter mutationGroupsWriteSuccess =
        Metrics.counter(WriteGrouped.class, "mutation_groups_write_success");
    private final Counter mutationGroupsWriteFail =
        Metrics.counter(WriteGrouped.class, "mutation_groups_write_fail");
    private final Counter spannerWriteSuccess =
        Metrics.counter(WriteGrouped.class, "spanner_write_success");
    private final Counter spannerWriteFail =
        Metrics.counter(WriteGrouped.class, "spanner_write_fail");
    private final Distribution spannerWriteLatency =
        Metrics.distribution(WriteGrouped.class, "spanner_write_latency_ms");
    private final Counter spannerWriteTimeouts =
        Metrics.counter(WriteGrouped.class, "spanner_write_timeouts");
    private final Counter spannerWriteRetries =
        Metrics.counter(WriteGrouped.class, "spanner_write_retries");

    // Side output tag for mutation groups that could not be written (REPORT_FAILURES mode).
    private final TupleTag<MutationGroup> failedTag;

    // Fluent Backoff is not serializable so create at runtime in setup().
    private transient FluentBackoff bundleWriteBackoff;
    // Per-table ServiceCallMetric cache; entries are created on demand in setup()'s loader.
    private transient LoadingCache<String, ServiceCallMetric> writeMetricsByTableName;

    WriteToSpannerFn(
        SpannerConfig spannerConfig, FailureMode failureMode, TupleTag<MutationGroup> failedTag) {
      this.spannerConfig = spannerConfig;
      this.failureMode = failureMode;
      this.failedTag = failedTag;
    }

    /** Initializes the non-serializable Spanner accessor, backoff policy and metrics cache. */
    @Setup
    public void setup() {
      spannerAccessor = SpannerAccessor.getOrCreate(spannerConfig);
      // Initial backoff is 1/60th of the configured max cumulative backoff.
      bundleWriteBackoff =
          FluentBackoff.DEFAULT
              .withMaxCumulativeBackoff(spannerConfig.getMaxCumulativeBackoff().get())
              .withInitialBackoff(spannerConfig.getMaxCumulativeBackoff().get().dividedBy(60));

      // Use a LoadingCache for metrics as there can be different tables being written to which
      // result in different service call metrics labels. ServiceCallMetric items are created
      // on-demand and added to the cache.
      writeMetricsByTableName =
          CacheBuilder.newBuilder()
              .maximumSize(METRICS_CACHE_SIZE)
              .build(
                  new CacheLoader<String, ServiceCallMetric>() {
                    @Override
                    public ServiceCallMetric load(String tableName) {
                      return buildWriteServiceCallMetric(spannerConfig, tableName);
                    }
                  });
    }

    @Teardown
    public void teardown() {
      spannerAccessor.close();
    }

    /**
     * Writes the whole batch of mutation groups at once; on failure, either rethrows
     * (FAIL_FAST) or retries each group individually, routing still-failing groups to the
     * {@code failedTag} side output (REPORT_FAILURES).
     */
    @ProcessElement
    public void processElement(ProcessContext c) throws Exception {
      List<MutationGroup> mutations = ImmutableList.copyOf(c.element());

      // Batch upsert rows.
      try {
        mutationGroupBatchesReceived.inc();
        mutationGroupsReceived.inc(mutations.size());
        Iterable<Mutation> batch = Iterables.concat(mutations);
        writeMutations(batch);
        mutationGroupBatchesWriteSuccess.inc();
        mutationGroupsWriteSuccess.inc(mutations.size());
        return;
      } catch (SpannerException e) {
        mutationGroupBatchesWriteFail.inc();
        if (failureMode == FailureMode.REPORT_FAILURES) {
          // fall through and retry individual mutationGroups.
        } else if (failureMode == FailureMode.FAIL_FAST) {
          mutationGroupsWriteFail.inc(mutations.size());
          throw e;
        } else {
          throw new IllegalArgumentException("Unknown failure mode " + failureMode);
        }
      }

      // If we are here, writing a batch has failed, retry individual mutations.
      for (MutationGroup mg : mutations) {
        try {
          spannerWriteRetries.inc();
          writeMutations(mg);
          mutationGroupsWriteSuccess.inc();
        } catch (SpannerException e) {
          mutationGroupsWriteFail.inc();
          LOG.warn("Failed to write the mutation group: " + mg, e);
          c.output(failedTag, mg);
        }
      }
    }

    /*
     Spanner aborts all inflight transactions during a schema change. Client is expected
     to retry silently. These must not be counted against retry backoff.
    */
    private void spannerWriteWithRetryIfSchemaChange(List<Mutation> batch)
        throws SpannerException {
      // Up to ABORTED_RETRY_ATTEMPTS tries; only Aborted exceptions that are retryable or
      // carry the schema-change message are retried, everything else is rethrown.
      for (int retry = 1; ; retry++) {
        try {
          if (spannerConfig.getRpcPriority() != null
              && spannerConfig.getRpcPriority().get() != null) {
            spannerAccessor
                .getDatabaseClient()
                .writeAtLeastOnceWithOptions(
                    batch, Options.priority(spannerConfig.getRpcPriority().get()));
          } else {
            spannerAccessor.getDatabaseClient().writeAtLeastOnce(batch);
          }
          reportServiceCallMetricsForBatch(batch, "ok");
          return;
        } catch (AbortedException e) {
          reportServiceCallMetricsForBatch(batch, e.getErrorCode().getGrpcStatusCode().toString());
          if (retry >= ABORTED_RETRY_ATTEMPTS) {
            throw e;
          }
          if (e.isRetryable() || e.getMessage().contains(errString)) {
            continue;
          }
          throw e;
        } catch (SpannerException e) {
          reportServiceCallMetricsForBatch(batch, e.getErrorCode().getGrpcStatusCode().toString());
          throw e;
        }
      }
    }

    /** Records one service-call metric per distinct table touched by the batch. */
    private void reportServiceCallMetricsForBatch(List<Mutation> batch, String statusCode) {
      // Get names of all tables in batch of mutations.
      Set<String> tableNames =
          batch.stream().map(Mutation::getTable).collect(Collectors.toSet());
      for (String tableName : tableNames) {
        writeMetricsByTableName.getUnchecked(tableName).call(statusCode);
      }
    }

    /** Builds the ServiceCallMetric for Write calls against a specific Spanner table. */
    private static ServiceCallMetric buildWriteServiceCallMetric(
        SpannerConfig config, String tableId) {
      HashMap<String, String> baseLabels = buildServiceCallMetricLabels(config);
      baseLabels.put(MonitoringInfoConstants.Labels.METHOD, "Write");
      baseLabels.put(
          MonitoringInfoConstants.Labels.RESOURCE,
          GcpResourceIdentifiers.spannerTable(
              baseLabels.get(MonitoringInfoConstants.Labels.SPANNER_PROJECT_ID),
              config.getInstanceId().get(),
              config.getDatabaseId().get(),
              tableId));
      baseLabels.put(MonitoringInfoConstants.Labels.TABLE_ID, tableId);
      return new ServiceCallMetric(MonitoringInfoConstants.Urns.API_REQUEST_COUNT, baseLabels);
    }

    /** Write the Mutations to Spanner, handling DEADLINE_EXCEEDED with backoff/retries. */
    private void writeMutations(Iterable<Mutation> mutationIterable)
        throws SpannerException, IOException {
      BackOff backoff = bundleWriteBackoff.backoff();
      List<Mutation> mutations = ImmutableList.copyOf(mutationIterable);

      while (true) {
        Stopwatch timer = Stopwatch.createStarted();
        // loop is broken on success, timeout backoff/retry attempts exceeded, or other failure.
        try {
          spannerWriteWithRetryIfSchemaChange(mutations);
          spannerWriteSuccess.inc();
          return;
        } catch (SpannerException exception) {
          if (exception.getErrorCode() == ErrorCode.DEADLINE_EXCEEDED) {
            spannerWriteTimeouts.inc();

            // Potentially backoff/retry after DEADLINE_EXCEEDED.
            long sleepTimeMsecs = backoff.nextBackOffMillis();
            if (sleepTimeMsecs == BackOff.STOP) {
              LOG.error(
                  "DEADLINE_EXCEEDED writing batch of {} mutations to Cloud Spanner. "
                      + "Aborting after too many retries.",
                  mutations.size());
              spannerWriteFail.inc();
              throw exception;
            }
            LOG.info(
                "DEADLINE_EXCEEDED writing batch of {} mutations to Cloud Spanner, "
                    + "retrying after backoff of {}ms\n"
                    + "({})",
                mutations.size(),
                sleepTimeMsecs,
                exception.getMessage());
            spannerWriteRetries.inc();
            try {
              sleeper.sleep(sleepTimeMsecs);
            } catch (InterruptedException e) {
              // ignore.
            }
          } else {
            // Some other failure: pass up the stack.
            spannerWriteFail.inc();
            throw exception;
          }
        } finally {
          // Record latency for every attempt, successful or not.
          spannerWriteLatency.update(timer.elapsed(TimeUnit.MILLISECONDS));
        }
      }
    }
  }

  private SpannerIO() {} // Prevent construction.

  /**
   * Builds the common monitoring labels (project/instance/database) for Spanner service-call
   * metrics, falling back to the default project id when none is configured.
   */
  private static HashMap<String, String> buildServiceCallMetricLabels(SpannerConfig config) {
    HashMap<String, String> baseLabels = new HashMap<>();
    baseLabels.put(MonitoringInfoConstants.Labels.PTRANSFORM, "");
    baseLabels.put(MonitoringInfoConstants.Labels.SERVICE, "Spanner");
    baseLabels.put(
        MonitoringInfoConstants.Labels.SPANNER_PROJECT_ID,
        config.getProjectId() == null
                || config.getProjectId().get() == null
                || config.getProjectId().get().isEmpty()
            ? SpannerOptions.getDefaultProjectId()
            : config.getProjectId().get());
    baseLabels.put(
        MonitoringInfoConstants.Labels.SPANNER_INSTANCE_ID, config.getInstanceId().get());
    baseLabels.put(
        MonitoringInfoConstants.Labels.SPANNER_DATABASE_ID, config.getDatabaseId().get());
    return baseLabels;
  }
}