text
stringlengths
7
1.01M
package com.vitanov.multiimagepicker;

import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.database.Cursor;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Color;
import android.graphics.Matrix;
import android.content.ContentUris;
import android.net.Uri;
import android.os.AsyncTask;
import android.os.Build;
import android.provider.MediaStore;
import android.provider.OpenableColumns;
import android.text.TextUtils;
import android.media.ThumbnailUtils;
import android.media.MediaMetadataRetriever;
import android.webkit.MimeTypeMap;
import android.content.ContentResolver;
import java.net.URLConnection;
import androidx.annotation.NonNull;
import androidx.core.content.ContextCompat;
import androidx.exifinterface.media.ExifInterface;
import com.esafirm.imagepicker.R;
import com.esafirm.imagepicker.features.ImagePicker;
import com.esafirm.imagepicker.features.ReturnMode;
import com.esafirm.imagepicker.features.ImagePickerConfig;
import com.esafirm.imagepicker.features.ImagePickerFragment;
import com.esafirm.imagepicker.features.ImagePickerInteractionListener;
import com.esafirm.imagepicker.features.cameraonly.CameraOnlyConfig;
import com.esafirm.imagepicker.helper.ConfigUtils;
import com.esafirm.imagepicker.helper.IpLogger;
import com.esafirm.imagepicker.helper.LocaleManager;
import com.esafirm.imagepicker.helper.ViewUtils;
import com.esafirm.imagepicker.model.Image;
import android.content.pm.ActivityInfo;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.lang.ref.WeakReference;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import io.flutter.embedding.engine.plugins.FlutterPlugin;
import io.flutter.embedding.engine.plugins.activity.ActivityAware;
import io.flutter.embedding.engine.plugins.activity.ActivityPluginBinding;
import io.flutter.plugin.common.BinaryMessenger;
import io.flutter.plugin.common.MethodCall;
import io.flutter.plugin.common.MethodChannel;
import io.flutter.plugin.common.MethodChannel.MethodCallHandler;
import io.flutter.plugin.common.MethodChannel.Result;
import io.flutter.plugin.common.PluginRegistry;
import io.flutter.plugin.common.PluginRegistry.Registrar;

import static android.media.ThumbnailUtils.OPTIONS_RECYCLE_INPUT;

/**
 * MultiImagePickerPlugin
 *
 * Flutter plugin that lets Dart code pick images/videos from the device gallery
 * (via the esafirm ImagePicker library) and fetch thumbnails, full-size bytes,
 * and EXIF metadata for a picked asset.
 *
 * Threading/state: only one method call may be in flight at a time — the pending
 * Dart callback is held in {@code pendingResult} and cleared when the call finishes.
 * Image bytes are produced on background AsyncTasks and streamed back over the
 * BinaryMessenger on per-asset channels.
 */
public class MultiImagePickerPlugin implements
        FlutterPlugin,
        ActivityAware,
        MethodCallHandler,
        PluginRegistry.ActivityResultListener {

    // Method-channel name and the method / argument keys shared with the Dart side.
    private static final String CHANNEL_NAME = "multi_image_picker";
    private static final String REQUEST_THUMBNAIL = "requestThumbnail";
    private static final String REQUEST_ORIGINAL = "requestOriginal";
    private static final String REQUEST_METADATA = "requestMetadata";
    private static final String PICK_IMAGES = "pickImages";
    private static final String MAX_IMAGES = "maxImages";
    private static final String SELECTED_ASSETS = "selectedAssets";
    private static final String GALLERY_MODE = "galleryMode";
    private static final String ANDROID_OPTIONS = "androidOptions";
    private static final int REQUEST_CODE_CHOOSE = 1001;

    private MethodChannel channel;
    // Current foreground Activity; null while detached (e.g. during config changes).
    private Activity activity;
    private Context context;
    private BinaryMessenger messenger;
    // Callback for the Dart call currently in flight; non-null means "busy".
    private Result pendingResult;
    private MethodCall methodCall;

    /**
     * Plugin registration.
     *
     * Legacy (pre-Flutter-1.12 "v1" embedding) entry point.
     */
    public static void registerWith(Registrar registrar) {
        MultiImagePickerPlugin instance = new MultiImagePickerPlugin();
        instance.onAttachedToEngine(registrar.context(), registrar.messenger(), registrar.activity());
        registrar.addActivityResultListener(instance);
    }

    // Shared setup for both the v1 and v2 embedding paths. activity may be null
    // (v2 path supplies it later through onAttachedToActivity).
    private void onAttachedToEngine(Context applicationContext, BinaryMessenger binaryMessenger, Activity activity) {
        context = applicationContext;
        messenger = binaryMessenger;
        if (activity != null) {
            this.activity = activity;
        }
        channel = new MethodChannel(binaryMessenger, CHANNEL_NAME);
        channel.setMethodCallHandler(this);
    }

    @Override
    public void onAttachedToEngine(FlutterPluginBinding binding) {
        onAttachedToEngine(binding.getApplicationContext(), binding.getBinaryMessenger(), null);
    }

    @Override
    public void onDetachedFromEngine(FlutterPluginBinding binding) {
        // Drop every engine-scoped reference so the engine can be garbage collected.
        context = null;
        if (channel != null) {
            channel.setMethodCallHandler(null);
            channel = null;
        }
        messenger = null;
    }

    @Override
    public void onAttachedToActivity(ActivityPluginBinding binding) {
        binding.addActivityResultListener(this);
        activity = binding.getActivity();
    }

    @Override
    public void onDetachedFromActivity() {
        activity = null;
    }

    @Override
    public void onDetachedFromActivityForConfigChanges() {
        activity = null;
    }

    @Override
    public void onReattachedToActivityForConfigChanges(ActivityPluginBinding binding) {
        binding.addActivityResultListener(this);
        activity = binding.getActivity();
    }

    /**
     * Background task that decodes an asset, builds a width x height JPEG thumbnail,
     * and streams the bytes to Dart on channel
     * {@code multi_image_picker/image/<identifier>.thumb}.
     *
     * Holds the Activity only weakly so a finishing Activity is not leaked.
     */
    private static class GetThumbnailTask extends AsyncTask<String, Void, ByteBuffer> {
        private WeakReference<Activity> activityReference;
        BinaryMessenger messenger;
        final String identifier;
        final int width;
        final int height;
        final int quality;

        GetThumbnailTask(Activity context, BinaryMessenger messenger, String identifier, int width, int height, int quality) {
            super();
            this.messenger = messenger;
            this.identifier = identifier;
            this.width = width;
            this.height = height;
            this.quality = quality;
            this.activityReference = new WeakReference<>(context);
        }

        @Override
        protected ByteBuffer doInBackground(String... strings) {
            final Uri uri = Uri.parse(this.identifier);
            byte[] byteArray = null;
            try {
                // get a reference to the activity if it is still there
                Activity activity = activityReference.get();
                if (activity == null || activity.isFinishing()) return null;
                Bitmap sourceBitmap = getCorrectlyOrientedImage(activity, uri);
                // OPTIONS_RECYCLE_INPUT lets extractThumbnail recycle sourceBitmap for us.
                Bitmap bitmap = ThumbnailUtils.extractThumbnail(sourceBitmap, this.width, this.height, OPTIONS_RECYCLE_INPUT);
                if (bitmap == null) return null;
                ByteArrayOutputStream bitmapStream = new ByteArrayOutputStream();
                bitmap.compress(Bitmap.CompressFormat.JPEG, this.quality, bitmapStream);
                byteArray = bitmapStream.toByteArray();
                bitmap.recycle();
                bitmapStream.close();
            } catch (IOException e) {
                // NOTE(review): failure is only printed; Dart side sees no event on error.
                e.printStackTrace();
            }
            final ByteBuffer buffer;
            if (byteArray != null) {
                // Direct buffer: BinaryMessenger.send requires a direct ByteBuffer.
                buffer = ByteBuffer.allocateDirect(byteArray.length);
                buffer.put(byteArray);
                return buffer;
            }
            return null;
        }

        @Override
        protected void onPostExecute(ByteBuffer buffer) {
            super.onPostExecute(buffer);
            if (buffer != null) {
                this.messenger.send("multi_image_picker/image/" + this.identifier + ".thumb", buffer);
                buffer.clear();
            }
        }
    }

    /**
     * Background task that decodes the full asset (orientation-corrected),
     * re-compresses it as JPEG at the requested quality, and streams the bytes to
     * Dart on channel {@code multi_image_picker/image/<identifier>.original}.
     */
    private static class GetImageTask extends AsyncTask<String, Void, ByteBuffer> {
        private final WeakReference<Activity> activityReference;
        final BinaryMessenger messenger;
        final String identifier;
        final int quality;

        GetImageTask(Activity context, BinaryMessenger messenger, String identifier, int quality) {
            super();
            this.messenger = messenger;
            this.identifier = identifier;
            this.quality = quality;
            this.activityReference = new WeakReference<>(context);
        }

        @Override
        protected ByteBuffer doInBackground(String... strings) {
            final Uri uri = Uri.parse(this.identifier);
            byte[] bytesArray = null;
            try {
                // get a reference to the activity if it is still there
                Activity activity = activityReference.get();
                if (activity == null || activity.isFinishing()) return null;
                Bitmap bitmap = getCorrectlyOrientedImage(activity, uri);
                if (bitmap == null) return null;
                ByteArrayOutputStream bitmapStream = new ByteArrayOutputStream();
                bitmap.compress(Bitmap.CompressFormat.JPEG, this.quality, bitmapStream);
                bytesArray = bitmapStream.toByteArray();
                bitmap.recycle();
                bitmapStream.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
            // NOTE(review): assert is a no-op in release builds — a decode failure here
            // becomes an NPE at allocateDirect. Unlike GetThumbnailTask there is no
            // null-tolerant path; verify whether that is intentional.
            assert bytesArray != null;
            final ByteBuffer buffer = ByteBuffer.allocateDirect(bytesArray.length);
            buffer.put(bytesArray);
            return buffer;
        }

        @Override
        protected void onPostExecute(ByteBuffer buffer) {
            super.onPostExecute(buffer);
            this.messenger.send("multi_image_picker/image/" + this.identifier + ".original", buffer);
            buffer.clear();
        }
    }

    /**
     * Dispatches an incoming Dart method call. Rejects the call with
     * "already_active" when a previous call has not yet completed.
     */
    @Override
    public void onMethodCall(final MethodCall call, final Result result) {
        if (!setPendingMethodCallAndResult(call, result)) {
            finishWithAlreadyActiveError(result);
            return;
        }
        if (PICK_IMAGES.equals(call.method)) {
            final HashMap<String, String> options = call.argument(ANDROID_OPTIONS);
            int maxImages = (int) this.methodCall.argument(MAX_IMAGES);
            int galleryMode = (int) this.methodCall.argument(GALLERY_MODE);
            ArrayList<String> selectedAssets = this.methodCall.argument(SELECTED_ASSETS);
            presentPicker(maxImages, galleryMode, selectedAssets, options);
        } else if (REQUEST_ORIGINAL.equals(call.method)) {
            final String identifier = call.argument("identifier");
            final int quality = (int) call.argument("quality");
            if (!this.uriExists(identifier)) {
                finishWithError("ASSET_DOES_NOT_EXIST", "The requested image does not exist.");
            } else {
                // Resolve the call immediately; the bytes follow asynchronously on
                // the per-asset ".original" channel.
                GetImageTask task = new GetImageTask(this.activity, this.messenger, identifier, quality);
                task.execute();
                finishWithSuccess();
            }
        } else if (REQUEST_THUMBNAIL.equals(call.method)) {
            final String identifier = call.argument("identifier");
            final int width = (int) call.argument("width");
            final int height = (int) call.argument("height");
            final int quality = (int) call.argument("quality");
            if (!this.uriExists(identifier)) {
                finishWithError("ASSET_DOES_NOT_EXIST", "The requested image does not exist.");
            } else {
                GetThumbnailTask task = new GetThumbnailTask(this.activity, this.messenger, identifier, width, height, quality);
                task.execute();
                finishWithSuccess();
            }
        } else if (REQUEST_METADATA.equals(call.method)) {
            final String identifier = call.argument("identifier");
            Uri uri = Uri.parse(identifier);
            // Scoped storage related code. We can only get gps location if we ask for original image
            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
                uri = MediaStore.setRequireOriginal(uri);
            }
            try {
                InputStream in = context.getContentResolver().openInputStream(uri);
                assert in != null;
                ExifInterface exifInterface = new ExifInterface(in);
                finishWithSuccess(getPictureExif(exifInterface, uri));
            } catch (IOException e) {
                finishWithError("Exif error", e.toString());
            }
        } else {
            pendingResult.notImplemented();
            clearMethodCallAndResult();
        }
    }

    /**
     * Collects every EXIF attribute the running API level supports into a flat map
     * keyed by the ExifInterface TAG_* constants (string-valued tags as Strings,
     * numeric tags as doubles; zero/empty values are omitted).
     */
    private HashMap<String, Object> getPictureExif(ExifInterface exifInterface, Uri uri) {
        HashMap<String, Object> result = new HashMap<>();
        // API LEVEL 24
        String[] tags_str = {
                ExifInterface.TAG_DATETIME,
                ExifInterface.TAG_GPS_DATESTAMP,
                ExifInterface.TAG_GPS_LATITUDE_REF,
                ExifInterface.TAG_GPS_LONGITUDE_REF,
                ExifInterface.TAG_GPS_PROCESSING_METHOD,
                ExifInterface.TAG_IMAGE_WIDTH,
                ExifInterface.TAG_IMAGE_LENGTH,
                ExifInterface.TAG_MAKE,
                ExifInterface.TAG_MODEL
        };
        String[] tags_double = {
                ExifInterface.TAG_APERTURE_VALUE,
                ExifInterface.TAG_FLASH,
                ExifInterface.TAG_FOCAL_LENGTH,
                ExifInterface.TAG_GPS_ALTITUDE,
                ExifInterface.TAG_GPS_ALTITUDE_REF,
                ExifInterface.TAG_GPS_LONGITUDE,
                ExifInterface.TAG_GPS_LATITUDE,
                ExifInterface.TAG_IMAGE_LENGTH,
                ExifInterface.TAG_IMAGE_WIDTH,
                ExifInterface.TAG_ISO_SPEED,
                ExifInterface.TAG_ORIENTATION,
                ExifInterface.TAG_WHITE_BALANCE,
                ExifInterface.TAG_EXPOSURE_TIME
        };
        HashMap<String, Object> exif_str = getExif_str(exifInterface, tags_str);
        result.putAll(exif_str);
        HashMap<String, Object> exif_double = getExif_double(exifInterface, tags_double);
        result.putAll(exif_double);
        // A Temp fix while location data is not returned from the exifInterface due to the errors. It also
        // covers Android >= 10 not loading GPS information from getExif_double
        if (exif_double.isEmpty()
                || !exif_double.containsKey(ExifInterface.TAG_GPS_LATITUDE)
                || !exif_double.containsKey(ExifInterface.TAG_GPS_LONGITUDE)) {
            if (uri != null) {
                HashMap<String, Object> hotfix_map = Build.VERSION.SDK_INT < Build.VERSION_CODES.Q
                        ? getLatLng(uri)
                        : getLatLng(exifInterface, uri);
                result.putAll(hotfix_map);
            }
        }
        if (Build.VERSION.SDK_INT == Build.VERSION_CODES.M) {
            String[] tags_23 = {
                    ExifInterface.TAG_DATETIME_DIGITIZED,
                    ExifInterface.TAG_SUBSEC_TIME,
                    ExifInterface.TAG_SUBSEC_TIME_DIGITIZED,
                    ExifInterface.TAG_SUBSEC_TIME_ORIGINAL
            };
            HashMap<String, Object> exif23 = getExif_str(exifInterface, tags_23);
            result.putAll(exif23);
        }
        if (Build.VERSION.SDK_INT > Build.VERSION_CODES.M) {
            String[] tags_24_str = {
                    ExifInterface.TAG_ARTIST,
                    ExifInterface.TAG_CFA_PATTERN,
                    ExifInterface.TAG_COMPONENTS_CONFIGURATION,
                    ExifInterface.TAG_COPYRIGHT,
                    ExifInterface.TAG_DATETIME_ORIGINAL,
                    ExifInterface.TAG_DEVICE_SETTING_DESCRIPTION,
                    ExifInterface.TAG_EXIF_VERSION,
                    ExifInterface.TAG_FILE_SOURCE,
                    ExifInterface.TAG_FLASHPIX_VERSION,
                    ExifInterface.TAG_GPS_AREA_INFORMATION,
                    ExifInterface.TAG_GPS_DEST_BEARING_REF,
                    ExifInterface.TAG_GPS_DEST_DISTANCE_REF,
                    ExifInterface.TAG_GPS_DEST_LATITUDE_REF,
                    ExifInterface.TAG_GPS_DEST_LONGITUDE_REF,
                    ExifInterface.TAG_GPS_IMG_DIRECTION_REF,
                    ExifInterface.TAG_GPS_MAP_DATUM,
                    ExifInterface.TAG_GPS_MEASURE_MODE,
                    ExifInterface.TAG_GPS_SATELLITES,
                    ExifInterface.TAG_GPS_SPEED_REF,
                    ExifInterface.TAG_GPS_STATUS,
                    ExifInterface.TAG_GPS_TRACK_REF,
                    ExifInterface.TAG_GPS_VERSION_ID,
                    ExifInterface.TAG_IMAGE_DESCRIPTION,
                    ExifInterface.TAG_IMAGE_UNIQUE_ID,
                    ExifInterface.TAG_INTEROPERABILITY_INDEX,
                    ExifInterface.TAG_MAKER_NOTE,
                    ExifInterface.TAG_OECF,
                    ExifInterface.TAG_RELATED_SOUND_FILE,
                    ExifInterface.TAG_SCENE_TYPE,
                    ExifInterface.TAG_SOFTWARE,
                    ExifInterface.TAG_SPATIAL_FREQUENCY_RESPONSE,
                    ExifInterface.TAG_SPECTRAL_SENSITIVITY,
                    ExifInterface.TAG_SUBSEC_TIME_DIGITIZED,
                    ExifInterface.TAG_SUBSEC_TIME_ORIGINAL,
                    ExifInterface.TAG_USER_COMMENT
            };
            String[] tags24_double = {
                    ExifInterface.TAG_APERTURE_VALUE,
                    ExifInterface.TAG_BITS_PER_SAMPLE,
                    ExifInterface.TAG_BRIGHTNESS_VALUE,
                    ExifInterface.TAG_COLOR_SPACE,
                    ExifInterface.TAG_COMPRESSED_BITS_PER_PIXEL,
                    ExifInterface.TAG_COMPRESSION,
                    ExifInterface.TAG_CONTRAST,
                    ExifInterface.TAG_CUSTOM_RENDERED,
                    ExifInterface.TAG_DIGITAL_ZOOM_RATIO,
                    ExifInterface.TAG_EXPOSURE_BIAS_VALUE,
                    ExifInterface.TAG_EXPOSURE_INDEX,
                    ExifInterface.TAG_EXPOSURE_MODE,
                    ExifInterface.TAG_EXPOSURE_PROGRAM,
                    ExifInterface.TAG_FLASH_ENERGY,
                    ExifInterface.TAG_FOCAL_LENGTH_IN_35MM_FILM,
                    ExifInterface.TAG_FOCAL_PLANE_RESOLUTION_UNIT,
                    ExifInterface.TAG_FOCAL_PLANE_X_RESOLUTION,
                    ExifInterface.TAG_FOCAL_PLANE_Y_RESOLUTION,
                    ExifInterface.TAG_F_NUMBER,
                    ExifInterface.TAG_GAIN_CONTROL,
                    ExifInterface.TAG_GPS_DEST_BEARING,
                    ExifInterface.TAG_GPS_DEST_DISTANCE,
                    ExifInterface.TAG_GPS_DEST_LATITUDE,
                    ExifInterface.TAG_GPS_DEST_LONGITUDE,
                    ExifInterface.TAG_GPS_DIFFERENTIAL,
                    ExifInterface.TAG_GPS_DOP,
                    ExifInterface.TAG_GPS_IMG_DIRECTION,
                    ExifInterface.TAG_GPS_SPEED,
                    ExifInterface.TAG_GPS_TRACK,
                    ExifInterface.TAG_JPEG_INTERCHANGE_FORMAT,
                    ExifInterface.TAG_JPEG_INTERCHANGE_FORMAT_LENGTH,
                    ExifInterface.TAG_LIGHT_SOURCE,
                    ExifInterface.TAG_MAX_APERTURE_VALUE,
                    ExifInterface.TAG_METERING_MODE,
                    ExifInterface.TAG_PHOTOMETRIC_INTERPRETATION,
                    ExifInterface.TAG_PIXEL_X_DIMENSION,
                    ExifInterface.TAG_PIXEL_Y_DIMENSION,
                    ExifInterface.TAG_PLANAR_CONFIGURATION,
                    ExifInterface.TAG_PRIMARY_CHROMATICITIES,
                    ExifInterface.TAG_REFERENCE_BLACK_WHITE,
                    ExifInterface.TAG_RESOLUTION_UNIT,
                    ExifInterface.TAG_ROWS_PER_STRIP,
                    ExifInterface.TAG_SAMPLES_PER_PIXEL,
                    ExifInterface.TAG_SATURATION,
                    ExifInterface.TAG_SCENE_CAPTURE_TYPE,
                    ExifInterface.TAG_SENSING_METHOD,
                    ExifInterface.TAG_SHARPNESS,
                    ExifInterface.TAG_SHUTTER_SPEED_VALUE,
                    ExifInterface.TAG_STRIP_BYTE_COUNTS,
                    ExifInterface.TAG_STRIP_OFFSETS,
                    ExifInterface.TAG_SUBJECT_AREA,
                    ExifInterface.TAG_SUBJECT_DISTANCE,
                    ExifInterface.TAG_SUBJECT_DISTANCE_RANGE,
                    ExifInterface.TAG_SUBJECT_LOCATION,
                    ExifInterface.TAG_THUMBNAIL_IMAGE_LENGTH,
                    ExifInterface.TAG_THUMBNAIL_IMAGE_WIDTH,
                    ExifInterface.TAG_TRANSFER_FUNCTION,
                    ExifInterface.TAG_WHITE_POINT,
                    ExifInterface.TAG_X_RESOLUTION,
                    ExifInterface.TAG_Y_CB_CR_COEFFICIENTS,
                    ExifInterface.TAG_Y_CB_CR_POSITIONING,
                    ExifInterface.TAG_Y_CB_CR_SUB_SAMPLING,
                    ExifInterface.TAG_Y_RESOLUTION,
            };
            HashMap<String, Object> exif24_str = getExif_str(exifInterface, tags_24_str);
            result.putAll(exif24_str);
            HashMap<String, Object> exif24_double = getExif_double(exifInterface, tags24_double);
            result.putAll(exif24_double);
        }
        return result;
    }

    /** Reads the given string-valued EXIF tags; tags with empty values are skipped. */
    private HashMap<String, Object> getExif_str(ExifInterface exifInterface, String[] tags) {
        HashMap<String, Object> result = new HashMap<>();
        for (String tag : tags) {
            String attribute = exifInterface.getAttribute(tag);
            if (!TextUtils.isEmpty(attribute)) {
                result.put(tag, attribute);
            }
        }
        return result;
    }

    /**
     * Reads the given numeric EXIF tags as doubles.
     * NOTE(review): a legitimate 0.0 value is indistinguishable from "absent" here
     * and gets dropped — verify that is acceptable for tags like TAG_FLASH.
     */
    private HashMap<String, Object> getExif_double(ExifInterface exifInterface, String[] tags) {
        HashMap<String, Object> result = new HashMap<>();
        for (String tag : tags) {
            double attribute = exifInterface.getAttributeDouble(tag, 0.0);
            if (attribute != 0.0) {
                result.put(tag, attribute);
            }
        }
        return result;
    }

    /** Returns true when the identifier resolves to an asset with a display name. */
    private boolean uriExists(String identifier) {
        Uri uri = Uri.parse(identifier);
        String fileName = this.getFileName(uri);
        return (fileName != null);
    }

    /**
     * Launches the esafirm ImagePicker UI configured from the Dart-side options map.
     * maxImages == 1 opens single-select mode; otherwise multi-select pre-populated
     * with the previously selected assets.
     */
    private void presentPicker(int maxImages, int galleryMode, ArrayList<String> selectedAssets, HashMap<String, String> options) {
        String folderMode = options.get("folderMode");
        String toolbarFolderTitle = options.get("toolbarFolderTitle");
        String toolbarImageTitle = options.get("toolbarImageTitle");
        String toolbarDoneButtonText = options.get("toolbarDoneButtonText");
        String toolbarArrowColor = options.get("toolbarArrowColor");
        int toolbarArrowColorInt = Color.BLACK;
        String includeAnimation = options.get("includeAnimation");
        if (toolbarArrowColor != null && !toolbarArrowColor.isEmpty()) {
            toolbarArrowColorInt = Color.parseColor(toolbarArrowColor);
        }
        // Rebuild Image objects for assets that should appear pre-selected.
        ArrayList<Image> selectedImages = new ArrayList<Image>();
        for (String path : selectedAssets) {
            long selectedAssetId = convertMediaUriToId(Uri.parse(path));
            String selectedAssetPath = convertMediaUriToPath(Uri.parse(path));
            String selectedAssetName = selectedAssetPath.substring(selectedAssetPath.lastIndexOf("/") + 1);
            selectedImages.add(new Image(selectedAssetId, selectedAssetName, selectedAssetPath));
        }
        // galleryMode : 1-Images&Video;2-Images;3-Video
        boolean includeVideo = false;
        boolean onlyVideo = false;
        if (galleryMode == 1) {
            includeVideo = true;
        } else if (galleryMode == 2) {
            includeVideo = false;
            onlyVideo = false;
        } else if (galleryMode == 3) {
            includeVideo = true;
            onlyVideo = true;
        }
        if (maxImages == 1) {
            ImagePicker.create(MultiImagePickerPlugin.this.activity)
                    .returnMode(ReturnMode.ALL)
                    .includeVideo(includeVideo)
                    .onlyVideo(onlyVideo)
                    .folderMode(folderMode.equals("true"))
                    .includeAnimation(includeAnimation.equals("true"))
                    .toolbarFolderTitle(toolbarFolderTitle)
                    .toolbarImageTitle(toolbarImageTitle)
                    .toolbarDoneButtonText(toolbarDoneButtonText)
                    .toolbarArrowColor(toolbarArrowColorInt)
                    .single()
                    .showCamera(false)
                    .start();
        } else {
            ImagePicker.create(MultiImagePickerPlugin.this.activity)
                    .returnMode(ReturnMode.NONE)
                    .includeVideo(includeVideo)
                    .onlyVideo(onlyVideo)
                    .folderMode(folderMode.equals("true"))
                    .includeAnimation(includeAnimation.equals("true"))
                    .toolbarFolderTitle(toolbarFolderTitle)
                    .toolbarImageTitle(toolbarImageTitle)
                    .toolbarDoneButtonText(toolbarDoneButtonText)
                    .toolbarArrowColor(toolbarArrowColorInt)
                    .multi()
                    .limit(maxImages)
                    .showCamera(false)
                    .origin(selectedImages)
                    .start();
        }
    }

    /**
     * Resolves a media content Uri to its filesystem path via the DATA column.
     * NOTE(review): no null check on the cursor, and DATA is deprecated under
     * scoped storage (Android 10+) — confirm these Uris are always resolvable.
     */
    private String convertMediaUriToPath(Uri uri) {
        String[] proj = {MediaStore.Images.Media.DATA};
        Cursor cursor = context.getContentResolver().query(uri, proj, null, null, null);
        int column_index = cursor.getColumnIndexOrThrow(MediaStore.Images.Media.DATA);
        cursor.moveToFirst();
        String path = cursor.getString(column_index);
        cursor.close();
        return path;
    }

    /** Resolves a media content Uri to its MediaStore row id (_ID column). */
    private long convertMediaUriToId(Uri uri) {
        String[] proj = {MediaStore.Images.Media._ID};
        Cursor cursor = context.getContentResolver().query(uri, proj, null, null, null);
        int column_index = cursor.getColumnIndexOrThrow(MediaStore.Images.Media._ID);
        cursor.moveToFirst();
        long id = cursor.getLong(column_index);
        cursor.close();
        return id;
    }

    /**
     * Treats anything that is not an image MIME type as video.
     * NOTE(review): currently unused within this class, and the computed isVideo
     * local is ignored in favor of !isImage.
     */
    private boolean getIsVideo(Uri uri) {
        String mimeType = context.getContentResolver().getType(uri);
        if (mimeType == null) {
            mimeType = getMimeType(uri);
        }
        boolean isImage = mimeType != null && mimeType.startsWith("image");
        boolean isVideo = mimeType != null && mimeType.startsWith("video");
        //System.out.println("[XXX1] " + uri);
        //System.out.println("[XXX2] " + mimeType);
        return !isImage;
    }

    /** MIME type lookup: ContentResolver for content:// Uris, file extension otherwise. */
    private String getMimeType(Uri uri) {
        String mimeType = null;
        if (uri.getScheme().equals(ContentResolver.SCHEME_CONTENT)) {
            ContentResolver cr = context.getContentResolver();
            mimeType = cr.getType(uri);
        } else {
            String fileExtension = MimeTypeMap.getFileExtensionFromUrl(uri.toString());
            mimeType = MimeTypeMap.getSingleton().getMimeTypeFromExtension(fileExtension.toLowerCase());
        }
        return mimeType;
    }

    /**
     * Handles the picker Activity result: builds one map per picked asset
     * (identifier, width, height, name, isVideo, path) and resolves the pending
     * Dart call with the list. Dimensions are swapped for 90/270-degree rotations.
     */
    @Override
    public boolean onActivityResult(int requestCode, final int resultCode, Intent data) {
        if (requestCode == REQUEST_CODE_CHOOSE && resultCode == Activity.RESULT_CANCELED) {
            finishWithError("CANCELLED", "The user has cancelled the selection");
        } else if (ImagePicker.shouldHandle(requestCode, resultCode, data)) {
            // Get a list of picked images
            List<Image> images = ImagePicker.getImages(data);
            // or get a single image only
            //Image image = ImagePicker.getFirstImageOrNull(data);
            if (images == null || images.size() == 0) {
                clearMethodCallAndResult();
                return false;
            }
            List<HashMap<String, Object>> result = new ArrayList<>(images.size());
            for (Image image : images) {
                boolean isVideo = isVideoFormat(image);
                // System.out.println("[path] " + image.getPath());
                Uri uri;
                if (isVideo) {
                    uri = ContentUris.withAppendedId(MediaStore.Video.Media.EXTERNAL_CONTENT_URI, image.getId());
                } else {
                    uri = ContentUris.withAppendedId(MediaStore.Images.Media.EXTERNAL_CONTENT_URI, image.getId());
                }
                //System.out.println("[uri] " + uri);
                HashMap<String, Object> map = new HashMap<>();
                map.put("identifier", uri.toString());
                InputStream is = null;
                int width = 0, height = 0;
                if (isVideo) {
                    // Video: dimensions and rotation come from the media metadata.
                    MediaMetadataRetriever metaRetriever = new MediaMetadataRetriever();
                    metaRetriever.setDataSource(image.getPath());
                    height = Integer.valueOf(metaRetriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_HEIGHT));
                    width = Integer.valueOf(metaRetriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_WIDTH));
                    String metaRotation = metaRetriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_ROTATION);
                    int rotation = metaRotation == null ? 0 : Integer.parseInt(metaRotation);
                    if (rotation == 90 || rotation == 270) {
                        int temp = width;
                        width = height;
                        height = temp;
                    }
                    metaRetriever.release();
                } else {
                    // Image: bounds-only decode (inJustDecodeBounds) avoids loading pixels.
                    try {
                        is = context.getContentResolver().openInputStream(uri);
                        BitmapFactory.Options dbo = new BitmapFactory.Options();
                        dbo.inJustDecodeBounds = true;
                        dbo.inScaled = false;
                        dbo.inSampleSize = 1;
                        BitmapFactory.decodeStream(is, null, dbo);
                        if (is != null) {
                            is.close();
                        }
                        int orientation = getOrientation(context, uri);
                        if (orientation == 90 || orientation == 270) {
                            width = dbo.outHeight;
                            height = dbo.outWidth;
                        } else {
                            width = dbo.outWidth;
                            height = dbo.outHeight;
                        }
                    } catch (IOException e) {
                        e.printStackTrace();
                    }
                }
                map.put("width", width);
                map.put("height", height);
                map.put("name", getFileName(uri));
                map.put("isVideo", isVideo);
                map.put("path", image.getPath());
                result.add(map);
            }
            finishWithSuccess(result);
            return true;
        } else {
            finishWithSuccess(Collections.emptyList());
            clearMethodCallAndResult();
        }
        return false;
    }

    /** True when the asset's MIME type (by extension, else by name guess) is video/*. */
    private boolean isVideoFormat(Image image) {
        String extension = getExtension(image.getPath());
        String mimeType = TextUtils.isEmpty(extension)
                ? URLConnection.guessContentTypeFromName(image.getPath())
                : MimeTypeMap.getSingleton().getMimeTypeFromExtension(extension);
        return mimeType != null && mimeType.startsWith("video");
    }

    /** Extracts the file extension from a path; empty string when there is none. */
    private String getExtension(String path) {
        String extension = MimeTypeMap.getFileExtensionFromUrl(path);
        if (!TextUtils.isEmpty(extension)) {
            return extension;
        }
        if (path.contains(".")) {
            return path.substring(path.lastIndexOf(".") + 1, path.length());
        } else {
            return "";
        }
    }

    /**
     * GPS extraction via ExifInterface (Android 10+ path). Values are stored as
     * absolute values; the *_REF tags carry the hemisphere sign.
     */
    private HashMap<String, Object> getLatLng(ExifInterface exifInterface, @NonNull Uri uri) {
        HashMap<String, Object> result = new HashMap<>();
        double[] latLong = exifInterface.getLatLong();
        if (latLong != null && latLong.length == 2) {
            result.put(ExifInterface.TAG_GPS_LATITUDE, Math.abs(latLong[0]));
            result.put(ExifInterface.TAG_GPS_LONGITUDE, Math.abs(latLong[1]));
        }
        return result;
    }

    /**
     * GPS extraction via the MediaStore latitude/longitude columns (pre-Android-10
     * path, where those columns are still queryable).
     */
    private HashMap<String, Object> getLatLng(@NonNull Uri uri) {
        HashMap<String, Object> result = new HashMap<>();
        String latitudeStr = "latitude";
        String longitudeStr = "longitude";
        List<String> latlngList = Arrays.asList(latitudeStr, longitudeStr);
        int indexNotPresent = -1;
        String uriScheme = uri.getScheme();
        if (uriScheme == null) {
            return result;
        }
        if (uriScheme.equals("content")) {
            Cursor cursor = context.getContentResolver().query(uri, null, null, null, null);
            if (cursor == null) {
                return result;
            }
            try {
                String[] columnNames = cursor.getColumnNames();
                List<String> columnNamesList = Arrays.asList(columnNames);
                for (String latorlngStr : latlngList) {
                    cursor.moveToFirst();
                    int index = columnNamesList.indexOf(latorlngStr);
                    if (index > indexNotPresent) {
                        Double val = cursor.getDouble(index);
                        // Inserting it as abs as it is the ref the define if the value should be negative or positive
                        if (latorlngStr.equals(latitudeStr)) {
                            result.put(ExifInterface.TAG_GPS_LATITUDE, Math.abs(val));
                        } else {
                            result.put(ExifInterface.TAG_GPS_LONGITUDE, Math.abs(val));
                        }
                    }
                }
            } catch (NullPointerException e) {
                e.printStackTrace();
            } finally {
                try {
                    cursor.close();
                } catch (NullPointerException e) {
                    e.printStackTrace();
                }
            }
        }
        return result;
    }

    /**
     * Display name for a Uri: DISPLAY_NAME column for content:// Uris, falling back
     * to the last path segment. Returns null only when both sources are empty.
     */
    private String getFileName(Uri uri) {
        String result = null;
        if (uri.getScheme().equals("content")) {
            Cursor cursor = context.getContentResolver().query(uri, null, null, null, null);
            try {
                if (cursor != null && cursor.moveToFirst()) {
                    result = cursor.getString(cursor.getColumnIndex(OpenableColumns.DISPLAY_NAME));
                }
            } finally {
                cursor.close();
            }
        }
        if (result == null) {
            result = uri.getPath();
            int cut = result.lastIndexOf('/');
            if (cut != -1) {
                result = result.substring(cut + 1);
            }
        }
        return result;
    }

    /** Reads the EXIF orientation tag and maps it to 0/90/180/270 degrees. */
    private static int getOrientation(Context context, Uri photoUri) {
        int rotationDegrees = 0;
        try {
            InputStream in = context.getContentResolver().openInputStream(photoUri);
            assert (in != null);
            ExifInterface exifInterface = new ExifInterface(in);
            int orientation = exifInterface.getAttributeInt(ExifInterface.TAG_ORIENTATION, 1);
            switch (orientation) {
                case ExifInterface.ORIENTATION_ROTATE_90:
                    rotationDegrees = 90;
                    break;
                case ExifInterface.ORIENTATION_ROTATE_180:
                    rotationDegrees = 180;
                    break;
                case ExifInterface.ORIENTATION_ROTATE_270:
                    rotationDegrees = 270;
                    break;
            }
        } catch (Exception ignored) {
            // Best effort: unreadable EXIF falls back to no rotation.
        }
        return rotationDegrees;
    }

    /**
     * Decodes the full bitmap for a Uri and rotates it to match its EXIF orientation.
     * The stream is opened twice: once for a bounds-only probe, once for the decode.
     *
     * @throws IOException if the content stream cannot be read
     */
    private static Bitmap getCorrectlyOrientedImage(Context context, Uri photoUri) throws IOException {
        InputStream is = context.getContentResolver().openInputStream(photoUri);
        BitmapFactory.Options dbo = new BitmapFactory.Options();
        dbo.inScaled = false;
        dbo.inSampleSize = 1;
        dbo.inJustDecodeBounds = true;
        BitmapFactory.decodeStream(is, null, dbo);
        if (is != null) {
            is.close();
        }
        int orientation = getOrientation(context, photoUri);
        Bitmap srcBitmap;
        is = context.getContentResolver().openInputStream(photoUri);
        srcBitmap = BitmapFactory.decodeStream(is);
        if (is != null) {
            is.close();
        }
        if (orientation > 0) {
            Matrix matrix = new Matrix();
            matrix.postRotate(orientation);
            srcBitmap = Bitmap.createBitmap(srcBitmap, 0, 0, srcBitmap.getWidth(), srcBitmap.getHeight(), matrix, true);
        }
        return srcBitmap;
    }

    /**
     * Standard power-of-two down-sampling factor so the decoded image stays at least
     * as large as the requested dimensions.
     * NOTE(review): currently unused within this class.
     */
    public static int calculateInSampleSize(BitmapFactory.Options options, int reqWidth, int reqHeight) {
        // Raw height and width of image
        final int height = options.outHeight;
        final int width = options.outWidth;
        int inSampleSize = 1;
        if (height > reqHeight || width > reqWidth) {
            final int halfHeight = height / 2;
            final int halfWidth = width / 2;
            // Calculate the largest inSampleSize value that is a power of 2 and keeps both
            // height and width larger than the requested height and width.
            while ((halfHeight / inSampleSize) >= reqHeight && (halfWidth / inSampleSize) >= reqWidth) {
                inSampleSize *= 2;
            }
        }
        return inSampleSize;
    }

    /** Resolves the pending call with a list payload and releases the busy state. */
    private void finishWithSuccess(List imagePathList) {
        if (pendingResult != null) pendingResult.success(imagePathList);
        clearMethodCallAndResult();
    }

    /** Resolves the pending call with a map payload and releases the busy state. */
    private void finishWithSuccess(HashMap<String, Object> hashMap) {
        if (pendingResult != null) pendingResult.success(hashMap);
        clearMethodCallAndResult();
    }

    /** Resolves the pending call with {@code true} and releases the busy state. */
    private void finishWithSuccess() {
        if (pendingResult != null) pendingResult.success(true);
        clearMethodCallAndResult();
    }

    /** Rejects a call that arrived while another one was still pending. */
    private void finishWithAlreadyActiveError(MethodChannel.Result result) {
        if (result != null) result.error("already_active", "Image picker is already active", null);
    }

    /** Rejects the pending call with an error code/message and releases the busy state. */
    private void finishWithError(String errorCode, String errorMessage) {
        if (pendingResult != null) pendingResult.error(errorCode, errorMessage, null);
        clearMethodCallAndResult();
    }

    private void clearMethodCallAndResult() {
        methodCall = null;
        pendingResult = null;
    }

    /**
     * Claims the single pending-call slot. Returns false (call must be rejected)
     * when another call is already in flight.
     */
    private boolean setPendingMethodCallAndResult(MethodCall methodCall, MethodChannel.Result result) {
        if (pendingResult != null) {
            return false;
        }
        this.methodCall = methodCall;
        pendingResult = result;
        return true;
    }
}
package com.sochat.client;

import java.io.IOException;

import org.apache.commons.lang3.tuple.Pair;

import com.sochat.shared.io.UserIO;

/**
 * Interactively collects login credentials through a {@link UserIO} channel.
 */
public class ClientInputReader {

    private UserIO mIo;

    public ClientInputReader(UserIO io) {
        mIo = io;
    }

    /**
     * Prompts for a username and password on the attached I/O channel.
     *
     * @return the (username, password) pair, or {@code null} when the username is
     *         rejected or reading from the channel fails
     */
    public Pair<String, String> readCredentials() {
        try {
            mIo.logMessage("Enter your username: ");
            final String user = mIo.readLineBlocking();
            final String problem = usernameProblem(user);
            if (problem != null) {
                mIo.logError(problem);
                return null;
            }
            mIo.logMessage("Enter your password: ");
            final String pass = mIo.readLineBlocking();
            return Pair.of(user, pass);
        } catch (IOException e) {
            mIo.logError(e.toString());
            return null;
        }
    }

    /**
     * Validates a username against the protocol constraints (no colon separator,
     * at most 20 characters).
     *
     * @return the error message to show the user, or {@code null} when the name is acceptable
     */
    private String usernameProblem(String username) {
        if (username.contains(":")) {
            return "Username cannot contain colon.";
        }
        if (username.length() > 20) {
            return "Username is too long.";
        }
        return null;
    }
}
package com.coolweather.android.gson;

import com.google.gson.annotations.SerializedName;

/**
 * Gson model for one daily-forecast entry of the weather API response.
 *
 * Maps the JSON keys "tmp" (temperature range) and "cond" (condition text)
 * onto friendlier field names via {@link SerializedName}.
 *
 * Created by Administrator on 2018/12/5/005.
 */
public class Forecast {

    /** Forecast date string, taken verbatim from the JSON "date" field. */
    public String date;

    @SerializedName("tmp")
    public Temperature temperature;

    @SerializedName("cond")
    public More more;

    /**
     * Daily temperature range (the "tmp" object).
     *
     * Declared static: Gson cannot instantiate a non-static inner class during
     * deserialization because its implicit constructor needs an enclosing
     * instance; a static nested class also avoids the hidden outer reference.
     */
    public static class Temperature {
        public String max;
        public String min;
    }

    /**
     * Weather-condition block (the "cond" object); "txt_d" is the daytime
     * condition description. Static for the same Gson-deserialization reason
     * as {@link Temperature}.
     */
    public static class More {
        @SerializedName("txt_d")
        public String info;
    }
}
package tech.infofun.popularmovies.fragment;

import android.content.ContentResolver;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.v4.app.Fragment;
import android.support.v7.widget.GridLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;

import java.util.List;

import tech.infofun.popularmovies.R;
import tech.infofun.popularmovies.activity.FavoriteActivity;
import tech.infofun.popularmovies.adapter.MoviesAdapter;
import tech.infofun.popularmovies.database.DatabaseMovies;
import tech.infofun.popularmovies.model.Movie;

/**
 * Created by tfbarbosa on 18/05/17.
 *
 * Fragment showing the user's favorite movies in a grid, re-queried from the
 * favorites content provider on every resume/refresh.
 */
public class FavoriteFragment extends Fragment{

    // NOTE(review): static references to a view and adapter outlive the fragment and
    // can leak the Activity context; presumably kept static for external access —
    // confirm no other class reads these before converting to instance fields.
    public static RecyclerView mRecyclerView;
    public static MoviesAdapter mAdapter;

    private ContentResolver resolver;   // resolver backing the favorites queries
    private DatabaseMovies dbMovies;    // query helper wrapping the resolver
    private List<Movie> fav;            // favorites currently bound to the adapter

    @Override
    public View onCreateView(LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) {
        View view = inflater.inflate(R.layout.favorite_main_fragment, container, false);
        mRecyclerView = (RecyclerView) view.findViewById(R.id.recyclerView);
        // Dual-pane layouts get a wider grid (3 columns) than single-pane (2 columns).
        if(FavoriteActivity.getDual()) {
            mRecyclerView.setLayoutManager(new GridLayoutManager(getActivity(), 3));
        }else{
            mRecyclerView.setLayoutManager(new GridLayoutManager(getActivity(), 2));
        }
        mAdapter = new MoviesAdapter(getActivity());
        resolver = getActivity().getContentResolver();
        dbMovies = new DatabaseMovies(resolver);
        fav = dbMovies.getFavoriteMovies();
        mAdapter.setmMovieList(fav);
        mRecyclerView.setAdapter(mAdapter);
        return view;
    }

    @Override
    public void onSaveInstanceState(Bundle savedInstanceState){
        super.onSaveInstanceState(savedInstanceState);
    }

    @Override
    public void onActivityCreated(Bundle savedInstanceState){
        super.onActivityCreated(savedInstanceState);
    }

    @Override
    public void onResume(){
        // NOTE(review): fav.clear() is redundant — fav is reassigned on the next line,
        // and clearing also empties the list instance the adapter may still hold.
        fav.clear();
        fav = dbMovies.getFavoriteMovies();
        mAdapter.setmMovieList(fav);
        mRecyclerView.setAdapter(mAdapter);
        super.onResume();
    }

    @Override
    public void onDestroy(){
        super.onDestroy();
    }

    // Re-queries favorites and rebinds the adapter; same redundancy note as onResume().
    public void onRefresh(){
        fav.clear();
        fav = dbMovies.getFavoriteMovies();
        mAdapter.setmMovieList(fav);
        mRecyclerView.setAdapter(mAdapter);
    }
}
/* * Copyright (C) 2015 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package android.test.anno; import java.lang.annotation.*; @Retention(RetentionPolicy.RUNTIME) public @interface AnnoMissingClass { Class<?> value(); }
/* * Code Dx API * Code Dx provides a variety of REST APIs, allowing external applications and scripts to interface with core functionality. This guide documents the various REST resources provided by Code Dx. ## Authentication Authentication is a requirement when accessing API endpoints. There are two methods by which authentication may be performed. ### API Keys The primary method for authentication is passing an `API-Key` header containing a valid API key with all requests. For example&#58; `API-Key: 550e8400-e29b-41d4-a716-44665544000` API keys may be generated by Code Dx admins. Once they are generated, in most cases, they behave like regular users. They will need to be assigned user roles for any projects they will be used with. Although it is possible to assign the *admin* [role](UserGuide.html#UserRolesConfiguration) to an API key, the recommendation is to avoid doing so unless absolutely necessary. See the user guide for an overview about how to create and manage [API keys](UserGuide.html#APIKeysAdministration). ### HTTP Basic Authentication HTTP Basic authentication may be used to authenticate with the API as a regular user. This is accomplished by including an `Authorization` header containing a typical authorization credential. ## Error Handling ### Bad Requests For API calls that accept input, invalid values will trigger an HTTP 400 Bad Request status code. ### Server Errors For any API call, if an unexpected error occurs, an HTTP 500 Internal Server Error status code will be returned. If an error message is available, the response will include a basic message body describing the error&#58; ` { \"error\": \"error message\" } ` The error property will contain a string message indicating the nature of the error. ### Errors with Third-Party Applications Sometimes Code Dx must communicate with third-party applications like JIRA, Git, and certain enterprise tools. 
Some users may have in-house versions of these with self-signed certificates which may not be \"trusted\". In these cases, the API will respond with an HTTP 502 BAD GATEWAY status. If this happens, refer to [Trusting Self-Signed Certificates](InstallGuide.html#TrustingSelfSignedCertificates) in the install guide. ### API Unavailable In special circumstances, particularly during the installation and update phases, the API will be unavailable. When the API is unavailable, all calls will return an HTTP 503 Service Unavailable status, and no actions or side effects will occur as a result of the calls. ## Examples Code Dx's API uses REST over HTTP. As such, you can use any language/utility that supports making HTTP requests to interact with the API. The examples below use <a href=\"https://curl.haxx.se/\" target=\"_blank\">curl</a>, a popular command-line utility, to do so. First, you'll need to [generate an API Key](UserGuide.html#APIKeysAdministration). Second, while you can create projects through the API, these examples will assume that you've already created one. You'll need to know its project id number, which you can find by looking at the URL for the [Findings Page](UserGuide.html#Findings), which will end in a number. **Note:** Many API endpoints require a JSON body in the request. Most JSON will contain double-quotes (`\"`) and spaces, which have special meaning when used on the command line. In order to ensure your JSON body is interpreted as a single argument, you must <a href=\"https://en.wikipedia.org/wiki/Escape_character\" target=\"_blank\">escape</a> it properly. 
For example, if you wanted to `POST` the following JSON body&#58; ``` { \"name\": \"John Doe\" } ``` You would put a backslash (`\\`) before each double-quote (`\"`), and surround the whole thing with double-quotes&#58; ``` \"{ \\\"name\\\": \\\"John Doe\\\" }\" ``` The outermost double-quotes tell the command-line interpreter that everything within them is to be treated as a single argument (as opposed to the usual space-separated behavior). The backslash before each inner double-quote tells the command-line interpreter that you mean the literal double-quote character, and not the end of the quoted argument. In many *non-Windows* operating systems, you can also use a single-quote (`'`) to surround the argument, and skip the backslashes&#58; ``` '{ \"name\": \"John Doe\" }' ``` The examples below will use the double-quotes and backslashes style, as it works on most (if not all) operating systems. ### Running an Analysis To start an analysis, you can run ``` curl -F file1=@src.zip -H \"API-Key: 942d16d4-fb3f-4653-9cb3-a9da2e28e574\" https://<yourcodedxserver>/codedx/api/projects/<project id>/analysis ``` Make sure you use your own values for the `src.zip` file, the API Key, the hostname for your Code Dx server, and the project id. **Note:** This endpoint is not listed below due to a swagger limitation. ### Retrieving Finding Data There is a wide variety of data available for the findings of a project after running an analysis. Two examples are provided here. 
### Findings Table Data You can retrieve the data that's used to populate the [findings table](UserGuide.html#FindingsTable)&#58; ``` curl -H \"Content-Type: application/json\" -X POST -d \"{\\\"filter\\\":{},\\\"sort\\\":{\\\"by\\\":\\\"id\\\",\\\"direction\\\":\\\"ascending\\\"},\\\"pagination\\\":{\\\"page\\\":1,\\\"perPage\\\":10}}\" -H \"API-Key: 942d16d4-fb3f-4653-9cb3-a9da2e28e574\" https://<yourcodedxserver>/codedx/api/projects/<project id>/findings/table ``` Additional information about how to construct more useful filter and sort requests can be found in the documentation. ### Finding Metadata You can retrieve metadata for a finding, if you know the finding id. You can take the ID for a finding from the Findings Table&#58; ``` curl -H \"API-Key: 942d16d4-fb3f-4653-9cb3-a9da2e28e574\" https://<yourcodedxserver>/codedx/api/findings/<finding id> ``` ### Generating a Report You can use the API to generate a PDF [report](UserGuide.html#GenerateReport). ``` curl -H \"Content-Type: application/json\" -X POST -d \"{\\\"filter\\\":{},\\\"config\\\":{\\\"summaryMode\\\":\\\"simple\\\",\\\"detailsMode\\\":\\\"simple\\\",\\\"includeResultDetails\\\":true,\\\"includeComments\\\":false}}\" -H \"API-Key: 942d16d4-fb3f-4653-9cb3-a9da2e28e574\" https://<yourcodedxserver>/codedx/api/projects/<project id>/report/pdf ``` ## Generating a Client SDK If you require a client SDK for Code Dx, you can generate one using [Swagger Code Generator](https://github.com/swagger-api/swagger-codegen). Detailed instructions are available on the github page and our swagger spec can be found [here](swagger/swagger.json). * * OpenAPI spec version: 3.0.0 * * * NOTE: This class is auto generated by the swagger code generator program. * https://github.com/swagger-api/swagger-codegen.git * Do not edit the class manually. 
 */

package com.codedx.client.api;

import java.util.Objects;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonValue;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import java.util.ArrayList;
import java.util.List;

/**
 * RuleSetDeleteError
 *
 * Auto-generated swagger model. Presumably carries the error message and the ids of
 * projects affected when a rule-set delete fails — TODO confirm against the Code Dx
 * API spec. Do not edit by hand; regenerate via swagger-codegen instead.
 */
@javax.annotation.Generated(value = "io.swagger.codegen.languages.JavaClientCodegen", date = "2019-01-02T17:17:22.434-05:00")
public class RuleSetDeleteError {
  @JsonProperty("error")
  private String error = null;

  @JsonProperty("projectIds")
  private List<Integer> projectIds = null;

  // Fluent setter (generated builder-style API).
  public RuleSetDeleteError error(String error) {
    this.error = error;
    return this;
  }

   /**
   * Get error
   * @return error
  **/
  @ApiModelProperty(value = "")
  public String getError() {
    return error;
  }

  public void setError(String error) {
    this.error = error;
  }

  // Fluent setter (generated builder-style API).
  public RuleSetDeleteError projectIds(List<Integer> projectIds) {
    this.projectIds = projectIds;
    return this;
  }

  // Appends one project id, lazily creating the backing list.
  public RuleSetDeleteError addProjectIdsItem(Integer projectIdsItem) {
    if (this.projectIds == null) {
      this.projectIds = new ArrayList<Integer>();
    }
    this.projectIds.add(projectIdsItem);
    return this;
  }

   /**
   * Get projectIds
   * @return projectIds
  **/
  @ApiModelProperty(value = "")
  public List<Integer> getProjectIds() {
    return projectIds;
  }

  public void setProjectIds(List<Integer> projectIds) {
    this.projectIds = projectIds;
  }

  @Override
  public boolean equals(java.lang.Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    RuleSetDeleteError ruleSetDeleteError = (RuleSetDeleteError) o;
    return Objects.equals(this.error, ruleSetDeleteError.error) &&
        Objects.equals(this.projectIds, ruleSetDeleteError.projectIds);
  }

  @Override
  public int hashCode() {
    return Objects.hash(error, projectIds);
  }

  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder();
    sb.append("class RuleSetDeleteError {\n");
    sb.append(" error: ").append(toIndentedString(error)).append("\n");
    sb.append(" projectIds: ").append(toIndentedString(projectIds)).append("\n");
    sb.append("}");
    return sb.toString();
  }

  /**
   * Convert the given object to string with each line indented by 4 spaces
   * (except the first line).
   */
  private String toIndentedString(java.lang.Object o) {
    if (o == null) {
      return "null";
    }
    return o.toString().replace("\n", "\n ");
  }
}
/* * Copyright (c) 2014-2016 CODING. */ package net.coding.ide.utils; /** * Created by phy on 2015/3/7. */ public interface Callback <T> { public void call(T arg); }
package com.example.eva1_8_escenas;

import org.junit.Test;

import static org.junit.Assert.*;

/**
 * Example local unit test, which will execute on the development machine (host).
 *
 * @see <a href="http://d.android.com/tools/testing">Testing documentation</a>
 */
public class ExampleUnitTest {
    // Template sanity check generated by Android Studio; verifies the JUnit wiring.
    @Test
    public void addition_isCorrect() {
        assertEquals(4, 2 + 2);
    }
}
package local.org.bouncycastle.asn1;

import java.io.IOException;

import local.org.bouncycastle.util.Arrays;
import local.org.bouncycastle.util.Strings;

/**
 * DER PrintableString object.
 */
public class DERPrintableString
    extends ASN1Primitive
    implements ASN1String
{
    // Raw byte contents; one byte per character of the (ASCII-subset) string.
    private byte[] string;

    /**
     * return a printable string from the passed in object.
     *
     * @exception IllegalArgumentException if the object cannot be converted.
     */
    public static DERPrintableString getInstance(
        Object obj)
    {
        if (obj == null || obj instanceof DERPrintableString)
        {
            return (DERPrintableString)obj;
        }

        if (obj instanceof byte[])
        {
            try
            {
                return (DERPrintableString)fromByteArray((byte[])obj);
            }
            catch (Exception e)
            {
                throw new IllegalArgumentException("encoding error in getInstance: " + e.toString());
            }
        }

        throw new IllegalArgumentException("illegal object in getInstance: " + obj.getClass().getName());
    }

    /**
     * return a Printable String from a tagged object.
     *
     * @param obj the tagged object holding the object we want
     * @param explicit true if the object is meant to be explicitly
     *        tagged false otherwise.
     * @exception IllegalArgumentException if the tagged object cannot
     *        be converted.
     */
    public static DERPrintableString getInstance(
        ASN1TaggedObject obj,
        boolean          explicit)
    {
        ASN1Primitive o = obj.getObject();

        if (explicit || o instanceof DERPrintableString)
        {
            return getInstance(o);
        }
        else
        {
            // Implicitly tagged: the content was parsed as an octet string, so rewrap it.
            return new DERPrintableString(ASN1OctetString.getInstance(o).getOctets());
        }
    }

    /**
     * basic constructor - byte encoded string.
     */
    DERPrintableString(
        byte[] string)
    {
        this.string = string;
    }

    /**
     * basic constructor - this does not validate the string
     */
    public DERPrintableString(
        String string)
    {
        this(string, false);
    }

    /**
     * Constructor with optional validation.
     *
     * @param string the base string to wrap.
     * @param validate whether or not to check the string.
     * @throws IllegalArgumentException if validate is true and the string
     * contains characters that should not be in a PrintableString.
     */
    public DERPrintableString(
        String  string,
        boolean validate)
    {
        if (validate && !isPrintableString(string))
        {
            throw new IllegalArgumentException("string contains illegal characters");
        }

        this.string = Strings.toByteArray(string);
    }

    // Decodes the stored bytes back into a Java String.
    public String getString()
    {
        return Strings.fromByteArray(string);
    }

    // Returns a defensive copy of the raw byte contents.
    public byte[] getOctets()
    {
        return Arrays.clone(string);
    }

    // PrintableString is always a primitive (non-constructed) DER encoding.
    boolean isConstructed()
    {
        return false;
    }

    // Encoded length = identifier octet + length octets + content bytes.
    int encodedLength()
    {
        return 1 + StreamUtil.calculateBodyLength(string.length) + string.length;
    }

    // Writes the tag and contents to the output stream.
    void encode(
        ASN1OutputStream out)
        throws IOException
    {
        out.writeEncoded(BERTags.PRINTABLE_STRING, string);
    }

    public int hashCode()
    {
        return Arrays.hashCode(string);
    }

    // Content equality against another ASN.1 primitive of the same concrete type.
    boolean asn1Equals(
        ASN1Primitive o)
    {
        if (!(o instanceof DERPrintableString))
        {
            return false;
        }

        DERPrintableString s = (DERPrintableString)o;

        return Arrays.areEqual(string, s.string);
    }

    public String toString()
    {
        return getString();
    }

    /**
     * return true if the passed in String can be represented without
     * loss as a PrintableString, false otherwise.
     *
     * @return true if in printable set, false otherwise.
     */
    public static boolean isPrintableString(
        String str)
    {
        for (int i = str.length() - 1; i >= 0; i--)
        {
            char ch = str.charAt(i);

            // Anything outside 7-bit ASCII is immediately out.
            if (ch > 0x007f)
            {
                return false;
            }

            if ('a' <= ch && ch <= 'z')
            {
                continue;
            }

            if ('A' <= ch && ch <= 'Z')
            {
                continue;
            }

            if ('0' <= ch && ch <= '9')
            {
                continue;
            }

            // Remaining punctuation allowed by the PrintableString alphabet.
            switch (ch)
            {
            case ' ':
            case '\'':
            case '(':
            case ')':
            case '+':
            case '-':
            case '.':
            case ':':
            case '=':
            case '?':
            case '/':
            case ',':
                continue;
            }

            return false;
        }

        return true;
    }
}
/*
 * Copyright © 2018 Apple Inc. and the ServiceTalk project authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.servicetalk.client.internal;

import io.servicetalk.concurrent.Cancellable;
import io.servicetalk.concurrent.api.Completable;
import io.servicetalk.concurrent.api.Publisher;
import io.servicetalk.concurrent.internal.LatestValueSubscriber;

import java.util.concurrent.atomic.AtomicIntegerFieldUpdater;

import static io.servicetalk.concurrent.Cancellable.IGNORE_CANCEL;
import static java.util.concurrent.atomic.AtomicIntegerFieldUpdater.newUpdater;

/**
 * Base implementation tracking a connection's reserve/idle/quit state and in-flight
 * request count in a single lock-free integer (see the state comment on the field).
 */
abstract class AbstractReservableRequestConcurrencyController
        implements ReservableRequestConcurrencyController {
    // CAS access to the pendingRequests state machine below.
    private static final AtomicIntegerFieldUpdater<AbstractReservableRequestConcurrencyController>
            pendingRequestsUpdater = newUpdater(AbstractReservableRequestConcurrencyController.class,
            "pendingRequests");

    private static final int STATE_QUIT = -2;
    private static final int STATE_RESERVED = -1;
    private static final int STATE_IDLE = 0;

    /*
     * Following semantics:
     * STATE_RESERVED if this is reserved.
     * STATE_QUIT if quit command issued.
     * STATE_IDLE if connection is not used.
     * pending request count if none of the above states.
     */
    @SuppressWarnings("unused")
    private volatile int pendingRequests;

    // Holds the most recent max-concurrency value emitted by the setting stream.
    private final LatestValueSubscriber<Integer> maxConcurrencyHolder;

    AbstractReservableRequestConcurrencyController(final Publisher<Integer> maxConcurrencySettingStream,
                                                  final Completable onClose) {
        maxConcurrencyHolder = new LatestValueSubscriber<>();
        maxConcurrencySettingStream.subscribe(maxConcurrencyHolder);
        // When the underlying resource closes (normally or exceptionally) move to the
        // terminal QUIT state so future reservations fail.
        onClose.subscribe(new Completable.Subscriber() {
            @Override
            public void onSubscribe(Cancellable cancellable) {
                // No op
            }

            @Override
            public void onComplete() {
                pendingRequests = STATE_QUIT;
            }

            @Override
            public void onError(Throwable ignored) {
                pendingRequests = STATE_QUIT;
            }
        });
    }

    /** Decrements the in-flight request count for a finished request. */
    @Override
    public final void requestFinished() {
        pendingRequestsUpdater.decrementAndGet(this);
    }

    /** Reservation succeeds only from IDLE (no requests in flight, not quit). */
    @Override
    public boolean tryReserve() {
        return pendingRequestsUpdater.compareAndSet(this, STATE_IDLE, STATE_RESERVED);
    }

    /**
     * Returns a Completable that, on subscribe, attempts the RESERVED -> IDLE transition;
     * completes on success, errors if the resource was closed or never reserved.
     */
    @Override
    public Completable releaseAsync() {
        return new Completable() {
            @Override
            protected void handleSubscribe(Subscriber subscriber) {
                subscriber.onSubscribe(IGNORE_CANCEL);
                // Ownership is maintained by the caller.
                if (pendingRequestsUpdater.compareAndSet(AbstractReservableRequestConcurrencyController.this,
                        STATE_RESERVED, STATE_IDLE)) {
                    subscriber.onComplete();
                } else {
                    subscriber.onError(new IllegalStateException("Resource " + this +
                            (pendingRequests == STATE_QUIT ? " is closed." : " was not reserved.")));
                }
            }
        };
    }

    /** Latest max-concurrency value seen, or {@code defaultValue} if none arrived yet. */
    final int getLastSeenMaxValue(int defaultValue) {
        return maxConcurrencyHolder.getLastSeenValue(defaultValue);
    }

    // Raw state value; may be a count or one of the STATE_* sentinels.
    final int getPendingRequests() {
        return pendingRequests;
    }

    // CAS helper for subclasses driving the same state machine.
    final boolean casPendingRequests(int oldValue, int newValue) {
        return pendingRequestsUpdater.compareAndSet(this, oldValue, newValue);
    }
}
/*
 * Copyright 2016-2016 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.joinfaces.example;

import org.junit.Test;
import org.springframework.security.config.annotation.web.builders.HttpSecurity;

/**
 * Integration test for SecurityConfig: verifies that configuring with a null
 * {@link HttpSecurity} surfaces as a RuntimeException instead of succeeding silently.
 */
public class SecurityConfigIT {

    @Test(expected = RuntimeException.class)
    public void exceptionOnConfigureNull() {
        // The cast disambiguates the overload taking HttpSecurity.
        new SecurityConfig().configure((HttpSecurity) null);
    }
}
package gedi.solutions.geode.operations.functions;

import java.lang.management.ManagementFactory;
import java.util.Properties;

import javax.management.MBeanServer;
import javax.management.ObjectName;

import org.apache.geode.cache.Cache;
import org.apache.geode.cache.CacheFactory;
import org.apache.geode.cache.Declarable;
import org.apache.geode.cache.execute.Function;
import org.apache.geode.cache.execute.FunctionContext;
import org.apache.geode.distributed.DistributedSystem;
import org.apache.logging.log4j.Logger;

/**
 * <p>
 * The function will shutdown the distribute system
 * thus preventing disk stores from being corrupted.
 * </p>
 *
 * <p>
 * gfsh&gt;execute function --group="gbc-data-node" --id="SystemShutdown"
 * </p>
 *
 * <p>
 * Note the System.exit(0) will be executed to stop the JVM
 * </p>
 *
 * @author Gregory Green
 *
 */
public class SystemShutDownFunction implements Function<Object>, Declarable
{
    //@Autowired
    //private LoggingService loggingService;

    private static final long serialVersionUID = -4345180049555487810L;

    /**
     * Looks up the Distributed System MBean and invokes its shutDownAllMembers
     * operation, then terminates this JVM via System.exit(0).
     * Note that after the initiate function is executed other members may experience a
     * "Disconnected" distributed-system error.
     */
    @Override
    public void execute(FunctionContext<Object> functionContext)
    {
        String distributeMemberName = "unknown";
        Logger logger = null;
        try
        {
            Cache cache = CacheFactory.getAnyInstance();

            if(cache != null && !cache.isClosed())
            {
                DistributedSystem distributedSystem = cache.getDistributedSystem();

                //Assigned distributed member name
                distributeMemberName = distributedSystem.getDistributedMember().getName();

                if(distributedSystem.isConnected() )
                {
                    MBeanServer jmx = ManagementFactory.getPlatformMBeanServer();

                    // Well-known name of Geode's distributed-system MBean.
                    ObjectName on = new ObjectName("GemFire:service=System,type=Distributed");

                    logger = org.apache.logging.log4j.LogManager.getLogger(getClass());

                    if(logger != null)
                        logger.fatal("FUNCTION:SystemDownFunction invoking shutDownAllMembers on member:"+distributeMemberName);

                    try
                    {
                        jmx.invoke(on, "shutDownAllMembers", null, null);
                    }
                    catch(Exception e)
                    {
                        String message = e.getMessage();

                        // The invoking member may already be torn down by the shutdown it
                        // triggered; treat that specific failure as success and exit.
                        if(message != null && message.contains("distributed system has been disconnected"))
                        {
                            //ignore and just exit JVM
                            if(logger != null)
                            {
                                logger.warn("FUNCTION:SystemDownFunction shutting down disconnected member:"+distributeMemberName);
                            }
                            System.exit(0);
                        }
                        else
                            throw e; //rethrow
                    }
                }
            }
        }
        catch (Exception e)
        {
            // Best-effort: log and fall through to the unconditional exit below.
            if(logger != null)
            {
                logger.warn(e.toString());
            }
        }

        if(logger != null)
        {
            logger.warn("FUNCTION:SystemDownFunction shutting down member:"+distributeMemberName);
        }

        // The JVM always exits, whether or not the JMX shutdown succeeded.
        System.exit(0);
    }// --------------------------------------------

    /**
     * @return the function id, "SystemShutDownFunction"
     */
    @Override
    public String getId()
    {
        return "SystemShutDownFunction";
    }

    // No result is sent back — the member exits before it could reply.
    @Override
    public boolean hasResult()
    {
        return false;
    }

    // Not HA — presumably a shutdown must not be re-executed on another member; confirm.
    @Override
    public boolean isHA()
    {
        return false;
    }

    @Override
    public boolean optimizeForWrite()
    {
        return false;
    }

    // Declarable hook; this function requires no configuration properties.
    public void init(Properties arg0)
    {
        // TODO Auto-generated method stub
    }
}
/* See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * Esri Inc. licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.esri.gpt.control.livedata.selector;

import com.esri.gpt.control.livedata.IRenderer;
import com.esri.gpt.control.livedata.IRendererFactory;
import com.esri.gpt.control.livedata.LoginDlgRenderer;
import com.esri.gpt.control.livedata.selector.HttpRequestListenerMap.ISelector;
import com.esri.gpt.framework.http.CredentialProvider;
import java.util.Collection;

/**
 * Renderer selector.
 * Probes the URL through the registered renderer factories, preferring listeners
 * registered by "definitive" factories for the exact URL, then falling back to the
 * full listener map.
 */
public class RendererSelector {

  // Registered renderer factories consulted for each URL.
  private Collection<IRendererFactory> factories;

  /**
   * Creates instance of the selector.
   * @param factories renderer factories
   */
  public RendererSelector(Collection<IRendererFactory> factories) {
    this.factories = factories;
  }

  /**
   * Selects factory for the given URL.
   * @param url URL
   * @param cp credential provider or <code>null</code>
   * @return renderer or <code>null</code> if no renderer found
   */
  public IRenderer select(String url, CredentialProvider cp) {
    IRenderer renderer = null;
    final Setters setters = new Setters();
    HttpRequestListenerMap map = new HttpRequestListenerMap();
    HttpRequestListenerMap exactMap = new HttpRequestListenerMap();
    // Register every factory on the general map; definitive factories are also
    // registered on the exact-match map used for the first dispatch attempt.
    for (IRendererFactory rf : factories) {
      rf.register(map, rf.isDefinitive() ? setters.getDefinitiveSetter() : setters.getNonDefinitiveSetter(), url);
      if (rf.isDefinitive()) {
        rf.register(exactMap, setters.getDefinitiveSetter(), url);
      }
    }
    // Narrow the exact map to listeners registered for precisely this URL.
    final String exactUrl = url;
    exactMap = exactMap.select(new ISelector() {
      public boolean eligible(HttpRequestDefinition httpReqDef) {
        return httpReqDef.getUrl().equalsIgnoreCase(exactUrl);
      }
    });
    HttpRequestDispatcher exactDisp = new HttpRequestDispatcher(setters, exactMap, cp) {
      @Override
      protected void onUnauthorizedException() {
        // Credentials rejected: present a login dialog renderer instead.
        setters.getDefinitiveSetter().set(new LoginDlgRenderer());
      }
    };
    // Dispatch, then wait up to 30s for a response handler to set a renderer.
    // NOTE(review): wait() is not wrapped in a condition loop (a spurious wakeup
    // returns early) and InterruptedException is swallowed without re-interrupting
    // the thread — confirm this is acceptable for the calling context.
    synchronized (setters) {
      exactDisp.dispatch();
      try {
        setters.wait(30000);
      } catch (InterruptedException ex) {
      }
      renderer = setters.getRenderer();
    }
    // Exact-URL attempt produced nothing: retry against the full listener map.
    if (renderer == null) {
      HttpRequestDispatcher disp = new HttpRequestDispatcher(setters, map, cp) {
        @Override
        protected void onUnauthorizedException() {
          setters.getDefinitiveSetter().set(new LoginDlgRenderer());
        }
      };
      synchronized (setters) {
        disp.dispatch();
        try {
          setters.wait(30000);
        } catch (InterruptedException ex) {
        }
        renderer = setters.getRenderer();
      }
    }
    return renderer;
  }
}
// Copyright 2020 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package com.google.api.ads.admanager.jaxws.v202002;

import javax.xml.bind.annotation.XmlEnum;
import javax.xml.bind.annotation.XmlType;

// Auto-generated JAXB binding for the Ad Manager GrpAge simple type; regenerate
// from the WSDL rather than editing by hand.

/**
 * <p>Java class for GrpAge.
 *
 * <p>The following schema fragment specifies the expected content contained within this class.
 * <p>
 * <pre>
 * &lt;simpleType name="GrpAge"&gt;
 *   &lt;restriction base="{http://www.w3.org/2001/XMLSchema}string"&gt;
 *     &lt;enumeration value="UNKNOWN"/&gt;
 *     &lt;enumeration value="AGE_UNKNOWN"/&gt;
 *     &lt;enumeration value="AGE_0_TO_17"/&gt;
 *     &lt;enumeration value="AGE_18_TO_24"/&gt;
 *     &lt;enumeration value="AGE_25_TO_34"/&gt;
 *     &lt;enumeration value="AGE_35_TO_44"/&gt;
 *     &lt;enumeration value="AGE_45_TO_54"/&gt;
 *     &lt;enumeration value="AGE_55_TO_64"/&gt;
 *     &lt;enumeration value="AGE_65_PLUS"/&gt;
 *     &lt;enumeration value="AGE_18_TO_49"/&gt;
 *     &lt;enumeration value="AGE_21_TO_34"/&gt;
 *     &lt;enumeration value="AGE_21_TO_49"/&gt;
 *     &lt;enumeration value="AGE_21_PLUS"/&gt;
 *     &lt;enumeration value="AGE_25_TO_49"/&gt;
 *     &lt;enumeration value="AGE_21_TO_44"/&gt;
 *     &lt;enumeration value="AGE_21_TO_54"/&gt;
 *     &lt;enumeration value="AGE_21_TO_64"/&gt;
 *     &lt;enumeration value="AGE_35_TO_49"/&gt;
 *   &lt;/restriction&gt;
 * &lt;/simpleType&gt;
 * </pre>
 *
 */
@XmlType(name = "GrpAge")
@XmlEnum
public enum GrpAge {

    /**
     *
     * The value returned if the actual value is not exposed by the requested API version.
     *
     *
     */
    UNKNOWN,

    /**
     *
     * When the age range is not available due to low impression levels, GRP privacy thresholds are
     * activated and prevent us from specifying age.
     *
     *
     */
    AGE_UNKNOWN,
    AGE_0_TO_17,
    AGE_18_TO_24,
    AGE_25_TO_34,
    AGE_35_TO_44,
    AGE_45_TO_54,
    AGE_55_TO_64,
    AGE_65_PLUS,
    AGE_18_TO_49,
    AGE_21_TO_34,
    AGE_21_TO_49,
    AGE_21_PLUS,
    AGE_25_TO_49,
    AGE_21_TO_44,
    AGE_21_TO_54,
    AGE_21_TO_64,
    AGE_35_TO_49;

    // The wire value is identical to the constant name.
    public String value() {
        return name();
    }

    // Inverse of value(); throws IllegalArgumentException for unrecognized input.
    public static GrpAge fromValue(String v) {
        return valueOf(v);
    }

}
package io.binac.leetcode;

import java.util.ArrayList;
import java.util.List;

/**
 * Given a string s, partition s such that every substring of the partition is a palindrome.
 * <p>
 * <p>Return all possible palindrome partitioning of s.
 * <p>
 * <p>Example:
 * <blockquote><pre>
 * Input: "aab"
 * Output:
 * [
 *   ["aa","b"],
 *   ["a","a","b"]
 * ]
 * </pre></blockquote>
 */
public class PalindromePartitioning {
    public static class Solution1 {
        /**
         * Depth-first search: extends {@code path} with every palindromic prefix of
         * {@code chars[index..]}, recurses past it, then backtracks; a complete cover
         * of the input is copied into {@code result}.
         */
        private void partition(char[] chars, int index, boolean[][] palindrome,
                               List<List<String>> result, List<String> path) {
            if (index == chars.length) {
                result.add(new ArrayList<>(path));
                return;
            }
            for (int j = index; j < chars.length; ++j) {
                if (palindrome[index][j]) {
                    path.add(new String(chars, index, j - index + 1));
                    partition(chars, j + 1, palindrome, result, path);
                    path.remove(path.size() - 1);
                }
            }
        }

        /**
         * Returns every partition of {@code s} into palindromic substrings.
         *
         * @param s input string (empty input yields a single empty partition)
         * @return all palindromic partitions, each as a list of substrings
         */
        public List<List<String>> partition(String s) {
            final char[] chars = s.toCharArray();
            final int len = chars.length;
            // dp[i][j] == true iff chars[i..j] is a palindrome. Filled with i descending
            // so dp[i + 1][j - 1] is always computed before dp[i][j] needs it.
            final boolean[][] dp = new boolean[len][len];
            for (int i = len - 1; i >= 0; --i) {
                dp[i][i] = true;
                if (i + 1 < len && chars[i] == chars[i + 1])
                    dp[i][i + 1] = true;
                for (int j = i + 2; j < len; ++j) {
                    if (dp[i + 1][j - 1] && chars[i] == chars[j])
                        dp[i][j] = true;
                }
            }
            List<List<String>> result = new ArrayList<>();
            partition(chars, 0, dp, result, new ArrayList<>(len));
            return result;
        }
    }
}
/*
 * Copyright (c) 2008-2020, Hazelcast, Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.hazelcast.internal.nio.ascii;

import com.hazelcast.internal.nio.tcp.TcpIpConnection;

/**
 * This interface is a text protocols policy enforcement point. It checks incoming command lines and validates if the command
 * can be processed. If the command is unknown or not allowed the connection is closed.
 */
interface TextProtocolFilter {

    /**
     * Validates a single incoming command line against the policy.
     *
     * @param commandLine the raw text-protocol command line to check
     * @param connection  the connection the command arrived on; per the contract above,
     *                    implementations close it when the command is unknown or not allowed
     */
    void filterConnection(String commandLine, TcpIpConnection connection);
}
/*
 * StandardButton.java
 *
 * Copyright (C) 2000-2003 Peter Graves
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version 2
 * of the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
 */

package org.armedbear.j;

import java.awt.Dimension;
import javax.swing.JButton;

/**
 * A {@link JButton} that pins itself to a fixed 80x24 size when
 * {@code Editor.lookAndFeel} is null (i.e. the default look and feel is active).
 */
public final class StandardButton extends JButton
{
    public static final int DEFAULT_WIDTH  = 80;
    public static final int DEFAULT_HEIGHT = 24;

    public StandardButton(String text)
    {
        super(text);
        // Under a custom look and feel the button keeps its L&F-provided sizing.
        if (Editor.lookAndFeel == null)
            applyFixedSize();
    }

    // Locks minimum, maximum and preferred size to the standard dimensions.
    private void applyFixedSize()
    {
        final Dimension size = new Dimension(DEFAULT_WIDTH, DEFAULT_HEIGHT);
        setMinimumSize(size);
        setMaximumSize(size);
        setPreferredSize(size);
    }
}
/* $Id$
 *
 * Copyright (c) 2008-2011, The University of Edinburgh.
 * All Rights Reserved
 */
package uk.ac.ed.ph.snuggletex.utilities;

import uk.ac.ed.ph.snuggletex.SnuggleEngine;

import javax.xml.transform.Templates;

/**
 * Encapsulates a simple cache for the internal XSLT stylesheets used by SnuggleTeX.
 * This can be used if you want SnuggleTeX to integrate with some kind of XSLT caching mechanism
 * (e.g. your own).
 * <p>
 * A {@link SnuggleEngine} creates a default implementation of this that caches stylesheets
 * over the lifetime of the {@link SnuggleEngine} Object, which is reasonable. If you want
 * to change this, create your own implementation and attach it to your {@link SnuggleEngine}.
 * <p>
 * You can use the {@link SimpleStylesheetCache} in your own applications if you want to.
 *
 * <h2>Internal Note</h2>
 *
 * (I'm not currently enforcing that implementations of this should be thread-safe. Therefore, make
 * sure that you synchronise correctly when accessing an instance of this cache. You would normally
 * just use a {@link StylesheetManager} instance to do this safely.)
 *
 * @see SimpleStylesheetCache
 *
 * @author David McKain
 * @version $Revision$
 */
public interface StylesheetCache {

    /**
     * Tries to retrieve an XSLT stylesheet from the cache having the given key.
     * <p>
     * Return a previously cached {@link Templates} or null if your cache doesn't want to cache
     * this or if it does not contain the required result.
     *
     * @param key key identifying the stylesheet
     * @return cached {@link Templates} for the key, or null
     */
    Templates getStylesheet(String key);

    /**
     * Instructs the cache that it might want to store the given XSLT stylesheet corresponding
     * to the given key.
     * <p>
     * Implementations can safely choose to do absolutely nothing here if they want.
     *
     * @param key key identifying the stylesheet
     * @param stylesheet compiled stylesheet the cache may choose to retain
     */
    void putStylesheet(String key, Templates stylesheet);
}
package com.icepoint.base.web.resource.util;

import com.fasterxml.jackson.databind.JsonNode;
import lombok.experimental.UtilityClass;
import org.springframework.lang.Nullable;

import java.util.stream.StreamSupport;

/**
 * Jackson helpers for converting a {@link JsonNode} into the most specific
 * plain-Java value it represents.
 */
@UtilityClass
public class JacksonUtils {

    /**
     * Unwraps a {@link JsonNode} into a plain Java value:
     * {@code null} for missing/null nodes, {@code Object[]} for arrays
     * (recursively unwrapped), {@code Long}/{@code Integer}/{@code Boolean}
     * for the matching scalar node types, and the node's text form for
     * everything else.
     *
     * @param jsonNode node to unwrap; may be null
     * @return the unwrapped value, or null
     */
    @Nullable
    public static Object getMostSpecificValue(@Nullable JsonNode jsonNode) {
        if (jsonNode == null || jsonNode.isNull()) {
            return null;
        }
        if (jsonNode.isArray()) {
            return unwrapArray(jsonNode);
        }
        if (jsonNode.isLong()) {
            return jsonNode.asLong();
        }
        if (jsonNode.isInt()) {
            return jsonNode.asInt();
        }
        if (jsonNode.isBoolean()) {
            return jsonNode.asBoolean();
        }
        // Fallback: textual representation (covers strings and any other node type).
        return jsonNode.asText();
    }

    /** Recursively unwraps every element of an array node into an Object[]. */
    private static Object[] unwrapArray(JsonNode arrayNode) {
        return StreamSupport.stream(arrayNode.spliterator(), false)
                .map(JacksonUtils::getMostSpecificValue)
                .toArray();
    }
}
package utils; import java.util.Random; public class ArrayUtils { /** * 打印数组的工具类 * @param nums */ public static void printArray(Comparable[] nums){ if (nums==null||nums.length==0){ throw new IllegalArgumentException("array is null"); } System.out.print("["); for (int i = 0; i < nums.length; i++) { if (i!=nums.length-1){ System.out.print(nums[i]+","); }else { System.out.print(nums[i]+"]"); } } } /** * 生成一个整型的随机数组 * @param len * @param start 随机数范围起始位置 * @param end 随机数范围终止位置 * @return */ public static Integer[] createRandomArray(int len,int start,int end){ Integer[] arr=new Integer[len]; for (int i = 0; i < len; i++) { Random random = new Random(); int num = random.nextInt((end - start) + 1) + start; arr[i]=num; } return arr; } /** * 打印二维数组的工具类 * @param martix */ public static void print2DimenArray(Comparable[][] martix){ int m = martix.length; if (m==0){ throw new IllegalArgumentException("martix error"); } int n=martix[0].length; for (int i = 0; i < m; i++) { for (int j = 0; j < n; j++) { if (j==n-1){ System.out.print(martix[i][j]); }else { System.out.print(martix[i][j]+","); } } System.out.println(); } } }
package com.mcoding.base.product.service.productCategoryRef.impl; import com.mcoding.base.core.PageView; import com.mcoding.base.product.bean.productCategoryRef.ProductCategoryRef; import com.mcoding.base.product.bean.productCategoryRef.ProductCategoryRefExample; import com.mcoding.base.product.persistence.productCategoryRef.ProductCategoryRefMapper; import com.mcoding.base.product.service.productCategoryRef.ProductCategoryRefService; import java.util.List; import javax.annotation.Resource; import org.springframework.cache.annotation.CacheEvict; import org.springframework.cache.annotation.Cacheable; import org.springframework.stereotype.Service; @Service("productCategoryRefService") public class ProductCategoryRefServiceImpl implements ProductCategoryRefService { @Resource protected ProductCategoryRefMapper productCategoryRefMapper; @CacheEvict(value={"productCategoryRef"}, allEntries=true) @Override public void addObj(ProductCategoryRef t) { this.productCategoryRefMapper.insertSelective(t); } @CacheEvict(value={"productCategoryRef"}, allEntries=true) @Override public void deleteObjById(int id) { this.productCategoryRefMapper.deleteByPrimaryKey(id); } @CacheEvict(value={"productCategoryRef"}, allEntries=true) @Override public void modifyObj(ProductCategoryRef t) { if (t.getId() == null || t.getId() ==0) { throw new NullPointerException("id 为空,无法更新"); } this.productCategoryRefMapper.updateByPrimaryKeySelective(t); } @Cacheable(value="productCategoryRef", key="'ProductCategoryRefService_' + #root.methodName + '_' +#id") @Override public ProductCategoryRef queryObjById(int id) { return this.productCategoryRefMapper.selectByPrimaryKey(id); } @Cacheable(value="productCategoryRef", key="'ProductCategoryRefService_' + #root.methodName + '_'+ #example.toJson()") @Override public List<ProductCategoryRef> queryAllObjByExample(ProductCategoryRefExample example) { return this.productCategoryRefMapper.selectByExample(example); } @Cacheable(value="productCategoryRef", 
key="'ProductCategoryRefService_' + #root.methodName + '_'+ #example.toJson()") @Override public PageView<ProductCategoryRef> queryObjByPage(ProductCategoryRefExample example) { PageView<ProductCategoryRef> pageView = example.getPageView(); if (pageView == null) { pageView = new PageView<>(1, 10); example.setPageView(pageView); } pageView.setQueryResult(this.productCategoryRefMapper.selectByExampleByPage(example)); return pageView; } }
package org.workcraft.dom.references;

import org.workcraft.dom.Node;
import org.workcraft.dom.hierarchy.NamespaceHelper;
import org.workcraft.dom.hierarchy.NamespaceProvider;
import org.workcraft.dom.math.MathNode;
import org.workcraft.observation.HierarchyEvent;
import org.workcraft.observation.HierarchySupervisor;
import org.workcraft.observation.NodesAddedEvent;
import org.workcraft.observation.NodesDeletedEvent;
import org.workcraft.serialisation.References;
import org.workcraft.utils.Hierarchy;

import java.util.Collection;
import java.util.HashMap;

/**
 * Maintains the mapping between hierarchy nodes and their names/references,
 * delegating per-namespace naming to one {@link NameManager} per
 * {@link NamespaceProvider}. Listens to hierarchy add/delete events to keep
 * names assigned and caches cleaned.
 */
public class HierarchyReferenceManager extends HierarchySupervisor implements ReferenceManager {

    // One NameManager per namespace provider, created lazily in getNameManager().
    private final HashMap<NamespaceProvider, NameManager> managers = new HashMap<>();

    // every node belongs to some name space provider (except the main root node of the model)
    private final HashMap<Node, NamespaceProvider> node2namespace = new HashMap<>();
    private NamespaceProvider topProvider; // namespace provided by root
    // Initial references to restore on attach(); cleared (set to null) once applied.
    private References refs;

    public HierarchyReferenceManager() {
        this(null);
    }

    /**
     * @param refs optional pre-existing node references to restore when this
     *             manager is attached to a root; may be null
     */
    public HierarchyReferenceManager(References refs) {
        this.refs = refs;
    }

    /**
     * Returns the namespace provider owning the given node, resolving it from
     * the hierarchy on a cache miss and memoising the result (a null result is
     * also cached, e.g. for the root).
     */
    public NamespaceProvider getNamespaceProvider(Node node) {
        NamespaceProvider provider = node2namespace.get(node);
        if (provider == null) {
            Node container = node.getParent();
            if (container != null) {
                provider = Hierarchy.getNearestAncestor(container, NamespaceProvider.class);
            }
        }
        node2namespace.put(node, provider);
        return provider;
    }

    /** Moves the nodes into the given namespace within this same manager. */
    public void setNamespaceProvider(Collection<Node> nodes, NamespaceProvider provider) {
        setNamespaceProvider(nodes, this, provider);
    }

    /**
     * Moves the given nodes from a (possibly different) source reference
     * manager into {@code dstProvider}'s namespace, transferring their names
     * and resolving name clashes by deriving fresh names.
     *
     * @param nodes         nodes being moved
     * @param srcRefManager manager the nodes currently belong to
     * @param dstProvider   destination namespace; null means the root namespace
     */
    public void setNamespaceProvider(Collection<Node> nodes, HierarchyReferenceManager srcRefManager,
            NamespaceProvider dstProvider) {
        if (dstProvider == null) {
            dstProvider = topProvider;
        }
        for (Node node : nodes) {
            NamespaceProvider srcProvider = srcRefManager.getNamespaceProvider(node);
            if (srcProvider != null) {
                String name = srcRefManager.getName(node);
                // Clear cached data in the local and the source reference manager.
                node2namespace.remove(node);
                srcRefManager.node2namespace.remove(node);
                // Do not assign name if it was not assigned in the first place (e.g. for an implicit place).
                // The second clause skips a no-op move within the very same manager and namespace.
                if ((name != null) && ((dstProvider != srcProvider) || (node2namespace != srcRefManager.node2namespace))) {
                    NameManager srcNameManager = srcRefManager.getNameManager(srcProvider);
                    NameManager dstNameManager = this.getNameManager(dstProvider);
                    srcNameManager.remove(node);
                    Node clashingNode = dstNameManager.getNode(name);
                    // If the clashing node is itself part of this move, rename it first.
                    if (nodes.contains(clashingNode)) {
                        String newName = dstNameManager.getDerivedName(clashingNode, name);
                        dstNameManager.setName(clashingNode, newName, true);
                    }
                    String newName = dstNameManager.getDerivedName(node, name);
                    // Allow flexibility on naming nodes in case there is name clash in dstNameManager
                    dstNameManager.setName(node, newName, false);
                }
            }
        }
    }

    @Override
    public void attach(Node root) {
        // Root must be a namespace provider.
        topProvider = (NamespaceProvider) root;
        if (refs != null) {
            // Restore the serialised references once, then drop them.
            for (Node n : Hierarchy.getDescendantsOfType(root, Node.class)) {
                setExistingReference(n);
            }
            refs = null;
        }
        super.attach(root);
    }

    /**
     * Returns (lazily creating) the name manager for the given namespace;
     * null maps to the root namespace.
     */
    public NameManager getNameManager(NamespaceProvider provider) {
        if (provider == null) {
            provider = topProvider;
        }
        NameManager man = managers.get(provider);
        if (man == null) {
            man = createNameManager();
            managers.put(provider, man);
        }
        return man;
    }

    /** Convenience: name manager of the namespace that owns the given node. */
    public NameManager getNameManager(Node node) {
        return getNameManager(getNamespaceProvider(node));
    }

    /** Factory hook; subclasses may supply a different NameManager flavour. */
    protected NameManager createNameManager() {
        return new DefaultNameManager();
    }

    /**
     * Applies the name stored in {@link #refs} to the node, if any.
     * Only called from attach() while refs is non-null.
     */
    protected void setExistingReference(Node node) {
        String reference = refs.getReference(node);
        if (reference != null) {
            String name = NamespaceHelper.getReferenceName(reference);
            setName(node, name);
        }
    }

    /**
     * Resolves a (possibly hierarchical) reference string to a node, walking
     * one path segment at a time. An empty reference or the bare hierarchy
     * separator resolves to the provider itself.
     */
    @Override
    public Node getNodeByReference(NamespaceProvider provider, String reference) {
        if (provider == null) {
            provider = topProvider;
        }
        if (reference.isEmpty() || reference.equals(NamespaceHelper.getHierarchySeparator())) {
            return provider;
        }
        String head = NamespaceHelper.getReferenceHead(reference);
        String tail = NamespaceHelper.getReferenceTail(reference);
        NameManager man = getNameManager(provider);
        Node node = man.getNode(head);
        if (node instanceof NamespaceProvider) {
            // Recurse into the nested namespace with the remaining path.
            return getNodeByReference((NamespaceProvider) node, tail);
        }
        return node;
    }

    /**
     * Builds the reference of a node relative to the given provider (null
     * means the root) by walking up the hierarchy and prepending names.
     * Returns null for a node with no assigned name.
     */
    @Override
    public String getNodeReference(NamespaceProvider provider, Node node) {
        if (node == topProvider) {
            return NamespaceHelper.getHierarchySeparator();
        }
        if (provider == null) {
            provider = topProvider;
        }
        NamespaceProvider component = null;
        String result = "";
        do {
            component = getNamespaceProvider(node);
            if (component != null) {
                String name = getNameManager(component).getName(node);
                // The unnamed component just returns null.
                if (name == null) return null;
                // NOTE(review): segments are concatenated without an explicit
                // separator here — presumably names carry it; confirm upstream.
                result = name + result;
                node = node.getParent();
            }
        } while ((node != null) && (component != null) && (component != provider));
        return result;
    }

    /**
     * Hierarchy hook: assigns default names to newly added nodes (and their
     * descendants) and removes names/cache entries for deleted ones.
     */
    @Override
    public void handleEvent(HierarchyEvent e) {
        if (e instanceof NodesAddedEvent) {
            for (Node node : e.getAffectedNodes()) {
                if (node.getParent() != null) {
                    // if it is not a root node
                    NameManager man = getNameManager(node);
                    man.setDefaultNameIfUnnamed(node);
                    // additional call to propagate the name data after calling setDefaultNameIfUnnamed
                    setName(node, man.getName(node));
                }
                for (Node childNode : Hierarchy.getDescendantsOfType(node, Node.class)) {
                    NameManager mgr = getNameManager(childNode);
                    mgr.setDefaultNameIfUnnamed(childNode);
                    // additional call to propagate the name data after calling setDefaultNameIfUnnamed
                    setName(childNode, mgr.getName(childNode));
                }
            }
        }
        if (e instanceof NodesDeletedEvent) {
            for (Node node : e.getAffectedNodes()) {
                getNameManager(node).remove(node);
                node2namespace.remove(node);
                for (Node childNode : Hierarchy.getDescendantsOfType(node, Node.class)) {
                    getNameManager(childNode).remove(childNode);
                    node2namespace.remove(childNode);
                }
            }
        }
    }

    /** Sets the node's name, forcing it even if it clashes. */
    public void setName(Node node, String name) {
        setName(node, name, true);
    }

    /**
     * Sets the node's name via its namespace's name manager.
     *
     * @param force when true, take the name even on a clash
     */
    public void setName(Node node, String name, boolean force) {
        NameManager mgr = getNameManager(node);
        mgr.setName(node, name, force);
    }

    /** Returns the node's name within its namespace, or null if unnamed. */
    public String getName(Node node) {
        NameManager mgr = getNameManager(node);
        return mgr.getName(node);
    }

    /** Assigns the namespace's default name to the given math node. */
    public void setDefaultName(MathNode node) {
        NameManager nameManager = getNameManager(node);
        nameManager.setDefaultName(node);
    }
}
package org.carly.api.rest.request;

import lombok.Getter;
import lombok.Setter;
import org.bson.types.ObjectId;

/**
 * Request payload describing a car-window part.
 * Getters and setters are generated by Lombok.
 */
@Getter
@Setter
public class WindowsRequest {

    // MongoDB document identifier of the windows part.
    private ObjectId id;
    // Display name of the part.
    private String name;
    // Colour of the windows (free-form text).
    private String color;
}
package com.lg.task; import java.util.concurrent.ExecutionException; import java.util.concurrent.FutureTask; import java.util.concurrent.RunnableFuture; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; /** * @author leiting * @des * @since 16/9/23 */ public class Task<T> implements RunnableFuture<T> { FutureTask<T> mTFutureTask; public Task(Runnable runnable) { mTFutureTask = new FutureTask<T>(runnable, null); } @Override public void run() { mTFutureTask.run(); } @Override public boolean cancel(boolean mayInterruptIfRunning) { return mTFutureTask.cancel(mayInterruptIfRunning); } @Override public boolean isCancelled() { return mTFutureTask.isCancelled(); } @Override public boolean isDone() { return mTFutureTask.isDone(); } @Override public T get() throws InterruptedException, ExecutionException { return mTFutureTask.get(); } @Override public T get(long timeout, TimeUnit unit) throws InterruptedException, ExecutionException, TimeoutException { return mTFutureTask.get(timeout, unit); } }
package com.pivottech.booking.model;

import lombok.Data;

import javax.validation.constraints.Min;
import javax.validation.constraints.NotEmpty;
import javax.validation.constraints.NotNull;

/**
 * Request payload for making a reservation against an availability slot.
 * Field constraints are enforced by Bean Validation; accessors, equals,
 * hashCode and toString are generated by Lombok's {@code @Data}.
 */
@Data
public class MakeReservationRequest {

    // Identifier of the availability slot to reserve; required, non-negative.
    @NotNull
    @Min(0)
    Long availabilityId;

    // Free-form description of the reservation; must be non-empty.
    @NotEmpty
    String description;
}
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.inspector.model.transform;

import java.math.*;

import javax.annotation.Generated;

import com.amazonaws.services.inspector.model.*;
import com.amazonaws.transform.SimpleTypeJsonUnmarshallers.*;
import com.amazonaws.transform.*;

import com.fasterxml.jackson.core.JsonToken;
import static com.fasterxml.jackson.core.JsonToken.*;

/**
 * DescribeAssessmentTargetsResult JSON Unmarshaller
 * <p>
 * NOTE: generated by the AWS SDK code generator — do not hand-edit; the
 * token-walking structure below is the standard generated pattern.
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class DescribeAssessmentTargetsResultJsonUnmarshaller implements Unmarshaller<DescribeAssessmentTargetsResult, JsonUnmarshallerContext> {

    /**
     * Reads a DescribeAssessmentTargetsResult from the JSON stream held by the
     * context, populating the "assessmentTargets" list and "failedItems" map
     * fields found at one nesting level below the current depth.
     */
    public DescribeAssessmentTargetsResult unmarshall(JsonUnmarshallerContext context) throws Exception {
        DescribeAssessmentTargetsResult describeAssessmentTargetsResult = new DescribeAssessmentTargetsResult();

        int originalDepth = context.getCurrentDepth();
        String currentParentElement = context.getCurrentParentElement();
        int targetDepth = originalDepth + 1;

        JsonToken token = context.getCurrentToken();
        if (token == null)
            token = context.nextToken();
        // A null JSON value means an empty result.
        if (token == VALUE_NULL) {
            return describeAssessmentTargetsResult;
        }

        while (true) {
            if (token == null)
                break;

            if (token == FIELD_NAME || token == START_OBJECT) {
                if (context.testExpression("assessmentTargets", targetDepth)) {
                    context.nextToken();
                    describeAssessmentTargetsResult.setAssessmentTargets(new ListUnmarshaller<AssessmentTarget>(AssessmentTargetJsonUnmarshaller.getInstance())
                            .unmarshall(context));
                }
                if (context.testExpression("failedItems", targetDepth)) {
                    context.nextToken();
                    describeAssessmentTargetsResult.setFailedItems(new MapUnmarshaller<String, FailedItemDetails>(context.getUnmarshaller(String.class),
                            FailedItemDetailsJsonUnmarshaller.getInstance()).unmarshall(context));
                }
            } else if (token == END_ARRAY || token == END_OBJECT) {
                // Stop once we have closed back out of the result object.
                if (context.getLastParsedParentElement() == null || context.getLastParsedParentElement().equals(currentParentElement)) {
                    if (context.getCurrentDepth() <= originalDepth)
                        break;
                }
            }
            token = context.nextToken();
        }

        return describeAssessmentTargetsResult;
    }

    private static DescribeAssessmentTargetsResultJsonUnmarshaller instance;

    // Lazily initialised, not thread-safe — standard for SDK-generated unmarshallers.
    public static DescribeAssessmentTargetsResultJsonUnmarshaller getInstance() {
        if (instance == null)
            instance = new DescribeAssessmentTargetsResultJsonUnmarshaller();
        return instance;
    }
}
package nil.ed.chatroom.service.support;

import nil.ed.chatroom.common.PageResult;

import java.util.List;
import java.util.function.Supplier;

/**
 * Fluent helper for building a paginated query: configure page number, page
 * size and a row-count supplier, then (via the inherited {@code Operator}
 * contract) turn a {@code List<T>} of rows into a {@code PageResult<T>}.
 *
 * @param <T> element type of the paged result
 */
public interface SelectPageHelper<T> extends Operator<PageResult<T>, List<T>> {

    /** Sets the 1-based page number; returns this helper for chaining. */
    SelectPageHelper<T> setPageNo(Integer pageNo);

    /** Sets the number of rows per page; returns this helper for chaining. */
    SelectPageHelper<T> setPageSize(Integer pageSize);

    /** Sets the supplier used to compute the total row count; returns this helper for chaining. */
    SelectPageHelper<T> setCounter(Supplier<Integer> counter);
}
package com.mayankrastogi.cs441.hw4.chessservice.engines.javaopenchess;

import pl.art.lach.mateusz.javaopenchess.core.Squares;

/**
 * Helper class to get the {@link Squares} denoted by the current square in the chess board.
 */
public class BoardSquare {

    public Squares x;
    public Squares y;

    /**
     * Creates a square from its file and rank coordinates.
     *
     * @param x file coordinate
     * @param y rank coordinate
     */
    public BoardSquare(Squares x, Squares y) {
        this.x = x;
        this.y = y;
    }

    /**
     * Constructs a {@link BoardSquare} from the algebraic notation of a square
     * (e.g. "e4"): the first character names the file, the second the rank.
     *
     * @param squareNotation The algebraic notation of a square.
     */
    public BoardSquare(String squareNotation) {
        // Enum constants are named SQ_<FILE> / SQ_<RANK>.
        x = Squares.valueOf("SQ_" + squareNotation.toUpperCase().charAt(0));
        y = Squares.valueOf("SQ_" + squareNotation.charAt(1));
    }

    /**
     * Returns the algebraic notation, rebuilt by stripping the "SQ_" prefix
     * from both enum names.
     */
    @Override
    public String toString() {
        String file = x.toString().substring(3);
        String rank = y.toString().substring(3);
        return file + rank;
    }
}
package net.java.sip.communicator.impl.protocol.jabber.extensions.coin;

import java.util.Map.Entry;

import net.java.sip.communicator.impl.protocol.jabber.extensions.AbstractPacketExtension;

import org.jitsi.gov.nist.core.Separators;
import org.jivesoftware.smack.packet.PacketExtension;

/**
 * Packet extension representing the COIN "conference-state" element, carrying
 * the user count plus boolean "active" and "locked" flags.
 * <p>
 * The int fields use -1 (or 0 for userCount) as "unset": unset fields are
 * omitted from the XML; a positive value serialises as {@code true}, any
 * other set value as {@code false}.
 */
public class StatePacketExtension extends AbstractPacketExtension {

    /** Name of the "active" child element. */
    public static final String ELEMENT_ACTIVE = "active";

    /** Name of the "locked" child element. */
    public static final String ELEMENT_LOCKED = "locked";

    /** Name of this extension's root element. */
    public static final String ELEMENT_NAME = "conference-state";

    /** Name of the "user-count" child element. */
    public static final String ELEMENT_USER_COUNT = "user-count";

    /** This extension declares no namespace. */
    public static final String NAMESPACE = null;

    // -1 = unset; > 0 = true; otherwise false.
    private int active = -1;

    // -1 = unset; > 0 = true; otherwise false.
    private int locked = -1;

    // 0 = unset/omitted.
    private int userCount = 0;

    public StatePacketExtension() {
        super(NAMESPACE, ELEMENT_NAME);
    }

    /** Sets the number of users in the conference (0 omits the element). */
    public void setUserCount(int userCount) {
        this.userCount = userCount;
    }

    /** Sets the "active" flag (-1 unset, > 0 true, else false). */
    public void setActive(int active) {
        this.active = active;
    }

    /** Sets the "locked" flag (-1 unset, > 0 true, else false). */
    public void setLocked(int locked) {
        this.locked = locked;
    }

    /** Returns the raw user count. */
    public int getUserCount() {
        return this.userCount;
    }

    /** Returns the raw "active" value. */
    public int getActive() {
        return this.active;
    }

    /** Returns the raw "locked" value. */
    public int getLocked() {
        return this.locked;
    }

    /**
     * Serialises this extension (attributes, set state elements and child
     * extensions) to its XML representation.
     */
    public String toXML() {
        StringBuilder bldr = new StringBuilder();
        bldr.append(Separators.LESS_THAN).append(getElementName()).append(Separators.SP);
        if (getNamespace() != null) {
            bldr.append("xmlns='").append(getNamespace()).append(Separators.QUOTE);
        }
        for (Entry<String, String> entry : this.attributes.entrySet()) {
            bldr.append(Separators.SP).append(entry.getKey()).append("='")
                    .append(entry.getValue()).append(Separators.QUOTE);
        }
        bldr.append(Separators.GREATER_THAN);
        if (this.userCount != 0) {
            bldr.append(Separators.LESS_THAN).append(ELEMENT_USER_COUNT)
                    .append(Separators.GREATER_THAN).append(this.userCount)
                    .append("</").append(ELEMENT_USER_COUNT).append(Separators.GREATER_THAN);
        }
        if (this.active != -1) {
            bldr.append(Separators.LESS_THAN).append(ELEMENT_ACTIVE)
                    .append(Separators.GREATER_THAN).append(this.active > 0)
                    .append("</").append(ELEMENT_ACTIVE).append(Separators.GREATER_THAN);
        }
        if (this.locked != -1) {
            // BUG FIX: the "locked" element previously serialised the value of
            // the *active* field; it now correctly reflects this.locked.
            bldr.append(Separators.LESS_THAN).append(ELEMENT_LOCKED)
                    .append(Separators.GREATER_THAN).append(this.locked > 0)
                    .append("</").append(ELEMENT_LOCKED).append(Separators.GREATER_THAN);
        }
        for (PacketExtension ext : getChildExtensions()) {
            bldr.append(ext.toXML());
        }
        bldr.append("</").append(getElementName()).append(Separators.GREATER_THAN);
        return bldr.toString();
    }
}
package ru.job4j.synch.userstorage;

import org.junit.Test;

import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.nullValue;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;

/**
 * Unit tests for {@code UserStorage}: CRUD operations and money transfers
 * between users (a User is constructed as {@code new User(id, amount)}).
 */
public class UserStorageTest {

    /** An added user can be found back by its id. */
    @Test
    public void whenAddUser() {
        UserStorage storage = new UserStorage();
        User user = new User(1, 100);
        storage.add(user);
        User expected = storage.findById(1);
        assertThat(expected, is(user));
    }

    /** Updating replaces the stored user with the same id. */
    @Test
    public void whenUpdateUser() {
        UserStorage storage = new UserStorage();
        User user = new User(1, 100);
        storage.add(user);
        User updated = new User(1, 200);
        storage.update(updated);
        User expected = storage.findById(1);
        assertThat(expected, is(updated));
    }

    /** A deleted user is no longer found. */
    @Test
    public void whenDeleteUser() {
        UserStorage storage = new UserStorage();
        User user = new User(1, 100);
        storage.add(user);
        storage.delete(user);
        User expected = storage.findById(1);
        assertThat(expected, is(nullValue()));
    }

    /** A transfer within the sender's balance succeeds. */
    @Test
    public void whenTransferBetweenUsersTrue() {
        UserStorage storage = new UserStorage();
        User userFrom = new User(1, 100);
        User userTo = new User(2, 300);
        storage.add(userFrom);
        storage.add(userTo);
        assertTrue(storage.transfer(1, 2, 50));
    }

    /** A transfer exceeding the sender's balance is rejected. */
    @Test
    public void whenTransferBetweenUsersFalse() {
        UserStorage storage = new UserStorage();
        User userFrom = new User(1, 100);
        User userTo = new User(2, 300);
        storage.add(userFrom);
        storage.add(userTo);
        assertFalse(storage.transfer(2, 1, 500));
    }

    /** After a transfer both balances reflect the moved amount. */
    @Test
    public void whenTransferBetweenUsersAndSeeTheirMoney() {
        UserStorage storage = new UserStorage();
        User userFrom = new User(1, 100);
        User userTo = new User(2, 300);
        storage.add(userFrom);
        storage.add(userTo);
        storage.transfer(1, 2, 70);
        assertThat(storage.findById(1).getAmount(), is(30));
        assertThat(storage.findById(2).getAmount(), is(370));
    }
}
package com.fasterxml.jackson.dataformat.avro.apacheimpl; import java.io.InputStream; import java.io.OutputStream; import java.lang.ref.SoftReference; import org.apache.avro.io.*; import com.fasterxml.jackson.core.JacksonException; /** * Simple helper class that contains extracted functionality for * simple encoder/decoder recycling. */ public final class ApacheCodecRecycler { protected final static DecoderFactory DECODER_FACTORY = DecoderFactory.get(); protected final static EncoderFactory ENCODER_FACTORY = EncoderFactory.get(); protected final static ThreadLocal<SoftReference<ApacheCodecRecycler>> _recycler = new ThreadLocal<SoftReference<ApacheCodecRecycler>>(); private BinaryDecoder decoder; private BinaryEncoder encoder; private ApacheCodecRecycler() { } /* /********************************************************************** /* Public API /********************************************************************** */ public static BinaryDecoder decoder(InputStream in, boolean buffering) { BinaryDecoder prev = _recycler().claimDecoder(); return buffering ? DECODER_FACTORY.binaryDecoder(in, prev) : DECODER_FACTORY.directBinaryDecoder(in, prev); } public static BinaryDecoder decoder(byte[] buffer, int offset, int len) { BinaryDecoder prev = _recycler().claimDecoder(); return DECODER_FACTORY.binaryDecoder(buffer, offset, len, prev); } public static BinaryEncoder encoder(OutputStream out, boolean buffering) { BinaryEncoder prev = _recycler().claimEncoder(); return buffering ? 
ENCODER_FACTORY.binaryEncoder(out, prev) : ENCODER_FACTORY.directBinaryEncoder(out, prev); } public static void release(BinaryDecoder dec) { _recycler().decoder = (BinaryDecoder) dec; } public static void release(BinaryEncoder enc) { _recycler().encoder = enc; } /* /********************************************************************** /* Internal per-instance methods /********************************************************************** */ private static ApacheCodecRecycler _recycler() { SoftReference<ApacheCodecRecycler> ref = _recycler.get(); ApacheCodecRecycler r = (ref == null) ? null : ref.get(); if (r == null) { r = new ApacheCodecRecycler(); _recycler.set(new SoftReference<ApacheCodecRecycler>(r)); } return r; } private BinaryDecoder claimDecoder() { BinaryDecoder d = decoder; decoder = null; return d; } private BinaryEncoder claimEncoder() { BinaryEncoder e = encoder; encoder = null; return e; } /* /********************************************************************** /* Helper class /********************************************************************** */ // 24-Jan-2021, tatu: Is this actually used? If not, maybe remove from Jackson 3.0 public static class BadSchemaException extends JacksonException { private static final long serialVersionUID = 1L; public BadSchemaException(String msg, Throwable src) { super(msg, src); } @Override public Object processor() { return null; } } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.shardingsphere.core.parse.integrate.jaxb.token;

import lombok.Getter;
import lombok.Setter;

import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;

/**
 * JAXB fixture describing an expected "insert values" SQL token in the parser
 * integration tests; bound from the "begin-position" and "type" XML attributes.
 * Accessors are generated by Lombok.
 */
@Getter
@Setter
@XmlAccessorType(XmlAccessType.FIELD)
public final class ExpectedInsertValuesToken {

    // Character offset where the token is expected to begin.
    @XmlAttribute(name = "begin-position")
    private int beginPosition;

    // Expected token type identifier.
    @XmlAttribute(name = "type")
    private String type;
}
package org.pimslims.bioinf.targets; import java.io.BufferedReader; import java.io.FileNotFoundException; import java.io.FileReader; import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; import java.util.StringTokenizer; import org.pimslims.bioinf.DBFetch; import org.pimslims.lab.Util; import org.pimslims.model.target.Target; /** * This class is designed to read the proteins from the tab delimited file (ProteinList) with protein names. * THe class tried to find and to download the appropriate SwissProt entries into the local folder (filePath) * In order to classify the proteins, class reads 4 files each of which contains the list of the proteins with * the same function. * * <PRE> * The example files is shown in the repository under data directory * subBindProt = Substrate binding proteins List * aTPBindProt = ATP binding proteins * membraneProt = Membrane Proteins * regulProt = regulatory proteins * </PRE> * * All other proteins are fall to the Substrate binding proteins (SUB_SPEC_PROT) category. This class * dependents on some methods in uk.ac.Util.class * * @see uk.ac.mpsi.Util * @author Petr Troshin <br> * Created on 11-Apr-2005 * @deprecated This class is no longer supported and does not reflect the changes regarding target recording * in PIMS */ @Deprecated public class UMTargetsRetrieval { /** * This ArrayList contains all proteins name extracted from the file */ ArrayList genesName; /** * This ArrayList contains all not found proteins */ ArrayList notFound; /** * This ArrayList contains all proteins with not direct match of its identifier to the database */ ArrayList notExactMatchFound; // /** * This ArrayList contains all proteins which has a properly identifier and can be directly found in the * database */ ArrayList found; /** * HashMap used to get the classification of the proteins, each map represent the list of the proteins * fall into one of the classification group. 
The classification is uploaded from the corresponding files * contains the list of the protein from one classification group. (Except Substrate specific proteins) * All protein which were not found into any other group fall into the category - Substrate specific * proteins. These HashMap are only populated and used when #getWorkPackage() method is called. It happens * when SwissProtTarges are uploading to the EBI Targets Tracker database. * * @see #getWorkPackage(String) */ /** * This HashMap contains Substrate Binding Proteins */ static final HashMap SUB_BIND_PROT = new HashMap(); /** * This HashMap contains ATP - binding proteins */ static final HashMap ATP_BIND_PROT = new HashMap(); /** * This HashMap contains membrane proteins (3.A group in TCDB classification) */ static final HashMap MEMBRANE_PROT = new HashMap(); /** * This HashMap contains Regulator proteins */ static final HashMap REGUL_PROT = new HashMap(); /** * This HashMap contains Substrate specific proteins */ static final HashMap SUB_SPEC_PROT = new HashMap(); /** * Where to get the information on the proteins types These files are only the sources for workpackage * classification and are only populated and used when #getWorkPackage() method is called. 
* * @see #ATP_BIND_PROT etc */ static final String subBindProt = "C:/Documents and Settings/pvt43/Desktop/targets_tracking/Excel_extracts/targets_Sub_build_prot.txt"; static final String aTPBindProt = "C:/Documents and Settings/pvt43/Desktop/targets_tracking/Excel_extracts/targets_ATP_bind_prot.txt"; static final String membraneProt = "C:/Documents and Settings/pvt43/Desktop/targets_tracking/Excel_extracts/targets_membr_prot.txt"; static final String regulProt = "C:/Documents and Settings/pvt43/Desktop/targets_tracking/Excel_extracts/targets_regul_prot.txt"; /** * Path to write the obtained SwissProt files */ static String filePath = "c:/temp/mpsi/"; /** * The path to the protein list */ static final String proteinList = "C:/Documents and Settings/pvt43/Desktop/targets_tracking/Excel_extracts/Targets_Database_clear1.txt"; /** * Load predefined types */ static { loadTypes(); } /** * Methods loads the list of proteins from files to the HashMap * * @param bf BufferReader * @param hm HashMap */ private static void loadType(BufferedReader bf, HashMap hm) { String line; try { while ((line = bf.readLine()) != null) { line = line.trim(); if (!Util.isEmpty(line)) { hm.put(line, ""); } } bf.close(); } catch (IOException ioe) { ioe.printStackTrace(); } } /** * Method loads the list of proteins from the files statically defined above to the defined above named * HashMaps. 
The hashmaps are the following: * * @see #SUB_BIND_PROT * @see #ATP_BIND_PROT * @see #MEMBRANE_PROT * @see #SUB_SPEC_PROT * @see #REGUL_PROT The files are the following: * @see #aTPBindProt * @see #membraneProt * @see #subBindProt * @see #regulProt */ private static void loadTypes() { try { BufferedReader bf = new BufferedReader(new FileReader(subBindProt)); BufferedReader bfATP = new BufferedReader(new FileReader(aTPBindProt)); BufferedReader bfmem = new BufferedReader(new FileReader(membraneProt)); BufferedReader bfregul = new BufferedReader(new FileReader(regulProt)); loadType(bf, SUB_BIND_PROT); loadType(bfATP, ATP_BIND_PROT); loadType(bfmem, MEMBRANE_PROT); loadType(bfregul, REGUL_PROT); } catch (FileNotFoundException fnfe) { fnfe.printStackTrace(); } } /** * In time of construction of UMTargetsRetrieval object the Target list passes to it. UMTargetsRetrieval * parses this list and downloads the proteins from it to the #filePath folder. * * @param targetsList */ public UMTargetsRetrieval(String targetsList) { try { parse(targetsList); getSummary(); } catch (IOException ioe) { ioe.printStackTrace(); } } /** * Parse the targetsList to the individual proteins, download appropriate SwissProt entries, write * statistics * * @param targetsList * @throws FileNotFoundException * @throws IOException */ private void parse(String targetsList) throws IOException { String line = ""; genesName = new ArrayList(); notFound = new ArrayList(); notExactMatchFound = new ArrayList(); found = new ArrayList(); BufferedReader br = new BufferedReader(new FileReader(targetsList)); while ((line = br.readLine()) != null) { if (Util.isEmpty(line.trim())) { continue; } StringTokenizer st = new StringTokenizer(line); String orgShortName = (String) st.nextElement(); while (st.hasMoreElements()) { String protName = (String) st.nextElement(); setOrganism(protName, orgShortName); String accession = getAccession(extractDesc(protName)); if (!Util.isEmpty(accession)) { String swissProtEntry = 
getSwissProtEntry(accession); if (!Util.isEmpty(swissProtEntry)) { Util.writeToFile(swissProtEntry, filePath + protName); } else { System.out.println("For Entry number: " + accession); System.out .println("Accession number has been found, but the sequence downloading has failed! " + "/n"); } } } } System.out.println("Total not direct matches: " + notExactMatchFound.size()); System.out.println("Total direct matches: " + found.size()); System.out.println("Total not found: " + notFound.size()); } /** * The method gets the SwissProt database accession number for the particular protein. * * @param geneLocNames response from the SwissProt search server. May contain the right entry, or search * results page. * @return accession number or null if nothing found. */ private String getAccession(String[] geneLocNames) throws IOException { int idxpa = -1; String data = DBFetch.getResponse("http://www.expasy.org/cgi-bin/sprot-search-de?" + geneLocNames[0]); if (data.indexOf("Search in Swiss-Prot and TrEMBL for:") >= 0 && data.indexOf("There are matches to") >= 0 && data.indexOf("There are matches to 0 ") < 0) { System.out.println("Not direct match. Look at it carefully! "); System.out.println("LocusName is: " + geneLocNames[0]); notExactMatchFound.add(geneLocNames[0]); return getAccessionFromSearchPage(data); } else if ((idxpa = data.indexOf("\"black\">Primary accession number")) >= 0) { System.out.println("Direct match! 
"); System.out.println("LocusName is: " + geneLocNames[0]); found.add(geneLocNames[0]); return getAccessionFromData(data.substring(idxpa)); } else { System.out.println("The protein could not be found on the basis of the provided data!"); System.out.println("The protein name is: " + geneLocNames[0] + "\n"); notFound.add(geneLocNames[0]); return ""; } } /** * The method gets the WorkPackage for EBI Targets tracker * * @param protName Name of the protein * @return The Workpackage for the EBI targets tracker */ public static String getWorkPackage(String protName) { if (ATP_BIND_PROT.containsKey(protName)) { return EBITarget.classWpackage.get("ABC.ATP"); } else if (MEMBRANE_PROT.containsKey(protName)) { return EBITarget.classWpackage.get("3.A."); } else if (REGUL_PROT.containsKey(protName)) { return EBITarget.classWpackage.get("ABC.R"); } else if (SUB_BIND_PROT.containsKey(protName)) { return EBITarget.classWpackage.get("ABC.S"); } else { return EBITarget.classWpackage.get("ABC.SS"); } } /** * Set the short name of the protein source organism. Can be used for verification purpose * * @param protName * @param orgShortName */ private void setOrganism(String protName, String orgShortName) { if (ATP_BIND_PROT.containsKey(protName)) { ATP_BIND_PROT.put(protName, orgShortName); } else if (MEMBRANE_PROT.containsKey(protName)) { MEMBRANE_PROT.put(protName, orgShortName); } else if (REGUL_PROT.containsKey(protName)) { REGUL_PROT.put(protName, orgShortName); } else if (SUB_BIND_PROT.containsKey(protName)) { SUB_BIND_PROT.put(protName, orgShortName); } else { SUB_SPEC_PROT.put(protName, orgShortName); } } /** * Print the summary of all downloaded entries to the stndout. */ private void getSummary() { System.out.println("Direct match! 
"); System.out.println("LocusName is: "); for (int i = 0; i < found.size(); i++) { System.out.println(found.get(i)); } System.out.println("The protein could not be found on the basis of the provided data!"); System.out.println("The protein name is: "); for (int i = 0; i < notFound.size(); i++) { System.out.println(notFound.get(i)); } System.out.println("Not direct match. Look at it carefully! "); System.out.println("LocusName is: "); for (int i = 0; i < notExactMatchFound.size(); i++) { System.out.println(notExactMatchFound.get(i)); } } /** * Retrieve the database accession number from the search page. * * @param rawResponce from SwissProt search system * @return protein accession number */ private static String getAccessionFromSearchPage(String rawResponce) { int ind = rawResponce.indexOf("niceprot.pl?"); rawResponce = rawResponce.toUpperCase(); rawResponce = rawResponce.substring(ind); ind = rawResponce.indexOf("<HR>"); rawResponce = rawResponce.substring(0, ind); rawResponce = rawResponce.substring(rawResponce.indexOf("<B>"), rawResponce.indexOf("</B>")); String protName = rawResponce.substring(3); System.out.println("Found protein accession is: " + protName + "\n"); return protName; } /** * Retrieve the accession number from SwissProt server response contained particular protein * * @param rawData protein information page rawData * @return protein database accession number */ private static String getAccessionFromData(String rawData) { // System.out.println(rawData); rawData = rawData.substring(rawData.indexOf("\"black\">Primary accession number")); // rawData = rawData.toLowerCase(); rawData = rawData.substring(0, rawData.indexOf("</tr>")); // System.out.println("after " + rawData); rawData = rawData.substring(rawData.indexOf("<b>"), rawData.indexOf("</b>")); String protName = rawData.substring(3); System.out.println("Found protein accession is: " + protName + "\n"); return protName; } /** * Method retrieve the SwissProt database entry by accession number * * 
@param accession SwissProt database accession * @return SwissProt entry */ public static String getSwissProtEntry(String accession) throws IOException { return DBFetch.getResponse("http://www.expasy.org/cgi-bin/get-sprot-raw.pl?" + accession); } /** * The method separate gene name and locusName record format is: src4563(pheA), src4563 - locus name pheA - * gene name * * @return String[0] - locus Name; String[1] - gene Name */ private String[] extractDesc(String geneLocName) { String[] geneLocNames = new String[2]; int startGeneName = geneLocName.indexOf("("); int endGeneName = geneLocName.indexOf(")"); if (startGeneName < 0 && endGeneName < 0) { geneLocNames[0] = geneLocName; return geneLocNames; } geneLocNames[0] = geneLocName.substring(0, startGeneName); geneLocNames[1] = geneLocName.substring(startGeneName, endGeneName); return geneLocNames; } /** * Start to retrieve the information on targets * * @param args - not in use */ public static void main(String[] args) throws Exception { // UMTargetsRetrieval ut = new UMTargetsRetrieval(proteinList); /* HashMap prop = (HashMap) */Target.class.getDeclaredField("initClassDict") .get("java.lang.HashMap"); /* * for (int i = 0; i < prop.getType().getDeclaredFields().length; i++) { * System.out.println(prop.getType().getDeclaredFields()[i]); } System.out.println(); */ } }
package com.adambates.wikisearch.http.models;
package com.alipay.api.domain;

import com.alipay.api.AlipayObject;
import com.alipay.api.internal.mapping.ApiField;

/**
 * Mall homepage URL creation/modification interface model.
 * (Original: 商圈主页地址创建修改接口)
 *
 * @author auto create
 * @since 1.0, 2018-05-10 13:54:14
 */
public class KoubeiShopMallPageModifyModel extends AlipayObject {

    private static final long serialVersionUID = 2429281239325783882L;

    /**
     * Mall id. (Original: 商圈id)
     */
    @ApiField("mall_id")
    private String mallId;

    /**
     * Mall visit URL. (Original: 商圈访问地址)
     */
    @ApiField("mall_url")
    private String mallUrl;

    /**
     * Deprecated! Do not pass this field.
     * (Original: 废弃!,务传。 — "务" appears to be a typo for "勿", i.e. "do not pass".)
     */
    @ApiField("out_biz_id")
    private String outBizId;

    public String getMallId() {
        return this.mallId;
    }

    public void setMallId(String mallId) {
        this.mallId = mallId;
    }

    public String getMallUrl() {
        return this.mallUrl;
    }

    public void setMallUrl(String mallUrl) {
        this.mallUrl = mallUrl;
    }

    public String getOutBizId() {
        return this.outBizId;
    }

    public void setOutBizId(String outBizId) {
        this.outBizId = outBizId;
    }
}
/* * File : ARNReduced.java * Created : 26-Dec-2009 * By : atrilla * * Emolib - Emotional Library * * Copyright (c) 2009 Alexandre Trilla & * 2007-2012 Enginyeria i Arquitectura La Salle (Universitat Ramon Llull) * * This file is part of Emolib. * * You should have received a copy of the rights granted with this * distribution of EmoLib. See COPYING. */ package emolib.classifier.machinelearning; import java.util.ArrayList; import java.lang.Math; import java.util.HashMap; //import java.util.List; //import java.io.*; import emolib.util.conf.*; import emolib.util.proc.*; import emolib.classifier.Classifier; import emolib.classifier.FeatureBox; //import org.jdom.*; //import org.jdom.output.*; //import org.jdom.input.*; import Jama.Matrix; import org.junit.Test; import org.junit.Assert; /** * The <i>ARNReduced</i> classifies according to a cosine similarity in * a weighted Vector Space Model with co-occurrences, which are assumed to * capture the style in text. * * <p> * The Associative Relational Network - Reduced is word co-occurrence * network-based model, see the figure below, which * constructs a Vector Space Model (VSM) with a term selection method * "on the fly" based on the observation * of test features (Al&iacute;as et al., 2008). * This term selection refinement is reported to improve the classical VSM * for classification. Dense vectors representing the input text and the * class are retrieved (no learning process is involved) and * evaluated by the cosine similarity measure. The basic * hypothesis in using the ARN-R for classification is the * contiguity hypothesis, where terms in the same class form * a contiguous region and regions of different classes do * not overlap. 
* </p> * <p> * <img src="doc-files/arn.png" width="300" * alt="Associative Relational Network - Reduced"/> * </p> * <p> * The ARN-R also provides several methods, i.e., criteria, * 1) to weight the features in order to enhance their * discriminative features, * and 2) to select the most relevant features in order to reduce * the sparsity in the VSM. These approaches intend to * simplify the model in order to generalise better. * </p> * <p> * In addition, the ARNReduced provides a classical VSM implementation * which enables the retrieval of sparse vectors, and therefore standardises * the interface to the textual features for any vector-based classifier. * </p> * <p> * --<br> * (Al&iacute;as et al., 2008) Francesc Al&iacute;as, Xavier Sevillano, Joan Claudi Socor&oacute; * and Xavier Gonzalvo, "Towards high quality next-generation Text-to-Speech synthesis: a * Multidomain approach by automatic domain classification", IEEE Transactions on Audio, Speech * and Language Processing (Special issue on New Approaches to Statistical Speech and Text Processing) * (ISSN 1558-7916), vol. 16 (7), pp. 1340-1354, September. * </p> * * @author Alexandre Trilla (atrilla@salle.url.edu) */ public class ARNReduced extends Classifier { /** * Property to indicate a pre-trained classifier. */ public final static String PROP_EXTERNAL_FILE = "external_file"; private String externalFile; // Classifier specific parameters private String termWeighingMeasure; private String similarityMeasure; private boolean assessCOF; private boolean assessPOS; private boolean assessSyns; private boolean assessStems; private boolean featSelMI; private boolean featSelChi2; private boolean featSelTF; private int numSelFeats; private ArrayList<Graph> categoryGraph; private Graph fullGraph; // private HashMap<String, Integer> soleCategories; private String[] categoryRanking; /** * Inner class representing an element of the graph. 
     */
    public class GraphElement {

        // An element is either a node (single term) or a link (ordered term pair).
        private boolean isNodeFlag;
        private boolean isLinkFlag;
        private int termFrequency;
        private String term, termLeft, termRight;
        private float weighedMeasure;
        private double utilityMeasure;

        /**
         * Constructor. The element starts neither node nor link; callers must
         * invoke setAsNode() or setAsLink() before setting terms.
         */
        public GraphElement() {
            isNodeFlag = false;
            isLinkFlag = false;
            termFrequency = 1;
            term = "";
            termLeft = "";
            termRight = "";
            weighedMeasure = 1;
            utilityMeasure = 0;
        }

        /**
         * Method to set the utility measure (used for feature-selection ranking).
         *
         * @param um The value of the utility measure.
         */
        public void setUtilityMeasure(double um) {
            utilityMeasure = um;
        }

        /**
         * Function to retrieve the utility measure.
         *
         * @return The value of the utility measure.
         */
        public double getUtilityMeasure() {
            return utilityMeasure;
        }

        /**
         * Method to set the weighed measure (the term-weighted value).
         *
         * @param wm The value of the weighed measure.
         */
        public void setWeighedMeasure(float wm) {
            weighedMeasure = wm;
        }

        /**
         * Function to retrieve the weighed measure.
         *
         * @return The value of the weighed measure.
         */
        public float getWeighedMeasure() {
            return weighedMeasure;
        }

        /**
         * Method to set this graph element as a node.
         */
        public void setAsNode() {
            isNodeFlag = true;
        }

        /**
         * Method to set this graph element as a link.
         */
        public void setAsLink() {
            isLinkFlag = true;
        }

        /**
         * Function to see if this graph element is a node.
         *
         * @return True if it is a node.
         */
        public boolean isNode() {
            return isNodeFlag;
        }

        /**
         * Function to see if this graph element is a link.
         *
         * @return True if it is a link.
         */
        public boolean isLink() {
            return isLinkFlag;
        }

        /**
         * Method to set the term of this node. Logs an error if this element is
         * not a node.
         *
         * @param theTerm The term to set.
         */
        public void setTerm(String theTerm) {
            if (isNode()) {
                term = theTerm;
            } else {
                System.out.println("GraphElement ERROR! Trying to set a term to a link!");
            }
        }

        /**
         * Method to set the left term of this link. Logs an error if this element
         * is not a link.
         *
         * @param theTerm The term to set.
         */
        public void setLeftTerm(String theTerm) {
            if (isLink()) {
                termLeft = theTerm;
            } else {
                System.out.println("GraphElement ERROR! Trying to set a left term to a node!");
            }
        }

        /**
         * Method to set the right term of this link. Logs an error if this element
         * is not a link.
         *
         * @param theTerm The term to set.
         */
        public void setRightTerm(String theTerm) {
            if (isLink()) {
                termRight = theTerm;
            } else {
                System.out.println("GraphElement ERROR! Trying to set a right term to a node!");
            }
        }

        /**
         * Method to get the term of this node.
         *
         * @return The term, or the empty string (plus an error log) if this
         *         element is not a node.
         */
        public String getTerm() {
            String theTerm = "";
            if (isNode()) {
                theTerm = term;
            } else {
                System.out.println("GraphElement ERROR! Trying to get a term from a link!");
            }
            return theTerm;
        }

        /**
         * Method to get the left term of this link.
         *
         * @return The term, or the empty string (plus an error log) if this
         *         element is not a link.
         */
        public String getLeftTerm() {
            String theTerm = "";
            if (isLink()) {
                theTerm = termLeft;
            } else {
                System.out.println("GraphElement ERROR! Trying to get a left term from a node!");
            }
            return theTerm;
        }

        /**
         * Method to get the right term of this link.
         *
         * @return The term, or the empty string (plus an error log) if this
         *         element is not a link.
         */
        public String getRightTerm() {
            String theTerm = "";
            if (isLink()) {
                theTerm = termRight;
            } else {
                System.out.println("GraphElement ERROR! Trying to get a right term from a node!");
            }
            return theTerm;
        }

        /**
         * Method to add one count (increment the term frequency).
         */
        public void addOneCount() {
            termFrequency++;
        }

        /**
         * Method to set the number of counts.
         *
         * @param tf The term frequency to set.
         */
        public void setTermFrequency(int tf) {
            termFrequency = tf;
        }

        /**
         * Function to retrieve the number of counts.
         *
         * @return The number of counts (term frequency).
         */
        public int getTermFrequency() {
            return termFrequency;
        }

        /**
         * Function to clone this element. Copies kind, terms and term frequency;
         * note the weighed and utility measures are NOT copied (clone gets the
         * defaults).
         *
         * @return A clone of this element.
         */
        public GraphElement cloneElement() {
            GraphElement theClone = new GraphElement();
            if (isNode()) {
                theClone.setAsNode();
                theClone.setTerm(getTerm());
            } else {
                theClone.setAsLink();
                theClone.setLeftTerm(getLeftTerm());
                theClone.setRightTerm(getRightTerm());
            }
            theClone.setTermFrequency(getTermFrequency());
            return theClone;
        }
    }

    /**
     * Generic graph inner class.
     * As a general rule, the graph contains the minimum amount of valuable information, i.e.,
     * the term frequencies. For more enhanced IR measures the ARN should be able to manage
     * with these rates.
     */
    public class Graph {

        // Flat store of nodes and links; order defines the vector dimensions
        // exported by exportVectorTF()/exportWeighedVector().
        private ArrayList<GraphElement> elementStruct;
        private String categoryName;

        /**
         * Graph constructor.
         */
        public Graph() {
            elementStruct = new ArrayList<GraphElement>();
            categoryName = "";
        }

        /**
         * Function to retrieve the number of elements of this graph.
         *
         * @return The number of elements.
         */
        public int getNumberOfElements() {
            return elementStruct.size();
        }

        /**
         * Function to retrieve the list of node terms (i.e. words) of this graph.
         * (The original javadoc said "number of nodes"; the method actually
         * returns the node terms themselves.)
         *
         * @return The list of node terms.
         */
        public ArrayList<String> getListOfNodes() {
            ArrayList<String> listWords = new ArrayList<String>();
            GraphElement graphElem;
            for (int i = 0; i < getNumberOfElements(); i++) {
                graphElem = getElement(i);
                if (graphElem.isNode()) {
                    listWords.add(graphElem.getTerm());
                }
            }
            return listWords;
        }

        /**
         * Function to retrieve a list of terms from this graph.
         * Links are rendered as "left_right".
         *
         * @return The list of terms.
         */
        public ArrayList<String> getArrayOfTerms() {
            GraphElement tmpElem;
            String auxTerm = "";
            ArrayList<String> theTerms = new ArrayList<String>();
            for (int elemNum = 0; elemNum < getNumberOfElements(); elemNum++) {
                tmpElem = getElement(elemNum);
                if (tmpElem.isNode()) {
                    auxTerm = tmpElem.getTerm();
                } else {
                    auxTerm = tmpElem.getLeftTerm() + "_" + tmpElem.getRightTerm();
                }
                theTerms.add(auxTerm);
            }
            return theTerms;
        }

        /**
         * Method to add a category name to this graph.
         *
         * @param name The name.
         */
        public void setCategoryName(String name) {
            categoryName = name;
        }

        /**
         * Function to get the category name of this graph.
         *
         * @return The category name of this graph.
         */
        public String getCategoryName() {
            return categoryName;
        }

        /**
         * Function to retrieve the total sum of term frequencies in this structure
         * (nodes and links alike).
         *
         * @return The total sum of term frequencies.
         */
        public int getTotalSumTF() {
            int tfsum = 0;
            for (int elem = 0; elem < getNumberOfElements(); elem++) {
                tfsum += getElement(elem).getTermFrequency();
            }
            return tfsum;
        }

        /**
         * Dump the content of this graph for debugging purposes.
         */
        public void dumpContent() {
            GraphElement tmpElem;
            for (int elem = 0; elem < getNumberOfElements(); elem++) {
                tmpElem = getElement(elem);
                if (tmpElem.isNode()) {
                    System.out.print("Node: " + tmpElem.getTerm());
                    System.out.println("\ttf = " + tmpElem.getTermFrequency());
                } else {
                    System.out.print("Link: " + tmpElem.getLeftTerm() + " - " + tmpElem.getRightTerm());
                    System.out.println("\ttf = " + tmpElem.getTermFrequency());
                }
            }
        }

        /**
         * Method to add a node.
         * If the node to add is already available in the graph structure, its term frequency
         * is increased a unit.
         *
         * @param term The term of the node to add (trimmed before matching).
         */
        public void addNode(String term) {
            term = term.trim();
            boolean alreadyPresent = false;
            GraphElement temp;
            for (int i = 0; i < elementStruct.size(); i++) {
                temp = elementStruct.get(i);
                if (temp.isNode()) {
                    if (temp.getTerm().equals(term)) {
                        temp.addOneCount();
                        alreadyPresent = true;
                        break;
                    }
                }
            }
            if (!alreadyPresent) {
                temp = new GraphElement();
                temp.setAsNode();
                temp.setTerm(term);
                elementStruct.add(temp);
            }
        }

        /**
         * Method to add a node with its frequential information.
         * If already present, the incoming frequency is summed onto the
         * existing one; otherwise a clone of the element is stored.
         *
         * @param term The node to add.
         */
        public void addNode(GraphElement term) {
            boolean alreadyPresent = false;
            GraphElement temp;
            for (int i = 0; i < elementStruct.size(); i++) {
                temp = elementStruct.get(i);
                if (temp.isNode()) {
                    if (temp.getTerm().equals(term.getTerm())) {
                        temp.setTermFrequency(temp.getTermFrequency() + term.getTermFrequency());
                        alreadyPresent = true;
                        break;
                    }
                }
            }
            if (!alreadyPresent) {
                elementStruct.add(term.cloneElement());
            }
        }

        /**
         * Function to check if the graph contains a specific node.
         *
         * @param nodeTerm The node term.
         *
         * @return True if the graph contains a specific node.
         */
        public boolean containsNode(String nodeTerm) {
            boolean contains = false;
            for (int i = 0; i < elementStruct.size(); i++) {
                if (elementStruct.get(i).isNode()) {
                    if (elementStruct.get(i).getTerm().equals(nodeTerm)) {
                        contains = true;
                    }
                }
            }
            return contains;
        }

        /**
         * Function to retrieve the term frequency of a given node.
         *
         * @param nodeTerm The node term.
         *
         * @return The TF of this node, or 0 if the node is absent.
         */
        public int getNodeTermFrequency(String nodeTerm) {
            int nodeTF = 0;
            GraphElement tempElement;
            for (int i = 0; i < elementStruct.size(); i++) {
                tempElement = elementStruct.get(i);
                if (tempElement.isNode() && tempElement.getTerm().equals(nodeTerm)) {
                    nodeTF = tempElement.getTermFrequency();
                    break;
                }
            }
            return nodeTF;
        }

        /**
         * Method to add a link.
         * If the link to add is already available in the graph structure,
         * its term frequency is increased a unit.
         *
         * @param lTerm The left term of the link to add.
         * @param rTerm The right term of the link to add.
         */
        public void addLink(String lTerm, String rTerm) {
            boolean alreadyPresent = false;
            GraphElement temp;
            for (int i = 0; i < elementStruct.size(); i++) {
                temp = elementStruct.get(i);
                if (temp.isLink()) {
                    if (temp.getLeftTerm().equals(lTerm) && temp.getRightTerm().equals(rTerm)) {
                        temp.addOneCount();
                        alreadyPresent = true;
                        break;
                    }
                }
            }
            if (!alreadyPresent) {
                temp = new GraphElement();
                temp.setAsLink();
                temp.setLeftTerm(lTerm);
                temp.setRightTerm(rTerm);
                elementStruct.add(temp);
            }
        }

        /**
         * Method to add a link with its frequential information.
         * If already present, the incoming frequency is summed onto the
         * existing one; otherwise a clone of the element is stored.
         *
         * @param link The link to add.
         */
        public void addLink(GraphElement link) {
            boolean alreadyPresent = false;
            GraphElement temp;
            for (int i = 0; i < elementStruct.size(); i++) {
                temp = elementStruct.get(i);
                if (temp.isLink()) {
                    if (temp.getLeftTerm().equals(link.getLeftTerm())
                            && temp.getRightTerm().equals(link.getRightTerm())) {
                        temp.setTermFrequency(temp.getTermFrequency() + link.getTermFrequency());
                        alreadyPresent = true;
                        break;
                    }
                }
            }
            if (!alreadyPresent) {
                elementStruct.add(link.cloneElement());
            }
        }

        /**
         * Function to check if the graph contains a specific link.
         *
         * @param linkLeftTerm The link left term.
         * @param linkRightTerm The link right term.
         *
         * @return True if the graph contains a specific link.
         */
        public boolean containsLink(String linkLeftTerm, String linkRightTerm) {
            boolean contains = false;
            for (int i = 0; i < elementStruct.size(); i++) {
                if (elementStruct.get(i).isLink()) {
                    if (elementStruct.get(i).getLeftTerm().equals(linkLeftTerm)
                            && elementStruct.get(i).getRightTerm().equals(linkRightTerm)) {
                        contains = true;
                    }
                }
            }
            return contains;
        }

        /**
         * Function to retrieve the term frequency of a given link.
         *
         * @param linkLeftTerm The link left term.
         * @param linkRightTerm The link right term.
         *
         * @return The TF of this link, or 0 if the link is absent.
         */
        public int getLinkTermFrequency(String linkLeftTerm, String linkRightTerm) {
            int linkTF = 0;
            GraphElement tempElement;
            for (int i = 0; i < elementStruct.size(); i++) {
                tempElement = elementStruct.get(i);
                if (tempElement.isLink() && tempElement.getLeftTerm().equals(linkLeftTerm)
                        && tempElement.getRightTerm().equals(linkRightTerm)) {
                    linkTF = tempElement.getTermFrequency();
                    break;
                }
            }
            return linkTF;
        }

        /**
         * Function to retrive the specified element of this graph.
         *
         * @param num Element number.
         *
         * @return The specified element.
         */
        public GraphElement getElement(int num) {
            return elementStruct.get(num);
        }

        /**
         * Function to determine if it contains an element.
         *
         * @param elem The element under test.
         *
         * @return TRUE if it is contained.
         */
        public boolean containsElement(GraphElement elem) {
            boolean contained = false;
            if (elem.isNode()) {
                contained = containsNode(elem.getTerm());
            } else {
                contained = containsLink(elem.getLeftTerm(), elem.getRightTerm());
            }
            return contained;
        }

        /**
         * Function to retrieve the TF of the given graph element, whatsoever
         * it is (node or term).
         *
         * @param elem The input graph element.
         *
         * @return The TF of the given graph element.
         */
        public int getElementTermFrequency(GraphElement elem) {
            int etf = 0;
            if (elem.isNode()) {
                etf = getNodeTermFrequency(elem.getTerm());
            } else {
                etf = getLinkTermFrequency(elem.getLeftTerm(), elem.getRightTerm());
            }
            return etf;
        }

        /**
         * Function to export this graph as a vector of term frequencies.
         *
         * @return The vector of TFs (one component per element, in store order).
         */
        public int[] exportVectorTF() {
            int[] vec = new int[elementStruct.size()];
            for (int i = 0; i < vec.length; i++) {
                vec[i] = elementStruct.get(i).getTermFrequency();
            }
            return vec;
        }

        /**
         * Function to export this graph as a vector of weighed measures.
         *
         * @return The vector of weighed measures (one component per element).
         */
        public float[] exportWeighedVector() {
            float[] vec = new float[elementStruct.size()];
            for (int i = 0; i < vec.length; i++) {
                vec[i] = elementStruct.get(i).getWeighedMeasure();
            }
            return vec;
        }

        /**
         * Function to export this graph into a weighted matrix.
         * Node weights go on the diagonal; link weights at (left, right).
         *
         * @return The weighted matrix.
         */
        public Matrix exportWeightedMatrix() {
            GraphElement gElem;
            int auxLocOne, auxLocTwo;
            ArrayList<String> listWords = getListOfNodes();
            int numWords = listWords.size();
            // Matrix of zeros
            Matrix wMatrix = new Matrix(numWords, numWords);
            for (int numE = 0; numE < elementStruct.size(); numE++) {
                gElem = getElement(numE);
                if (gElem.isNode()) {
                    auxLocOne = listWords.indexOf(gElem.getTerm());
                    wMatrix.set(auxLocOne, auxLocOne, (double)gElem.getWeighedMeasure());
                } else {
                    auxLocOne = listWords.indexOf(gElem.getLeftTerm());
                    auxLocTwo = listWords.indexOf(gElem.getRightTerm());
                    wMatrix.set(auxLocOne, auxLocTwo, (double)gElem.getWeighedMeasure());
                }
            }
            return wMatrix;
        }

        /**
         * Method to add an element (no duplicate check; internal use).
         *
         * @param elem The element to add.
         */
        private void addElement(GraphElement elem) {
            elementStruct.add(elem);
        }

        /**
         * Function to clone this graph.
         *
         * @return A clone of this graph.
         */
        public Graph cloneGraph() {
            Graph theClone = new Graph();
            theClone.setCategoryName(getCategoryName());
            for (int i = 0; i < getNumberOfElements(); i++) {
                theClone.addElement(getElement(i).cloneElement());
            }
            return theClone;
        }

        /**
         * Method to reset the term frequencies of this graph.
         */
        public void resetTermFrequencies() {
            // NOTE(review): 'elem' is never used; candidate for removal.
            GraphElement elem;
            for (int i = 0; i < getNumberOfElements(); i++) {
                getElement(i).setTermFrequency(0);
            }
        }

        /**
         * Prunes the graph by ordering terms wrt their utility and
         * removing the least useful.
         *
         * @param numSelected The number of useful terms desired.
         */
        public void pruneUtility(int numSelected) {
            utilitySort();
            while (getNumberOfElements() > numSelected) {
                elementStruct.remove(numSelected);
            }
        }

        /**
         * Utility sorting method.
         * The Bubble sort method is used.
*/ private void utilitySort() { boolean change; GraphElement auxElem; int elemCount; change = true; while (change) { change = false; for (elemCount = 0; elemCount < getNumberOfElements() - 2; elemCount++) { if (getElement(elemCount + 1).getUtilityMeasure() > getElement(elemCount).getUtilityMeasure()) { auxElem = getElement(elemCount); elementStruct.set(elemCount, getElement(elemCount + 1)); elementStruct.set(elemCount + 1, auxElem); change = true; } } } } /** * Removes a single element. * * @param index The index of the element to remove. */ public void removeElement(int index) { elementStruct.remove(index); } } /* (non-Javadoc) * @see emolib.util.conf.Configurable#register(java.lang.String, emolib.util.conf.Registry) */ public void register(String name, Registry registry) throws PropertyException { super.register(name, registry); registry.register(PROP_EXTERNAL_FILE, PropertyType.STRING); } /* (non-Javadoc) * @see emolib.util.conf.Configurable#newProperties(emolib.util.conf.PropertySheet) */ public void newProperties(PropertySheet ps) throws PropertyException { super.newProperties(ps); externalFile = ps.getString(PROP_EXTERNAL_FILE, "nullpath"); } /** * Method to initialize the Classifier. */ public void initialize() { if (externalFile.equals("nullpath")) { System.out.println("ARNReduced: no external file has been provided!"); System.exit(1); } else { load(externalFile); } } /** * Main constructor of this classifier. * It assigns, by default, the term frequency as the weigthting term method, * the cosine distance as the similarity measure and no co-ocurrence frquencies. 
     */
    public ARNReduced() {
        termWeighingMeasure = "tf";
        similarityMeasure = "cos";
        assessCOF = false;
        assessPOS = false;
        assessSyns = false;
        assessStems = false;
        featSelMI = false;
        featSelChi2 = false;
        featSelTF = false;
        numSelFeats = 0;
        categoryGraph = new ArrayList<Graph>();
        fullGraph = new Graph();
        categoryRanking = new String[]{"NEG", "NEU", "POS"};
    }

    /**
     * Function to retrieve a list of the categories to deal with.
     * This function is important to iterate over the category labels
     * because the iterator returned by the hash map is not guaranteed
     * to remain constant over time.
     *
     * @return The list of the categories to deal with.
     */
    public ArrayList<String> getCategoryList() {
        ArrayList<String> catList = new ArrayList<String>();
        for (int c = 0; c < categoryGraph.size(); c++) {
            catList.add(categoryGraph.get(c).getCategoryName());
        }
        return catList;
    }

    /**
     * Function to retrieve a hash map of the categories to deal with.
     * NOTE(review): the soleCategories field is declared commented-out in this
     * file; confirm the field actually exists, otherwise this does not compile.
     *
     * @return The hash map of the categories to deal with.
     */
    public HashMap getCategoryHash() {
        return soleCategories;
    }

    /**
     * Function to recover the category-specific graphs.
     *
     * @return The list of category graphs.
     */
    public ArrayList<Graph> getCategoryGraphs() {
        return categoryGraph;
    }

    /**
     * Function to recover the full vocabulary graph.
     *
     * @return The full graph.
     */
    public Graph getVocabularyGraph() {
        return fullGraph;
    }

    /**
     * Function to retrieve the corpus size (number of words) of the given category.
     * That is, the sum of all term frequencies (terms considered to be words alone).
     *
     * @param cat The given category.
     *
     * @return The corpus size.
*/ public int getCorpusSize(String cat) { Graph theGraphOfInterest; int totalSize = 0; for (int i = 0; i < categoryGraph.size(); i++) { if (cat.equals(categoryGraph.get(i).getCategoryName())) { theGraphOfInterest = categoryGraph.get(i); for (int elemNum = 0; elemNum < theGraphOfInterest.getNumberOfElements(); elemNum++) { if (theGraphOfInterest.getElement(elemNum).isNode()) { totalSize += theGraphOfInterest.getElement(elemNum).getTermFrequency(); } } } break; } return totalSize; } /** * Function to retrieve the corpus size of tuples of the given category. * That is, the sum of all term frequencies (terms considered to be tuples). * * @param cat The given category. * * @return The corpus tuple size. */ public int getCorpusTupleSize(String cat) { Graph theGraphOfInterest; int totalSize = 0; for (int i = 0; i < categoryGraph.size(); i++) { if (cat.equals(categoryGraph.get(i).getCategoryName())) { theGraphOfInterest = categoryGraph.get(i); for (int elemNum = 0; elemNum < theGraphOfInterest.getNumberOfElements(); elemNum++) { if (theGraphOfInterest.getElement(elemNum).isLink()) { totalSize += theGraphOfInterest.getElement(elemNum).getTermFrequency(); } } } break; } return totalSize; } /** * Function to retrieve the number of terms (vocabulary size, words alone) * which frequency is greater than the given threshold, wrt a given category. * * @param wordFreqThreshold The word frequency treshold. * @param cat The given category. * * @return The vocabulary size. 
     */
    public int getVocabularySize(int wordFreqThreshold, String cat) {
        Graph theGraphOfInterest;
        int vocabSize = 0;
        for (int i = 0; i < categoryGraph.size(); i++) {
            if (cat.equals(categoryGraph.get(i).getCategoryName())) {
                theGraphOfInterest = categoryGraph.get(i);
                for (int elemNum = 0; elemNum < theGraphOfInterest.getNumberOfElements(); elemNum++) {
                    if (theGraphOfInterest.getElement(elemNum).isNode()) {
                        if (theGraphOfInterest.getElement(elemNum).getTermFrequency() >= wordFreqThreshold) {
                            vocabSize++;
                        }
                    }
                }
                // Only the requested category is inspected.
                break;
            }
        }
        return vocabSize;
    }

    /**
     * Function to retrieve the number of terms (vocabulary size, bigrams alone)
     * which frequency is greater than the given threshold, wrt a given category.
     *
     * @param bigramFreqThreshold The bigram frequency threshold.
     * @param cat The given category.
     *
     * @return The vocabulary size.
     */
    public int getBigramVocabularySize(int bigramFreqThreshold, String cat) {
        Graph theGraphOfInterest;
        int vocabSize = 0;
        for (int i = 0; i < categoryGraph.size(); i++) {
            if (cat.equals(categoryGraph.get(i).getCategoryName())) {
                theGraphOfInterest = categoryGraph.get(i);
                for (int elemNum = 0; elemNum < theGraphOfInterest.getNumberOfElements(); elemNum++) {
                    if (theGraphOfInterest.getElement(elemNum).isLink()) {
                        if (theGraphOfInterest.getElement(elemNum).getTermFrequency() >= bigramFreqThreshold) {
                            vocabSize++;
                        }
                    }
                }
                // Only the requested category is inspected.
                break;
            }
        }
        return vocabSize;
    }

    /**
     * Function to retrieve a sorted list (in frequency descending order) of words.
     * The sorting algorithm of use is the bubble sort.
     *
     * @param cat The given category.
     * @param wList The list of words to produce (filled in place).
     * @param fList The list of frequencies to produce (filled in place).
*/ public void getOrderedList(String cat, ArrayList<String> wList, ArrayList<Integer> fList) { boolean listOK; Graph theGraphOfInterest; listOK = false; for (int i = 0; i < categoryGraph.size(); i++) { if (cat.equals(categoryGraph.get(i).getCategoryName())) { theGraphOfInterest = categoryGraph.get(i); for (int elemNum = 0; elemNum < theGraphOfInterest.getNumberOfElements(); elemNum++) { if (theGraphOfInterest.getElement(elemNum).isNode()) { wList.add(theGraphOfInterest.getElement(elemNum).getTerm()); fList.add(theGraphOfInterest.getElement(elemNum).getTermFrequency()); } } listOK = true; break; } } if (listOK) { bubbleSort(wList, fList); } else { System.out.println("EmoLib: ARNReduced: WARNING! No list has been created for sorting!"); System.exit(1); } } /** * Bubble sorting method. * * @param wList The list of terms. * @param fList The list of counts to sort in descending order. */ private void bubbleSort(ArrayList<String> wList, ArrayList<Integer> fList) { boolean change; int listCounter; String auxS; int auxI; change = true; while (change) { change = false; for (listCounter = 0; listCounter < wList.size() - 2; listCounter++) { if (fList.get(listCounter + 1).intValue() > fList.get(listCounter).intValue()) { auxS = wList.get(listCounter); auxI = fList.get(listCounter); wList.set(listCounter, wList.get(listCounter + 1)); fList.set(listCounter, fList.get(listCounter + 1)); wList.set(listCounter + 1, auxS); fList.set(listCounter + 1, auxI); change = true; } } } } /** * Function to retrieve a sorted list (in frequency descending order) of tuples. * The sorting algorithm of use is the bubble sort. * * @param cat The given category. * @param wList The list if tuples to produce. * @param fList The list of frequencies to produce. 
     */
    public void getOrderedTupleList(String cat, ArrayList<String> wList, ArrayList<Integer> fList) {
        boolean listOK;
        Graph theGraphOfInterest;
        String aux;
        listOK = false;
        for (int i = 0; i < categoryGraph.size(); i++) {
            if (cat.equals(categoryGraph.get(i).getCategoryName())) {
                theGraphOfInterest = categoryGraph.get(i);
                for (int elemNum = 0; elemNum < theGraphOfInterest.getNumberOfElements(); elemNum++) {
                    if (theGraphOfInterest.getElement(elemNum).isLink()) {
                        // Tuples are rendered "left_right", matching getArrayOfTerms.
                        aux = theGraphOfInterest.getElement(elemNum).getLeftTerm() + "_"
                                + theGraphOfInterest.getElement(elemNum).getRightTerm();
                        wList.add(aux);
                        fList.add(theGraphOfInterest.getElement(elemNum).getTermFrequency());
                    }
                }
                listOK = true;
                break;
            }
        }
        if (listOK) {
            bubbleSort(wList, fList);
        } else {
            System.out.println("EmoLib: ARNReduced: WARNING! No list has been created for sorting!");
            System.exit(1);
        }
    }

    /**
     * Method to set the term weighting measure.
     *
     * @param twm The term weighting measure.
     */
    public void setTermWeighingMeasure(String twm) {
        termWeighingMeasure = twm;
    }

    /**
     * Method to set the similarity measure.
     *
     * @param simil The similarity measure.
     */
    public void setSimilarityMeasure(String simil) {
        similarityMeasure = simil;
    }

    /**
     * Method to set the assessment of co-occurrence frequencies (tuples actually).
     *
     * @param flag The set flag.
     */
    public void setCOF(boolean flag){
        assessCOF = flag;
    }

    /**
     * Method to set the assessment of POS tags (grammatical analysis).
     *
     * @param flag The set flag.
     */
    public void setPOS(boolean flag) {
        assessPOS = flag;
    }

    /**
     * Method to set the assessment of synonyms.
     *
     * @param flag The set flag.
     */
    public void setSynonyms(boolean flag) {
        assessSyns = flag;
    }

    /**
     * Method to set the assessment of stemmed terms.
     *
     * @param flag The set flag.
     */
    public void setStems(boolean flag) {
        assessStems = flag;
    }

    /**
     * Method to set the Mutual Information global feature selection.
     *
     * @param flag The set flag.
     * @param numFeats The number of feats per class to select.
*/ public void setFeatSelMI(boolean flag, int numFeats) { featSelMI = flag; numSelFeats = numFeats; } /** * Method to set the Chi square global feature selection. * * @param flag The set flag. * @param numFeats The number of feats per class to select. */ public void setFeatSelChi2(boolean flag, int numFeats) { featSelChi2 = flag; numSelFeats = numFeats; } /** * Method to set the Term Frequency global feature selection. * * @param flag The set flag. * @param numFeats The number of feats per class to select. */ public void setFeatSelTF(boolean flag, int numFeats) { featSelTF = flag; numSelFeats = numFeats; } /** * Function to retrieve the similarity of a given text with a given category. * * @param inputText The given text. * @param cat The given category. * * @return The resulting similarity. */ public float getSimilarity(FeatureBox inputText, String cat) { float similarity = 0; Graph inputTextGraph; Graph emotionGraph; for (int i = 0; i < categoryGraph.size(); i++) { if (cat.equals(categoryGraph.get(i).getCategoryName())) { inputTextGraph = buildGraph(inputText); emotionGraph = buildEmotionGraph(inputTextGraph, i); applyTermWeighing(inputTextGraph, i); applyTermWeighing(emotionGraph, i); similarity = computeSimilarity(inputTextGraph, emotionGraph); break; } } return similarity; } /** * Method to weight the terms of corresponding to the model vector. * In some cases the Term Weighting method needs information * from the original domain model (e.g., the |T^k| in (Al&iacute;as et al., 2008) for the ITF). * That's the reason for using this method instead of the general applyTermWeighing. * * @param The given graph to weight. * @param The given catetory for supervised term weighting methods. 
*/ public void applyModelTermWeighing(Graph inputGraph, int cat) { GraphElement tempElem; int tfOthers, tfCategory; double rf; if (termWeighingMeasure.equals("itf")) { for (int elem = 0; elem < inputGraph.getNumberOfElements(); elem++) { tempElem = inputGraph.getElement(elem); if (tempElem.getTermFrequency() != 0) { float tfalldomain = getTFAllModel(cat); tempElem.setWeighedMeasure((float)Math.log((double)tfalldomain / (double)tempElem.getTermFrequency())); } else { // Since tf = 0, ITF = inf, therefore 0 is assigned. tempElem.setWeighedMeasure(0); } } } else { // This function contemplates the ITF factor for its generic use, but since the program flow // has reached this point, there's no problem with it modifying the weights (otherwise // it would have computed the other part of the conditional). // The ITF code in this method is very similar, except for the model-based vectors. applyTermWeighing(inputGraph, cat); } } /** * Function to retrieve the sum of all term frequencies according to the given * category. * It's like the other corpus size getters, but regardless of the elements * being nodes or links (words or tuples). * * @param cat The given category. * * @return The total sum. */ private float getTFAllModel(int cat) { Graph model = categoryGraph.get(cat); return (float)model.getTotalSumTF(); } /** * Method to apply a term weighting methodology to the given graph. * In the case that the Term Weighting strategy of the the ARN is supervised, * a category is also provided. If no weighting strategy is needed, the input * graph will not be modified (it will just contain the default frequencies * of the terms within). * * @param The given graph to weight. * @param The given catetory for supervised term weighting methods. 
*/ public void applyTermWeighing(Graph inputGraph, int cat) { GraphElement tempElem; int tfOthers, tfCategory; double rf; if (termWeighingMeasure.equals("tf")) { for (int elem = 0; elem < inputGraph.getNumberOfElements(); elem++) { tempElem = inputGraph.getElement(elem); tempElem.setWeighedMeasure((float)tempElem.getTermFrequency()); } } else if (termWeighingMeasure.equals("ltf")) { for (int elem = 0; elem < inputGraph.getNumberOfElements(); elem++) { tempElem = inputGraph.getElement(elem); tempElem.setWeighedMeasure((float)Math.log((double)1 + (double)tempElem.getTermFrequency())); } } else if (termWeighingMeasure.equals("binary")) { for (int elem = 0; elem < inputGraph.getNumberOfElements(); elem++) { tempElem = inputGraph.getElement(elem); if (tempElem.getTermFrequency() >= 1) { tempElem.setWeighedMeasure(1); } else { tempElem.setWeighedMeasure(0); } } } else if (termWeighingMeasure.equals("itf")) { // When tf = 0, problems need finer discussion, apart from the resulting indetermination double totalSumTF = (double)inputGraph.getTotalSumTF(); for (int elem = 0; elem < inputGraph.getNumberOfElements(); elem++) { tempElem = inputGraph.getElement(elem); if (tempElem.getTermFrequency() != 0) { tempElem.setWeighedMeasure((float)Math.log(totalSumTF / (double)tempElem.getTermFrequency())); } else { tempElem.setWeighedMeasure(0); } } // From E. Leopold and J. 
Kinderman, Text Categorisation with SVM, r=1 (ITF param) } else if (termWeighingMeasure.equals("itf_leopold")) { for (int elem = 0; elem < inputGraph.getNumberOfElements(); elem++) { tempElem = inputGraph.getElement(elem); tempElem.setWeighedMeasure((float)(1 - (float)1 / (float)(tempElem.getTermFrequency() + 1))); } } else if (termWeighingMeasure.equals("tfrf")) { for (int elem = 0; elem < inputGraph.getNumberOfElements(); elem++) { tempElem = inputGraph.getElement(elem); if (tempElem.getTermFrequency() > 0) { if (tempElem.isNode()) { if (categoryGraph.get(cat).containsNode(tempElem.getTerm())) { tfCategory = categoryGraph.get(cat).getNodeTermFrequency(tempElem.getTerm()); tfOthers = calcTFOthersSum(tempElem, cat); rf = Math.log((double)2 + ((double)tfCategory / (double)tfOthers)) / Math.log((double)2); tempElem.setWeighedMeasure((float)tempElem.getTermFrequency() * (float)rf); } else { tempElem.setWeighedMeasure((float)tempElem.getTermFrequency()); } } else if (tempElem.isLink()) { if (categoryGraph.get(cat).containsLink(tempElem.getLeftTerm(), tempElem.getRightTerm())) { tfCategory = categoryGraph.get(cat).getLinkTermFrequency(tempElem.getLeftTerm(), tempElem.getRightTerm()); tfOthers = calcTFOthersSum(tempElem, cat); rf = Math.log((double)2 + ((double)tfCategory / (double)tfOthers)) / Math.log((double)2); tempElem.setWeighedMeasure((float)tempElem.getTermFrequency() * (float)rf); } else { tempElem.setWeighedMeasure((float)tempElem.getTermFrequency()); } } else { System.out.println("ARNReduced: error weighting the terms!"); } } else { tempElem.setWeighedMeasure(0); } } } else if (termWeighingMeasure.equals("ltfrf")) { // This code is copied from above, except for the log(1 + tf) for (int elem = 0; elem < inputGraph.getNumberOfElements(); elem++) { tempElem = inputGraph.getElement(elem); if (tempElem.getTermFrequency() > 0) { if (tempElem.isNode()) { if (categoryGraph.get(cat).containsNode(tempElem.getTerm())) { tfCategory = 
categoryGraph.get(cat).getNodeTermFrequency(tempElem.getTerm()); tfOthers = calcTFOthersSum(tempElem, cat); rf = Math.log((double)2 + ((double)tfCategory / (double)tfOthers)) / Math.log((double)2); tempElem.setWeighedMeasure((float)Math.log((double)1 + (double)tempElem.getTermFrequency()) * (float)rf); } else { tempElem.setWeighedMeasure((float)Math.log((double)1 + (double)tempElem.getTermFrequency())); } } else if (tempElem.isLink()) { if (categoryGraph.get(cat).containsLink(tempElem.getLeftTerm(), tempElem.getRightTerm())) { tfCategory = categoryGraph.get(cat).getLinkTermFrequency(tempElem.getLeftTerm(), tempElem.getRightTerm()); tfOthers = calcTFOthersSum(tempElem, cat); rf = Math.log((double)2 + ((double)tfCategory / (double)tfOthers)) / Math.log((double)2); tempElem.setWeighedMeasure((float)Math.log((double)1 + (double)tempElem.getTermFrequency()) * (float)rf); } else { tempElem.setWeighedMeasure((float)Math.log((double)1 + (double)tempElem.getTermFrequency())); } } else { System.out.println("ARNReduced: error weighting the terms!"); } } else { tempElem.setWeighedMeasure(0); } } } else if (termWeighingMeasure.equals("crrf")) { // This code is copied from above, except for the sentiment rank double rankTFOthers; for (int elem = 0; elem < inputGraph.getNumberOfElements(); elem++) { tempElem = inputGraph.getElement(elem); if (tempElem.getTermFrequency() > 0) { if (tempElem.isNode()) { if (categoryGraph.get(cat).containsNode(tempElem.getTerm())) { tfCategory = categoryGraph.get(cat).getNodeTermFrequency(tempElem.getTerm()); rankTFOthers = calcTFOthersRankSum(tempElem, cat); rf = Math.log((double)2 + ((double)tfCategory / rankTFOthers)) / Math.log((double)2); tempElem.setWeighedMeasure((float)Math.log((double)1 + (double)tempElem.getTermFrequency()) * (float)rf); } else { tempElem.setWeighedMeasure((float)Math.log((double)1 + (double)tempElem.getTermFrequency())); } } else if (tempElem.isLink()) { if (categoryGraph.get(cat).containsLink(tempElem.getLeftTerm(), 
tempElem.getRightTerm())) { tfCategory = categoryGraph.get(cat).getLinkTermFrequency(tempElem.getLeftTerm(), tempElem.getRightTerm()); rankTFOthers = calcTFOthersRankSum(tempElem, cat); rf = Math.log((double)2 + ((double)tfCategory / rankTFOthers)) / Math.log((double)2); tempElem.setWeighedMeasure((float)Math.log((double)1 + (double)tempElem.getTermFrequency()) * (float)rf); } else { tempElem.setWeighedMeasure((float)Math.log((double)1 + (double)tempElem.getTermFrequency())); } } else { System.out.println("ARNReduced: error weighting the terms!"); } } else { tempElem.setWeighedMeasure(0); } } } else if (termWeighingMeasure.equals("ltfrfditf")) { // Dual ltfrf and itf. This is the one inspired by the old unsupported falias' code. // This only makes sense weighting the input graph, where the terms have some tf // within the sentence, but may be found (or not) in the given model. Instead, // for model vectors, all terms will be found in the model (it would be stupid // otherwise), and those which are missed, will always have a tf = 0, and thus // will not be weighted. 
for (int elem = 0; elem < inputGraph.getNumberOfElements(); elem++) { tempElem = inputGraph.getElement(elem); if (tempElem.isNode()) { if (categoryGraph.get(cat).containsNode(tempElem.getTerm())) { // Apply ltfrf tfCategory = categoryGraph.get(cat).getNodeTermFrequency(tempElem.getTerm()); tfOthers = calcTFOthersSum(tempElem, cat); rf = Math.log((double)2 + ((double)tfCategory / (double)tfOthers)) / Math.log((double)2); tempElem.setWeighedMeasure((float)Math.log((double)1 + (double)tempElem.getTermFrequency()) * (float)rf); } else { // Apply ITF if (tempElem.getTermFrequency() == 0) { tempElem.setWeighedMeasure(0); } else { tempElem.setWeighedMeasure((float)Math.log((double)inputGraph.getTotalSumTF() / (double)tempElem.getTermFrequency())); } } } else if (tempElem.isLink()) { if (categoryGraph.get(cat).containsLink(tempElem.getLeftTerm(), tempElem.getRightTerm())) { tfCategory = categoryGraph.get(cat).getLinkTermFrequency(tempElem.getLeftTerm(), tempElem.getRightTerm()); tfOthers = calcTFOthersSum(tempElem, cat); rf = Math.log((double)2 + ((double)tfCategory / (double)tfOthers)) / Math.log((double)2); tempElem.setWeighedMeasure((float)Math.log((double)1 + (double)tempElem.getTermFrequency()) * (float)rf); } else { if (tempElem.getTermFrequency() == 0) { tempElem.setWeighedMeasure(0); } else { tempElem.setWeighedMeasure((float)Math.log((double)inputGraph.getTotalSumTF() / (double)tempElem.getTermFrequency())); } } } else { System.out.println("ARNReduced: error weighting the terms!"); } } } else { System.out.println("ARNReduced: an error accurred with the term weighting measure definition!"); } } /** * Function to calculate the tf sum for all categories except for the given one. * * @param term The term to search in the affective dataset. * @param ex The exception category. * * @return The sum for the rest of the categories. 
 */
    private int calcTFOthersSum(GraphElement term, int ex) {
        // Accumulates the term's frequency over every category graph except 'ex'.
        int sum = 0;
        for (int i = 0; i < categoryGraph.size(); i++) {
            if (i != ex) {
                if (term.isNode()) {
                    if (categoryGraph.get(i).containsNode(term.getTerm())) {
                        sum += categoryGraph.get(i).getNodeTermFrequency(term.getTerm());
                    }
                } else if (term.isLink()) {
                    if (categoryGraph.get(i).containsLink(term.getLeftTerm(), term.getRightTerm())) {
                        sum += categoryGraph.get(i).getLinkTermFrequency(term.getLeftTerm(), term.getRightTerm());
                    }
                } else {
                    // NOTE(review): this variant only warns and keeps going, while
                    // calcTFOthersRankSum below exits on the same condition --
                    // confirm the asymmetry is intentional.
                    System.out.println("ARNReduced: error weighting the terms!");
                }
            }
        }
        if (sum == 0) {
            // This is for the max(1, sum) in order to avoid a division by zero
            sum = 1;
        }
        return sum;
    }

    /**
     * Function to calculate the tf sum for all categories except for the given one
     * considering the rank within the categories.
     *
     * @param term The term to search in the affective dataset.
     * @param ex The exception category.
     *
     * @return The sum for the rest of the categories.
     */
    private double calcTFOthersRankSum(GraphElement term, int ex) {
        double totalSum = 0;
        double sum;
        for (int i = 0; i < categoryGraph.size(); i++) {
            if (i != ex) {
                // Per-category frequency, scaled by the rank distance to 'ex'.
                sum = 0;
                if (term.isNode()) {
                    if (categoryGraph.get(i).containsNode(term.getTerm())) {
                        sum += (double)categoryGraph.get(i).
                            getNodeTermFrequency(term.getTerm());
                    }
                } else if (term.isLink()) {
                    if (categoryGraph.get(i).containsLink(term.getLeftTerm(), term.getRightTerm())) {
                        sum += (double)categoryGraph.get(i).getLinkTermFrequency(
                            term.getLeftTerm(), term.getRightTerm());
                    }
                } else {
                    System.out.println("ARNReduced: error weighting the terms!");
                    System.exit(1);
                }
                totalSum += categoryRankDifference(i, ex) * sum;
            }
        }
        if (totalSum == 0) {
            // This is for the max(1, sum) in order to avoid a division by zero
            totalSum = 1;
        }
        return totalSum;
    }

    /**
     * Calculates the relevance of the category ranks.
     *
     * @param c1 The first category.
     * @param c2 The second category.
     *
     * @return The rank-weighted TF sum.
 */
    private double categoryRankDifference(int c1, int c2) {
        // Locate both categories in the static ranking and use 3^|distance|
        // as the relevance factor.
        String cOne = categoryGraph.get(c1).getCategoryName().trim();
        String cTwo = categoryGraph.get(c2).getCategoryName().trim();
        int index1 = 0;
        int index2 = 0;
        for (int i = 0; i < categoryRanking.length; i++) {
            if (categoryRanking[i].equals(cOne)) {
                index1 = i;
            }
            if (categoryRanking[i].equals(cTwo)) {
                index2 = i;
            }
        }
        double rankdif = (double)Math.abs(index1 - index2);
        if (rankdif == 0) {
            // Identical ranks should be impossible for two distinct categories.
            // NOTE(review): a category name missing from categoryRanking also
            // lands here (both indices stay 0) -- confirm the ranking covers
            // every trained category.
            System.out.println("Rank error!");
            System.exit(1);
        }
        rankdif = Math.pow(3, rankdif);
        return rankdif;
    }

    /**
     * Function to build a graph from input features.
     *
     * @param inputFeatures The input features.
     *
     * @return The resulting graph.
     */
    public Graph buildGraph(FeatureBox inputFeatures) {
        String theWords = inputFeatures.getWords();
        String[] wordChunks = theWords.split(" ");
        String posTags = inputFeatures.getPOSTags();
        String[] posChunks = posTags.split(" ");
        String stems = inputFeatures.getStems();
        String[] stemChunks = stems.split(" ");
        Graph textGraph = new Graph();
        // The term representation depends on the configured flags:
        // stems vs. surface words, optionally suffixed with the POS tag.
        // NOTE(review): assumes wordChunks, posChunks and stemChunks are the
        // same length -- confirm FeatureBox guarantees aligned streams.
        if (assessPOS) {
            if (assessStems) {
                textGraph.addNode(stemChunks[0] + "_" + posChunks[0]);
            } else {
                textGraph.addNode(wordChunks[0] + "_" + posChunks[0]);
            }
        } else {
            if (assessStems) {
                textGraph.addNode(stemChunks[0]);
            } else {
                textGraph.addNode(wordChunks[0]);
            }
        }
        // Remaining terms: optionally add the (previous, current) co-occurrence
        // link before adding the current node.
        for (int i = 1; i < wordChunks.length; i++) {
            if (assessPOS) {
                if (assessCOF) {
                    if (assessStems) {
                        textGraph.addLink(stemChunks[i - 1] + "_" + posChunks[i - 1], stemChunks[i] + "_" + posChunks[i]);
                    } else {
                        textGraph.addLink(wordChunks[i - 1] + "_" + posChunks[i - 1], wordChunks[i] + "_" + posChunks[i]);
                    }
                }
                if (assessStems) {
                    textGraph.addNode(stemChunks[i] + "_" + posChunks[i]);
                } else {
                    textGraph.addNode(wordChunks[i] + "_" + posChunks[i]);
                }
            } else {
                if (assessCOF) {
                    if (assessStems) {
                        textGraph.addLink(stemChunks[i - 1], stemChunks[i]);
                    } else {
                        textGraph.addLink(wordChunks[i - 1], wordChunks[i]);
                    }
                }
                if (assessStems) {
                    textGraph.addNode(stemChunks[i]);
                } else {
                    textGraph.addNode(wordChunks[i]);
                }
            }
        }
        // No COF with synonyms, as they are not directly observed in the text.
        if (inputFeatures.containsSynonyms()) {
            String syns = inputFeatures.getSynonyms();
            String[] synsChunks = syns.split(" ");
            String ssyns = inputFeatures.getStemmedSynonyms();
            String[] ssynsChunks = ssyns.split(" ");
            if (assessSyns) {
                for (int i = 0; i < synsChunks.length; i++) {
                    if (assessPOS) {
                        // Synonyms carry no observed POS tag; "_NOMBRE" (noun)
                        // is used as the fixed suffix.
                        if (assessStems) {
                            textGraph.addNode(ssynsChunks[i] + "_NOMBRE");
                        } else {
                            textGraph.addNode(synsChunks[i] + "_NOMBRE");
                        }
                    } else {
                        if (assessStems) {
                            textGraph.addNode(ssynsChunks[i]);
                        } else {
                            textGraph.addNode(synsChunks[i]);
                        }
                    }
                }
            }
        }
        //
        return textGraph;
    }

    /**
     * Function to build a full graph with the term frequencies given by
     * the input terms.
     *
     * @param input The input text graph.
     *
     * @return The full graph.
     */
    public Graph buildFullGraph(Graph input) {
        // Clone the vocabulary-wide graph and overwrite every frequency with
        // the count observed in 'input' (0 when the element is absent there).
        Graph fullNet = fullGraph.cloneGraph();
        fullNet.resetTermFrequencies();
        GraphElement temp;
        for (int i = 0; i < fullNet.getNumberOfElements(); i++) {
            temp = fullNet.getElement(i);
            if (temp.isNode()) {
                if (input.containsNode(temp.getTerm())) {
                    temp.setTermFrequency(input.getNodeTermFrequency(temp.getTerm()));
                } else {
                    temp.setTermFrequency(0);
                }
            } else if (temp.isLink()) {
                if (input.containsLink(temp.getLeftTerm(), temp.getRightTerm())) {
                    temp.setTermFrequency(input.getLinkTermFrequency(temp.getLeftTerm(), temp.getRightTerm()));
                } else {
                    temp.setTermFrequency(0);
                }
            } else {
                System.out.println("ARNReduced: ERROR One element of input graph is not " +
                    "defined neither as a node or a link!");
            }
        }
        //
        return fullNet;
    }

    /**
     * Function to build an emotion graph, a structure with the terms defined by the
     * input text and the term frequencies given by an emotion class.
     *
     * @param input The input text graph.
     * @param catNumber The category identifier.
     *
     * @return The emotion graph.
*/ private Graph buildEmotionGraph(Graph input, int catNumber) { Graph emotionGraph = input.cloneGraph(); Graph categorySpecificGraph = categoryGraph.get(catNumber); GraphElement temp; for (int i = 0; i < emotionGraph.getNumberOfElements(); i++) { temp = emotionGraph.getElement(i); if (temp.isNode()) { if (categorySpecificGraph.containsNode(temp.getTerm())) { temp.setTermFrequency(categorySpecificGraph.getNodeTermFrequency(temp.getTerm())); } else { temp.setTermFrequency(0); } } else if (temp.isLink()) { if (categorySpecificGraph.containsLink(temp.getLeftTerm(), temp.getRightTerm())) { temp.setTermFrequency(categorySpecificGraph.getLinkTermFrequency(temp.getLeftTerm(), temp.getRightTerm())); } else { temp.setTermFrequency(0); } } else { System.out.println("ARNReduced: ERROR One element of input graph is not " + "defined neither as a node or a link!"); } } return emotionGraph; } /** * Function to compute the similarity between two graphs. * The graphs may be vectorised and used to compute a distributional similarity measure, like * the cosine, or directly processed to compute a network-based similarity measure, like * a graph distance. * * @param testGraph The test graph. * @param emoGraph The emotion specific graph. * * @return The similarity between the two graphs. 
*/ private float computeSimilarity(Graph testGraph, Graph emoGraph) { float similarity = 0; if (similarityMeasure.equals("cos")) { // Compute the similarity with the cosine distance obtained with the Law of Cosines float vTestNorm = computeNorm(testGraph.exportWeighedVector()); float vEmoNorm = computeNorm(emoGraph.exportWeighedVector()); float vDifNorm = computeNorm(testGraph.exportWeighedVector(), emoGraph.exportWeighedVector()); similarity = (vTestNorm * vTestNorm + vEmoNorm * vEmoNorm - vDifNorm * vDifNorm) / (2 * vTestNorm * vEmoNorm); } else if (similarityMeasure.equals("dotprod")) { float[] vTest = testGraph.exportWeighedVector(); float[] vEmo = emoGraph.exportWeighedVector(); for (int i = 0; i < vTest.length; i++) { similarity += vTest[i] * vEmo[i]; } } else if (similarityMeasure.equals("matrixdifnorm2")) { // The inverse of the norm of the difference matrix is taken for a similarity Matrix wMatText = testGraph.exportWeightedMatrix(); Matrix wMatEmo = emoGraph.exportWeightedMatrix(); similarity = (float)1 / (float)((wMatText.minus(wMatEmo)).norm2()); } else if (similarityMeasure.equals("cosMatrix")) { // Compute the similarity with the matrix cosine distance with the Frobenius norm // It is the same as the cosine distance of the vectorised matrices that represents the graphs Matrix wMatText = testGraph.exportWeightedMatrix(); Matrix wMatEmo = emoGraph.exportWeightedMatrix(); similarity = (float)((wMatText.transpose().times(wMatEmo).trace()) / (wMatText.normF() * wMatEmo.normF())); } else { System.out.println("ARNReduced: en error occurred with the similarity measure!"); } return similarity; } /** * Function to compute the norm of the input vector. * * @param inputVector The input vector. * * @return The norm. 
*/ private float computeNorm(float[] inputVector) { // Euclidean norm float theNorm = 0; for (int i = 0; i < inputVector.length; i++) { theNorm += inputVector[i] * inputVector[i]; } return (float)Math.sqrt((double)theNorm); } /** * Function to compute the norm of the vector difference of input vectors. * * @param inputVectorOne The first input vector. * @param inputVectortTwo The second input vector. * * @return The norm of the difference vector. */ private float computeNorm(float[] inputVectorOne, float[] inputVectortTwo) { // Euclidean norm float theNorm = 0; if (inputVectorOne.length != inputVectortTwo.length) { System.out.println("ARNReduced: vector lengths don't match! Norm cannot be computed"); } else { float[] difVect = new float[inputVectortTwo.length]; for (int i = 0; i < difVect.length; i++) { difVect[i] = inputVectorOne[i] - inputVectortTwo[i]; } theNorm = computeNorm(difVect); } return theNorm; } /* (non-Javadoc) * @see emolib.classifier.Classifier#getCategory(emolib.classifier.FeatureBox) */ public String getCategory(FeatureBox inputFeatures) { String mostProbableCategory = categoryGraph.get(0).getCategoryName(); float similarity = getSimilarity(inputFeatures, mostProbableCategory); for (int i = 1; i < categoryGraph.size(); i++) { if (getSimilarity(inputFeatures, categoryGraph.get(i).getCategoryName()) > similarity) { mostProbableCategory = categoryGraph.get(i).getCategoryName(); similarity = getSimilarity(inputFeatures, categoryGraph.get(i).getCategoryName()); } } return mostProbableCategory; } /* (non-Javadoc) * @see emolib.classifier.Classifier#trainingProcedure() */ public void trainingProcedure() { ArrayList<FeatureBox> exampleFeatures = getListOfExampleFeatures(); ArrayList<String> exampleCategories = getListOfExampleCategories(); soleCategories = new HashMap<String, Integer>(); int categoryOrder = 0; int presentClassNumber; String presentCat; FeatureBox presentFeats; Graph temp, emoTemp; GraphElement tempElem; for (int num_examples = 0; 
num_examples < exampleFeatures.size(); num_examples++) { presentCat = exampleCategories.get(num_examples); presentFeats = exampleFeatures.get(num_examples); if (!soleCategories.containsKey(presentCat)) { soleCategories.put(presentCat, new Integer(categoryOrder)); presentClassNumber = categoryOrder; categoryOrder++; temp = new Graph(); temp.setCategoryName(presentCat); categoryGraph.add(temp); } else { presentClassNumber = soleCategories.get(presentCat).intValue(); } temp = buildGraph(presentFeats); emoTemp = categoryGraph.get(presentClassNumber); for (int i = 0; i < temp.getNumberOfElements(); i++) { tempElem = temp.getElement(i); if (tempElem.isNode()) { emoTemp.addNode(tempElem); fullGraph.addNode(tempElem); } else { emoTemp.addLink(tempElem); fullGraph.addLink(tempElem); } } } // If one term selection method is set... if (featSelMI || featSelChi2 || featSelTF) { globalFeatSel(); } } /** * Global feature selection procedure. * Based on (Manning, et al., 2008). It uses term frequencies instead * of document counts. * --<br> * (Manning, et al., 2008) Manning, C. D., Raghavan, P. and * Schutze, H., "An Introduction to Information Retrieval", 2008. */ private void globalFeatSel() { double N00, N01, N10, N11; double N = (double)fullGraph.getTotalSumTF(); for (int c = 0; c < categoryGraph.size(); c++) { // Compute term feature utility for (int t = 0; t < categoryGraph.get(c).getNumberOfElements(); t++) { N11 = (double)categoryGraph.get(c).getElement(t). getTermFrequency(); N10 = (double)calcTFOthersSum(categoryGraph.get(c). 
getElement(t), c); N01 = (double)(categoryGraph.get(c).getTotalSumTF() - N11); N00 = (double)(N - N11 - N10 - N01); if (featSelMI) { categoryGraph.get(c).getElement(t).setUtilityMeasure( (N11 / N) * Math.log((N * N11) / ((N10 + N11) * (N01 + N11))) / Math.log((double)2) + (N01 / N) * Math.log((N * N01) / ((N00 + N01) * (N01 + N11))) / Math.log((double)2) + (N10 / N) * Math.log((N * N10) / ((N10 + N11) * (N00 + N10))) / Math.log((double)2) + (N00 / N) * Math.log((N * N00) / ((N00 + N01) * (N00 + N10))) / Math.log((double)2)); } else if (featSelChi2) { categoryGraph.get(c).getElement(t).setUtilityMeasure( N * Math.pow((N11 * N00 - N10 * N01), (double)2) / ((N11 + N01) * (N11 + N10) * (N10 + N00) * (N01 + N00))); } else if (featSelTF) { categoryGraph.get(c).getElement(t).setUtilityMeasure(N11); } } } for (int c = 0; c < categoryGraph.size(); c++) { // Sort list // Prune list keeping the first numFeats features. categoryGraph.get(c).pruneUtility(numSelFeats); } // Update full graph -> vocabulary GraphElement tmpElement; boolean found; ArrayList<Integer> elemsToRemove = new ArrayList<Integer>(); for (int t = 0; t < fullGraph.getNumberOfElements(); t++) { tmpElement = fullGraph.getElement(t); found = false; for (int c = 0; c < categoryGraph.size(); c++) { if (categoryGraph.get(c).containsElement(tmpElement)) { found = true; break; } } if (!found) { elemsToRemove.add(new Integer(t)); } } for (int rem = elemsToRemove.size() - 1; rem >= 0; rem--) { fullGraph.removeElement(elemsToRemove.get(rem).intValue()); } } /* (non-Javadoc) * @see emolib.classifier.Classifier#save(java.lang.String) */ public void save(String path) { } /* (non-Javadoc) * @see emolib.classifier.Classifier#load(java.lang.String) */ public void load(String path) { } /* (non-Javadoc) * @see emolib.classifier.Classifier#resetExamples() */ @Override public void resetExamples() { super.resetExamples(); categoryGraph = new ArrayList<Graph>(); } /** * Functionality test. 
 */
    @Test
    public void simpleClassification() {
        // Train a tiny three-class model and check that unseen sentences are
        // routed to the class sharing the most lexical material with them.
        ARNReduced arnr = new ARNReduced();
        FeatureBox feat = new FeatureBox();
        feat.setWords("I hate going to the dentist .");
        arnr.inputTrainingExample(feat, "NEG");
        //
        feat = new FeatureBox();
        feat.setWords("I swim a lot .");
        arnr.inputTrainingExample(feat, "NEU");
        //
        feat = new FeatureBox();
        feat.setWords("I love reading books .");
        arnr.inputTrainingExample(feat, "POS");
        //
        arnr.train();
        //
        // "dentist" appears only in the NEG example, so NEG should win.
        feat = new FeatureBox();
        feat.setWords("I like my dentist .");
        Assert.assertTrue(arnr.getCategory(feat).equals("NEG"));
        //
        // "love" appears only in the POS example, so POS should win.
        feat = new FeatureBox();
        feat.setWords("You love .");
        Assert.assertTrue(arnr.getCategory(feat).equals("POS"));
    }
}
package com.kangmin.todolist.util;

/**
 * Model-attribute name constants shared between the controllers and views.
 * Pure constant holder; it is never meant to be instantiated.
 */
public final class ModelAttributes {

    public static final String WELCOME_MESSAGE = "welcome_message";
    public static final String MESSAGE = "message";
    public static final String ERROR = "error";
    public static final String USER = "user";
    public static final String USER_DTO = "user_dto";
    public static final String USER_DTO_PWD = "user_dto_pwd";
    public static final String LOGIN_MESSAGE = "login_message";
    public static final String REGISTER_ERROR = "register_error";
    public static final String HOME_MESSAGE = "home_message";
    public static final String HOME_ERROR = "home_error";
    public static final String UNAUTHORIZED_MESSAGE = "unauthorized_message";
    public static final String ADMIN_HOME_MESSAGE = "admin_home_message";
    public static final String ADMIN_HOME_ERROR = "admin_home_error";
    public static final String ALL_USERS = "all_users";
    public static final String ACTIVE_USERS = "active_users";
    public static final String DISABLED_USERS = "disabled_users";
    public static final String TO_DO_ITEM_DTO = "draft_item";
    public static final String TO_DO_ITEMS = "to_do_items";
    public static final String INACTIVE_TO_DO_ITEMS = "backlogItems";
    public static final String NUM_OF_TO_DO_ITEMS = "number_of_to_do_items";
    public static final String NUM_OF_INACTIVE_ITEMS = "number_of_inactive_items";

    // == form test ==
    public static final String STUDENT = "student";
    public static final String STUDENT_HELLO_MESSAGE = "helloMessage";

    /**
     * Utility class: the constructor is private so it cannot be instantiated.
     * The original declared it protected and called super(), but the class is
     * final, so no subclass can ever exist and private is the conventional
     * access level for a constants holder.
     */
    private ModelAttributes() {
    }
}
/** * @Title UserAuthenticationFilter.java * @date 2017-12-4 上午10:46:12 * @Copyright: 2017 */ package com.howard.base.security; import javax.servlet.ServletRequest; import javax.servlet.ServletResponse; import javax.servlet.http.HttpServletRequest; import org.apache.shiro.SecurityUtils; import org.apache.shiro.subject.Subject; import org.apache.shiro.web.filter.authc.UserFilter; /** * 验证用户是否在线 用户已验证通过 * * @author Chenjx * @version 1.0 */ public class UserAuthenticationFilter extends UserFilter { /* * (non-Javadoc) * * @see * org.apache.shiro.web.filter.authc.UserFilter#isAccessAllowed(javax.servlet * .ServletRequest, javax.servlet.ServletResponse, java.lang.Object) */ @Override protected boolean isAccessAllowed(ServletRequest request, ServletResponse response, Object mappedValue) { HttpServletRequest req = (HttpServletRequest) request; System.out.println("验证登陆信息==" + req.getRequestURI()); System.out.println("sessionId==" + req.getSession().getId()); Subject currentUser = SecurityUtils.getSubject(); if (null != currentUser) { if (currentUser.isAuthenticated() || currentUser.isRemembered()) { return true; } } // 返回false表示不执行后续的过滤器 return false; } }
package com.mattstine.dddworkshop.pizzashop.delivery; import com.mattstine.dddworkshop.pizzashop.infrastructure.events.ports.EventLog; import com.mattstine.dddworkshop.pizzashop.infrastructure.events.ports.Topic; import com.mattstine.dddworkshop.pizzashop.infrastructure.repository.adapters.InProcessEventSourcedRepository; import com.mattstine.dddworkshop.pizzashop.kitchen.KitchenOrderRef; import java.util.HashMap; import java.util.Map; /** * @author Matt Stine */ final class InProcessEventSourcedDeliveryOrderRepository extends InProcessEventSourcedRepository<DeliveryOrderRef, DeliveryOrder, DeliveryOrder.OrderState, DeliveryOrderEvent, DeliveryOrderAddedEvent> implements DeliveryOrderRepository { private final Map<KitchenOrderRef, DeliveryOrderRef> kitchenOrderRefToDeliveryOrderRef; InProcessEventSourcedDeliveryOrderRepository(EventLog eventLog, Topic topic) { super(eventLog, DeliveryOrderRef.class, DeliveryOrder.class, DeliveryOrder.OrderState.class, DeliveryOrderAddedEvent.class, topic); kitchenOrderRefToDeliveryOrderRef = new HashMap<>(); eventLog.subscribe(topic, e -> { if (e instanceof DeliveryOrderAddedEvent) { kitchenOrderRefToDeliveryOrderRef.put(((DeliveryOrderAddedEvent) e).getState().getKitchenOrderRef(), ((DeliveryOrderAddedEvent) e).getRef()); } }); } @Override public DeliveryOrder findByKitchenOrderRef(KitchenOrderRef kitchenOrderRef) { DeliveryOrderRef ref = kitchenOrderRefToDeliveryOrderRef.get(kitchenOrderRef); if (ref != null) { return this.findByRef(ref); } return null; } }
package org.uniprot.api.idmapping.service;

import static java.util.Collections.emptyList;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.contains;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.uniprot.api.idmapping.model.PredefinedIdMappingStatus.ENRICHMENT_WARNING;
import static org.uniprot.api.idmapping.service.PIRResponseConverter.isValidIdPattern;

import java.util.List;
import java.util.stream.Stream;

import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.web.client.HttpServerErrorException;
import org.uniprot.api.idmapping.controller.request.IdMappingJobRequest;
import org.uniprot.api.idmapping.model.IdMappingResult;
import org.uniprot.api.idmapping.model.IdMappingStringPair;

/**
 * Unit tests for {@link PIRResponseConverter}, which parses raw PIR mapping
 * responses (tab-separated "from\tto1;to2" lines, optional "Taxonomy ID:"
 * header, and trailing "MSG:" diagnostics) into {@link IdMappingResult}
 * objects.
 *
 * <p>NOTE(review): the two int arguments passed to convertToIDMappings
 * (20/40 in most tests) appear to be the enrichment limit and the overall
 * "mapped to" limit respectively — inferred from
 * {@link #checkIdMappingResultWithWarning()} and
 * {@link #checkIdMappingResultWithLimitExceedError()}; confirm against the
 * converter's signature.
 */
class PIRResponseConverterTest {

    // Converter under test; recreated before each test.
    private PIRResponseConverter converter;
    // Default request mapping to "EMBL"; individual tests may shadow this
    // with a local request targeting a different database.
    private IdMappingJobRequest request;

    @BeforeEach
    void setUp() {
        converter = new PIRResponseConverter();
        request = new IdMappingJobRequest();
        request.setTo("EMBL");
    }

    // A non-200 upstream status is surfaced as an HttpServerErrorException.
    @Test
    void httpNot200CausesEmpty() {
        ResponseEntity<String> responseEntity =
                ResponseEntity.status(HttpStatus.BAD_REQUEST).body("WRONG");
        assertThrows(
                HttpServerErrorException.class,
                () -> converter.convertToIDMappings(request, 20, 40, responseEntity));
    }

    // A 200 with no body yields an entirely empty result.
    @Test
    void emptyResponseGivesEmptyResult() {
        ResponseEntity<String> responseEntity = ResponseEntity.status(HttpStatus.OK).build();
        IdMappingResult result = converter.convertToIDMappings(request, 20, 40, responseEntity);
        assertThat(result.getMappedIds(), is(emptyList()));
        assertThat(result.getUnmappedIds(), is(emptyList()));
        assertThat(result.getWarnings(), is(emptyList()));
    }

    // Semicolon-separated "to" values are expanded into one pair per value.
    @Test
    void multipleItemsInResponseProduceCorrectResult() {
        ResponseEntity<String> responseEntity =
                ResponseEntity.status(HttpStatus.OK).body("From1\tTo1\n" + "From2\tTo1;To2\n");
        IdMappingResult result = converter.convertToIDMappings(request, 20, 40, responseEntity);
        assertThat(
                result.getMappedIds(),
                contains(
                        new IdMappingStringPair("From1", "To1"),
                        new IdMappingStringPair("From2", "To1"),
                        new IdMappingStringPair("From2", "To2")));
        assertThat(result.getUnmappedIds(), is(emptyList()));
        assertThat(result.getWarnings(), is(emptyList()));
    }

    // When mapping to UniProtKB, ids not matching the UniProtKB accession
    // pattern (e.g. UniParc UPI... ids) are silently dropped.
    @Test
    void filterInvalidIdsFromResponse() {
        // Shadows the field on purpose: this test maps to UniProtKB.
        IdMappingJobRequest request = new IdMappingJobRequest();
        request.setTo("UniProtKB");
        ResponseEntity<String> responseEntity =
                ResponseEntity.status(HttpStatus.OK)
                        .body(
                                "From1\tP12345\n"
                                        + "From2\tP12345;UPI0000000001;P21802\n"
                                        + "From3\tUPI0000000001;P12346\n");
        IdMappingResult result = converter.convertToIDMappings(request, 20, 40, responseEntity);
        assertThat(
                result.getMappedIds(),
                contains(
                        new IdMappingStringPair("From1", "P12345"),
                        new IdMappingStringPair("From2", "P12345"),
                        new IdMappingStringPair("From2", "P21802"),
                        new IdMappingStringPair("From3", "P12346")));
        assertThat(result.getUnmappedIds(), is(emptyList()));
        assertThat(result.getWarnings(), is(emptyList()));
    }

    // A leading "Taxonomy ID:" header line is ignored by the parser.
    @Test
    void responseWithTaxIdHandledCorrectly() {
        ResponseEntity<String> responseEntity =
                ResponseEntity.status(HttpStatus.OK)
                        .body("Taxonomy ID: 9606\n" + "\n" + "From1\tTo1\n" + "From2\tTo1;To2\n");
        IdMappingResult result = converter.convertToIDMappings(request, 20, 40, responseEntity);
        assertThat(
                result.getMappedIds(),
                contains(
                        new IdMappingStringPair("From1", "To1"),
                        new IdMappingStringPair("From2", "To1"),
                        new IdMappingStringPair("From2", "To2")));
        assertThat(result.getUnmappedIds(), is(emptyList()));
        assertThat(result.getWarnings(), is(emptyList()));
    }

    // Ids listed in the trailing "MSG: ... have no matches" diagnostic are
    // reported as unmapped ids.
    @Test
    void failedMappingsReturnedInResponse() {
        ResponseEntity<String> responseEntity =
                ResponseEntity.status(HttpStatus.OK)
                        .body(
                                "Taxonomy ID: 9606\n"
                                        + "\n"
                                        + "From1\tTo1\n"
                                        + "From2\tTo1;To2\n"
                                        + "gene 12\n"
                                        + "gene 36\n"
                                        + "\n"
                                        + "\n"
                                        + "\n"
                                        + "\n"
                                        + "\n"
                                        + "MSG: 200 -- 2 IDs have no matches: \"gene 12,gene 36,\".\n");
        IdMappingResult result = converter.convertToIDMappings(request, 20, 40, responseEntity);
        assertThat(
                result.getMappedIds(),
                contains(
                        new IdMappingStringPair("From1", "To1"),
                        new IdMappingStringPair("From2", "To1"),
                        new IdMappingStringPair("From2", "To2")));
        assertThat(result.getUnmappedIds(), contains("gene 12", "gene 36"));
        assertThat(result.getWarnings(), is(emptyList()));
    }

    // "MSG: 200 -- No Matches." marks every requested id as unmapped.
    @Test
    void checkNoMatchesAreFoundCorrectly() {
        ResponseEntity<String> responseEntity =
                ResponseEntity.status(HttpStatus.OK)
                        .body("\n" + "\n" + "\n" + "\n" + "\n" + "MSG: 200 -- No Matches.");
        List<String> ids = List.of("id1", "id2", "id3", "id4");
        String idsStr = String.join(",", ids);
        request.setIds(idsStr);
        IdMappingResult result = converter.convertToIDMappings(request, 20, 40, responseEntity);
        assertThat(result.getMappedIds(), is(emptyList()));
        assertThat(result.getUnmappedIds(), is(ids));
        assertThat(result.getWarnings(), is(emptyList()));
    }

    // Each (to-database, id) pair below must be accepted by isValidIdPattern.
    @ParameterizedTest
    @MethodSource("validToAndIdPairs")
    void checkValidPairs(String to, String id) {
        assertThat(isValidIdPattern(to, id), is(true));
    }

    // Valid pairs: pattern checks only apply to UniProt-family databases;
    // anything goes for other targets such as EMBL.
    private static Stream<Arguments> validToAndIdPairs() {
        return Stream.of(
                Arguments.of("EMBL", "AAAAA10001.1"),
                Arguments.of(
                        "EMBL",
                        "CRAZY-IS-OKAY-FOR-NON-UNIPROTKB/UNIPARC/UNIREF because we do not need to fetch these ids from our store layer"),
                Arguments.of("UniProtKB", "P12345"),
                Arguments.of("UniRef50", "UniRef50_P12345"),
                Arguments.of("UniRef90", "UniRef90_P12345"),
                Arguments.of("UniRef100", "UniRef100_P12345"),
                Arguments.of("UniParc", "UPI0000000001"));
    }

    // Each (to-database, id) pair below must be rejected by isValidIdPattern.
    @ParameterizedTest
    @MethodSource("invalidToAndIdPairs")
    void checkInvalidPairs(String to, String id) {
        assertThat(isValidIdPattern(to, id), is(false));
    }

    // More mapped ids than the enrichment limit (20 here) still returns all
    // mappings but attaches a single enrichment warning.
    @Test
    void checkIdMappingResultWithWarning() {
        IdMappingJobRequest request = new IdMappingJobRequest();
        request.setTo("UniProtKB");
        // when more than allowed ids (20 for tests) for enrichment
        ResponseEntity<String> responseEntity =
                ResponseEntity.status(HttpStatus.OK)
                        .body(
                                "From1\tP00001;P00002;P00003;P00004;P00005\n"
                                        + "From2\tP00006;P00007;P00008;P00009;P00010\n"
                                        + "From3\tP00011;P00012;P00014;P00015;P00016\n"
                                        + "From4\tP00016;P00017;P00018;P00019;P00020\n"
                                        + "From5\tP00021\n");
        int maxCountForDataEnrich = 20;
        IdMappingResult result =
                converter.convertToIDMappings(request, maxCountForDataEnrich, 40, responseEntity);
        assertFalse(result.getMappedIds().isEmpty());
        assertEquals(21, result.getMappedIds().size());
        assertFalse(result.getWarnings().isEmpty());
        assertEquals(1, result.getWarnings().size());
        assertEquals(
                ENRICHMENT_WARNING.getMessage() + maxCountForDataEnrich,
                result.getWarnings().get(0).getMessage());
        assertThat(result.getUnmappedIds(), is(emptyList()));
    }

    // The enrichment warning only applies to UniProt targets; exceeding the
    // enrichment limit for EMBL produces no warning.
    @Test
    void checkIdMappingResultWithoutWarningForNonUniProtId() {
        IdMappingJobRequest request = new IdMappingJobRequest();
        request.setTo("EMBL");
        // when more than allowed ids (20 for tests) for enrichment
        ResponseEntity<String> responseEntity =
                ResponseEntity.status(HttpStatus.OK).body("From1\t00001;00002;00003;00004;00005\n");
        IdMappingResult result = converter.convertToIDMappings(request, 4, 40, responseEntity);
        assertFalse(result.getMappedIds().isEmpty());
        assertEquals(5, result.getMappedIds().size());
        assertTrue(result.getWarnings().isEmpty());
        assertThat(result.getUnmappedIds(), is(emptyList()));
    }

    // Exceeding the hard "mapped to" limit (second int = 4) drops the
    // mappings entirely and reports a single error with code 40.
    @Test
    void checkIdMappingResultWithLimitExceedError() {
        IdMappingJobRequest request = new IdMappingJobRequest();
        request.setTo("EMBL");
        // when more than allowed ids (20 for tests) for enrichment
        ResponseEntity<String> responseEntity =
                ResponseEntity.status(HttpStatus.OK).body("From1\t00001;00002;00003;00004;00005\n");
        IdMappingResult result = converter.convertToIDMappings(request, 4, 4, responseEntity);
        assertTrue(result.getMappedIds().isEmpty());
        assertTrue(result.getWarnings().isEmpty());
        assertThat(result.getUnmappedIds(), is(emptyList()));
        assertEquals(1, result.getErrors().size());
        assertEquals(
                "Id Mapping API is not supported for mapping results with \"mapped to\" IDs more than 4",
                result.getErrors().get(0).getMessage());
        assertEquals(40, result.getErrors().get(0).getCode());
    }

    // Invalid pairs: ids from the wrong UniProt family are rejected.
    private static Stream<Arguments> invalidToAndIdPairs() {
        return Stream.of(
                Arguments.of("UniProtKB", "UPI0000000001"),
                Arguments.of("UniRef50", "UPI0000000001"),
                Arguments.of("UniRef90", "UPI0000000001"),
                Arguments.of("UniRef100", "UPI0000000001"),
                Arguments.of("UniParc", "P12345"),
                Arguments.of("UniParc", "UniRef100_P12345"));
    }
}
/** * Copyright © 2020 SOLTEKNO.COM * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.thingsboard.server.common.msg.tools; import java.time.LocalDate; import java.time.LocalDateTime; import java.time.ZoneId; import java.time.temporal.ChronoUnit; import java.time.temporal.TemporalAdjuster; import java.time.temporal.TemporalAdjusters; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import static java.time.ZoneOffset.UTC; import static java.time.temporal.ChronoField.DAY_OF_MONTH; import static java.time.temporal.ChronoUnit.MONTHS; public class SchedulerUtils { private static final ConcurrentMap<String, ZoneId> tzMap = new ConcurrentHashMap<>(); public static ZoneId getZoneId(String tz) { return tzMap.computeIfAbsent(tz == null || tz.isEmpty() ? 
"UTC" : tz, ZoneId::of); } public static long getStartOfCurrentHour() { return getStartOfCurrentHour(UTC); } public static long getStartOfCurrentHour(ZoneId zoneId) { return LocalDateTime.now(UTC).atZone(zoneId).truncatedTo(ChronoUnit.HOURS).toInstant().toEpochMilli(); } public static long getStartOfCurrentMonth() { return getStartOfCurrentMonth(UTC); } public static long getStartOfCurrentMonth(ZoneId zoneId) { return LocalDate.now(UTC).withDayOfMonth(1).atStartOfDay(zoneId).toInstant().toEpochMilli(); } public static long getStartOfNextMonth() { return getStartOfNextMonth(UTC); } public static long getStartOfNextMonth(ZoneId zoneId) { return LocalDate.now(UTC).with(TemporalAdjusters.firstDayOfNextMonth()).atStartOfDay(zoneId).toInstant().toEpochMilli(); } public static long getStartOfNextNextMonth() { return getStartOfNextNextMonth(UTC); } public static long getStartOfNextNextMonth(ZoneId zoneId) { return LocalDate.now(UTC).with(firstDayOfNextNextMonth()).atStartOfDay(zoneId).toInstant().toEpochMilli(); } public static TemporalAdjuster firstDayOfNextNextMonth() { return (temporal) -> temporal.with(DAY_OF_MONTH, 1).plus(2, MONTHS); } }
/*
 * Copyright 2017 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package leap.lang.accessor;

import leap.lang.Named;
import leap.lang.Objects2;
import leap.lang.convert.Converts;
import leap.lang.exception.ObjectNotFoundException;

import java.math.BigDecimal;

/**
 * A getter that looks values up by name, with typed convenience accessors.
 *
 * <p>Values are converted via {@code Converts.convert}; a value that is null
 * or "empty" (as determined by {@code Objects2.isEmpty}) is treated as absent
 * by the typed accessors.
 */
public interface NamedGetter extends ObjectPropertyGetter, Getter {

    /**
     * Returns {@code true} if the given name exists.
     */
    boolean contains(String name);

    /**
     * Ensures the given name exists, returning {@code this} for chaining.
     *
     * @throws ObjectNotFoundException if the name does not exist.
     */
    default NamedGetter mustContains(String name) throws ObjectNotFoundException {
        if(!contains(name)) {
            throw new ObjectNotFoundException("The name '" + name + "' not exists!");
        }
        return this;
    }

    // Bridges ObjectPropertyGetter to the name-based lookup.
    @Override
    default Object getProperty(String name) {
        return get(name);
    }

    /**
     * Returns the named value (the value itself may be null).
     *
     * @throws ObjectNotFoundException if the name does not exist.
     */
    default Object mustGet(String name) {
        return mustContains(name).get(name);
    }

    /**
     * Returns the value for the given {@link Named} object's name (may be null).
     *
     * <p/>
     * Returns <code>null</code> if the name does not exist.
     */
    default Object get(Named named) {
        return get(named.getName());
    }

    /**
     * Returns the named value converted to the given type.
     *
     * <p/>
     * Returns <code>null</code> if the name does not exist or the value is
     * null/empty.
     */
    default <T> T get(String name, Class<T> type) {
        Object v = get(name);
        return Objects2.isEmpty(v) ? null : Converts.convert(v, type);
    }

    /**
     * Returns the value for the given {@link Named} object's name, converted
     * to the given type.
     *
     * <p/>
     * Returns <code>null</code> if the name does not exist or the value is
     * null/empty.
     */
    default <T> T get(Named named, Class<T> type) {
        Object v = get(named.getName());
        return Objects2.isEmpty(v) ? null : Converts.convert(v, type);
    }

    /**
     * Returns the named value converted to the given type, or
     * {@code defaultValue} if the value is null or empty (including when the
     * name does not exist). This method does not throw for a missing name.
     */
    default <T> T get(String name, Class<T> type, T defaultValue) {
        Object v = get(name);
        return Objects2.isEmpty(v) ? defaultValue : Converts.convert(v, type);
    }

    /**
     * Returns the named value as {@link String}, or null if absent/empty.
     */
    default String getString(String name) {
        return get(name, String.class);
    }

    /**
     * Returns the named value as {@link Short}, or null if absent/empty.
     */
    default Short getShort(String name) {
        return get(name, Short.class);
    }

    /**
     * Returns the named value as <code>short</code>, or the default value if
     * the value is null or empty.
     */
    default short getShort(String name, short defaultValue) {
        return get(name, Short.class, defaultValue);
    }

    /**
     * Returns the named value as {@link Integer}, or null if absent/empty.
     */
    default Integer getInteger(String name) {
        return get(name, Integer.class);
    }

    /**
     * Returns the named value as <code>int</code>, or the default value if
     * the value is null or empty.
     */
    default int getInteger(String name, int defaultValue) {
        return get(name, Integer.class, defaultValue);
    }

    /**
     * Returns the named value as {@link Long}, or null if absent/empty.
     */
    default Long getLong(String name) {
        return get(name, Long.class);
    }

    /**
     * Returns the named value as <code>long</code>, or the default value if
     * the value is null or empty.
     */
    default long getLong(String name, long defaultValue) {
        return get(name, Long.class, defaultValue);
    }

    /**
     * Returns the named value as {@link Float}, or null if absent/empty.
     */
    default Float getFloat(String name) {
        return get(name, Float.class);
    }

    /**
     * Returns the named value as <code>float</code>, or the default value if
     * the value is null or empty.
     */
    default float getFloat(String name, float defaultValue) {
        return get(name, Float.class, defaultValue);
    }

    /**
     * Returns the named value as {@link Double}, or null if absent/empty.
     */
    default Double getDouble(String name) {
        return get(name, Double.class);
    }

    /**
     * Returns the named value as <code>double</code>, or the default value if
     * the value is null or empty.
     */
    default double getDouble(String name, double defaultValue) {
        return get(name, Double.class, defaultValue);
    }

    /**
     * Returns the named value as {@link BigDecimal}, or null if absent/empty.
     */
    default BigDecimal getDecimal(String name) {
        return get(name, BigDecimal.class);
    }
}
/* * Copyright (C) 2016 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.android.exoplayer2.extractor.mkv; import static com.google.android.exoplayer2.util.Assertions.checkArgument; import static com.google.android.exoplayer2.util.Assertions.checkNotNull; import static com.google.android.exoplayer2.util.Assertions.checkState; import static com.google.android.exoplayer2.util.Assertions.checkStateNotNull; import static java.lang.Math.max; import static java.lang.Math.min; import static java.lang.annotation.ElementType.TYPE_USE; import android.util.Pair; import android.util.SparseArray; import androidx.annotation.CallSuper; import androidx.annotation.IntDef; import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.ParserException; import com.google.android.exoplayer2.audio.AacUtil; import com.google.android.exoplayer2.audio.MpegAudioUtil; import com.google.android.exoplayer2.drm.DrmInitData; import com.google.android.exoplayer2.drm.DrmInitData.SchemeData; import com.google.android.exoplayer2.extractor.ChunkIndex; import com.google.android.exoplayer2.extractor.Extractor; import com.google.android.exoplayer2.extractor.ExtractorInput; import com.google.android.exoplayer2.extractor.ExtractorOutput; import com.google.android.exoplayer2.extractor.ExtractorsFactory; import com.google.android.exoplayer2.extractor.PositionHolder; import 
com.google.android.exoplayer2.extractor.SeekMap; import com.google.android.exoplayer2.extractor.TrackOutput; import com.google.android.exoplayer2.extractor.TrueHdSampleRechunker; import com.google.android.exoplayer2.util.Log; import com.google.android.exoplayer2.util.LongArray; import com.google.android.exoplayer2.util.MimeTypes; import com.google.android.exoplayer2.util.NalUnitUtil; import com.google.android.exoplayer2.util.ParsableByteArray; import com.google.android.exoplayer2.util.Util; import com.google.android.exoplayer2.video.AvcConfig; import com.google.android.exoplayer2.video.ColorInfo; import com.google.android.exoplayer2.video.DolbyVisionConfig; import com.google.android.exoplayer2.video.HevcConfig; import com.google.common.collect.ImmutableList; import java.io.IOException; import java.lang.annotation.Documented; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; import java.nio.ByteBuffer; import java.nio.ByteOrder; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.UUID; import org.checkerframework.checker.nullness.compatqual.NullableType; import org.checkerframework.checker.nullness.qual.EnsuresNonNull; import org.checkerframework.checker.nullness.qual.MonotonicNonNull; import org.checkerframework.checker.nullness.qual.RequiresNonNull; /** Extracts data from the Matroska and WebM container formats. */ public class MatroskaExtractor implements Extractor { /** Factory for {@link MatroskaExtractor} instances. */ public static final ExtractorsFactory FACTORY = () -> new Extractor[] {new MatroskaExtractor()}; /** * Flags controlling the behavior of the extractor. Possible flag value is {@link * #FLAG_DISABLE_SEEK_FOR_CUES}. 
*/ @Documented @Retention(RetentionPolicy.SOURCE) @Target(TYPE_USE) @IntDef( flag = true, value = {FLAG_DISABLE_SEEK_FOR_CUES}) public @interface Flags {} /** * Flag to disable seeking for cues. * * <p>Normally (i.e. when this flag is not set) the extractor will seek to the cues element if its * position is specified in the seek head and if it's after the first cluster. Setting this flag * disables seeking to the cues element. If the cues element is after the first cluster then the * media is treated as being unseekable. */ public static final int FLAG_DISABLE_SEEK_FOR_CUES = 1; private static final String TAG = "MatroskaExtractor"; private static final int UNSET_ENTRY_ID = -1; private static final int BLOCK_STATE_START = 0; private static final int BLOCK_STATE_HEADER = 1; private static final int BLOCK_STATE_DATA = 2; private static final String DOC_TYPE_MATROSKA = "matroska"; private static final String DOC_TYPE_WEBM = "webm"; private static final String CODEC_ID_VP8 = "V_VP8"; private static final String CODEC_ID_VP9 = "V_VP9"; private static final String CODEC_ID_AV1 = "V_AV1"; private static final String CODEC_ID_MPEG2 = "V_MPEG2"; private static final String CODEC_ID_MPEG4_SP = "V_MPEG4/ISO/SP"; private static final String CODEC_ID_MPEG4_ASP = "V_MPEG4/ISO/ASP"; private static final String CODEC_ID_MPEG4_AP = "V_MPEG4/ISO/AP"; private static final String CODEC_ID_H264 = "V_MPEG4/ISO/AVC"; private static final String CODEC_ID_H265 = "V_MPEGH/ISO/HEVC"; private static final String CODEC_ID_FOURCC = "V_MS/VFW/FOURCC"; private static final String CODEC_ID_THEORA = "V_THEORA"; private static final String CODEC_ID_VORBIS = "A_VORBIS"; private static final String CODEC_ID_OPUS = "A_OPUS"; private static final String CODEC_ID_AAC = "A_AAC"; private static final String CODEC_ID_MP2 = "A_MPEG/L2"; private static final String CODEC_ID_MP3 = "A_MPEG/L3"; private static final String CODEC_ID_AC3 = "A_AC3"; private static final String CODEC_ID_E_AC3 = "A_EAC3"; private 
static final String CODEC_ID_TRUEHD = "A_TRUEHD"; private static final String CODEC_ID_DTS = "A_DTS"; private static final String CODEC_ID_DTS_EXPRESS = "A_DTS/EXPRESS"; private static final String CODEC_ID_DTS_LOSSLESS = "A_DTS/LOSSLESS"; private static final String CODEC_ID_FLAC = "A_FLAC"; private static final String CODEC_ID_ACM = "A_MS/ACM"; private static final String CODEC_ID_PCM_INT_LIT = "A_PCM/INT/LIT"; private static final String CODEC_ID_PCM_INT_BIG = "A_PCM/INT/BIG"; private static final String CODEC_ID_PCM_FLOAT = "A_PCM/FLOAT/IEEE"; private static final String CODEC_ID_SUBRIP = "S_TEXT/UTF8"; private static final String CODEC_ID_ASS = "S_TEXT/ASS"; private static final String CODEC_ID_VTT = "S_TEXT/WEBVTT"; private static final String CODEC_ID_VOBSUB = "S_VOBSUB"; private static final String CODEC_ID_PGS = "S_HDMV/PGS"; private static final String CODEC_ID_DVBSUB = "S_DVBSUB"; private static final int VORBIS_MAX_INPUT_SIZE = 8192; private static final int OPUS_MAX_INPUT_SIZE = 5760; private static final int ENCRYPTION_IV_SIZE = 8; private static final int TRACK_TYPE_AUDIO = 2; private static final int ID_EBML = 0x1A45DFA3; private static final int ID_EBML_READ_VERSION = 0x42F7; private static final int ID_DOC_TYPE = 0x4282; private static final int ID_DOC_TYPE_READ_VERSION = 0x4285; private static final int ID_SEGMENT = 0x18538067; private static final int ID_SEGMENT_INFO = 0x1549A966; private static final int ID_SEEK_HEAD = 0x114D9B74; private static final int ID_SEEK = 0x4DBB; private static final int ID_SEEK_ID = 0x53AB; private static final int ID_SEEK_POSITION = 0x53AC; private static final int ID_INFO = 0x1549A966; private static final int ID_TIMECODE_SCALE = 0x2AD7B1; private static final int ID_DURATION = 0x4489; private static final int ID_CLUSTER = 0x1F43B675; private static final int ID_TIME_CODE = 0xE7; private static final int ID_SIMPLE_BLOCK = 0xA3; private static final int ID_BLOCK_GROUP = 0xA0; private static final int ID_BLOCK = 
0xA1; private static final int ID_BLOCK_DURATION = 0x9B; private static final int ID_BLOCK_ADDITIONS = 0x75A1; private static final int ID_BLOCK_MORE = 0xA6; private static final int ID_BLOCK_ADD_ID = 0xEE; private static final int ID_BLOCK_ADDITIONAL = 0xA5; private static final int ID_REFERENCE_BLOCK = 0xFB; private static final int ID_TRACKS = 0x1654AE6B; private static final int ID_TRACK_ENTRY = 0xAE; private static final int ID_TRACK_NUMBER = 0xD7; private static final int ID_TRACK_TYPE = 0x83; private static final int ID_FLAG_DEFAULT = 0x88; private static final int ID_FLAG_FORCED = 0x55AA; private static final int ID_DEFAULT_DURATION = 0x23E383; private static final int ID_MAX_BLOCK_ADDITION_ID = 0x55EE; private static final int ID_BLOCK_ADDITION_MAPPING = 0x41E4; private static final int ID_BLOCK_ADD_ID_TYPE = 0x41E7; private static final int ID_BLOCK_ADD_ID_EXTRA_DATA = 0x41ED; private static final int ID_NAME = 0x536E; private static final int ID_CODEC_ID = 0x86; private static final int ID_CODEC_PRIVATE = 0x63A2; private static final int ID_CODEC_DELAY = 0x56AA; private static final int ID_SEEK_PRE_ROLL = 0x56BB; private static final int ID_VIDEO = 0xE0; private static final int ID_PIXEL_WIDTH = 0xB0; private static final int ID_PIXEL_HEIGHT = 0xBA; private static final int ID_DISPLAY_WIDTH = 0x54B0; private static final int ID_DISPLAY_HEIGHT = 0x54BA; private static final int ID_DISPLAY_UNIT = 0x54B2; private static final int ID_AUDIO = 0xE1; private static final int ID_CHANNELS = 0x9F; private static final int ID_AUDIO_BIT_DEPTH = 0x6264; private static final int ID_SAMPLING_FREQUENCY = 0xB5; private static final int ID_CONTENT_ENCODINGS = 0x6D80; private static final int ID_CONTENT_ENCODING = 0x6240; private static final int ID_CONTENT_ENCODING_ORDER = 0x5031; private static final int ID_CONTENT_ENCODING_SCOPE = 0x5032; private static final int ID_CONTENT_COMPRESSION = 0x5034; private static final int ID_CONTENT_COMPRESSION_ALGORITHM = 0x4254; private 
static final int ID_CONTENT_COMPRESSION_SETTINGS = 0x4255; private static final int ID_CONTENT_ENCRYPTION = 0x5035; private static final int ID_CONTENT_ENCRYPTION_ALGORITHM = 0x47E1; private static final int ID_CONTENT_ENCRYPTION_KEY_ID = 0x47E2; private static final int ID_CONTENT_ENCRYPTION_AES_SETTINGS = 0x47E7; private static final int ID_CONTENT_ENCRYPTION_AES_SETTINGS_CIPHER_MODE = 0x47E8; private static final int ID_CUES = 0x1C53BB6B; private static final int ID_CUE_POINT = 0xBB; private static final int ID_CUE_TIME = 0xB3; private static final int ID_CUE_TRACK_POSITIONS = 0xB7; private static final int ID_CUE_CLUSTER_POSITION = 0xF1; private static final int ID_LANGUAGE = 0x22B59C; private static final int ID_PROJECTION = 0x7670; private static final int ID_PROJECTION_TYPE = 0x7671; private static final int ID_PROJECTION_PRIVATE = 0x7672; private static final int ID_PROJECTION_POSE_YAW = 0x7673; private static final int ID_PROJECTION_POSE_PITCH = 0x7674; private static final int ID_PROJECTION_POSE_ROLL = 0x7675; private static final int ID_STEREO_MODE = 0x53B8; private static final int ID_COLOUR = 0x55B0; private static final int ID_COLOUR_RANGE = 0x55B9; private static final int ID_COLOUR_TRANSFER = 0x55BA; private static final int ID_COLOUR_PRIMARIES = 0x55BB; private static final int ID_MAX_CLL = 0x55BC; private static final int ID_MAX_FALL = 0x55BD; private static final int ID_MASTERING_METADATA = 0x55D0; private static final int ID_PRIMARY_R_CHROMATICITY_X = 0x55D1; private static final int ID_PRIMARY_R_CHROMATICITY_Y = 0x55D2; private static final int ID_PRIMARY_G_CHROMATICITY_X = 0x55D3; private static final int ID_PRIMARY_G_CHROMATICITY_Y = 0x55D4; private static final int ID_PRIMARY_B_CHROMATICITY_X = 0x55D5; private static final int ID_PRIMARY_B_CHROMATICITY_Y = 0x55D6; private static final int ID_WHITE_POINT_CHROMATICITY_X = 0x55D7; private static final int ID_WHITE_POINT_CHROMATICITY_Y = 0x55D8; private static final int ID_LUMNINANCE_MAX = 
0x55D9; private static final int ID_LUMNINANCE_MIN = 0x55DA; /** * BlockAddID value for ITU T.35 metadata in a VP9 track. See also * https://www.webmproject.org/docs/container/. */ private static final int BLOCK_ADDITIONAL_ID_VP9_ITU_T_35 = 4; /** * BlockAddIdType value for Dolby Vision configuration with profile <= 7. See also * https://www.matroska.org/technical/codec_specs.html. */ private static final int BLOCK_ADD_ID_TYPE_DVCC = 0x64766343; /** * BlockAddIdType value for Dolby Vision configuration with profile > 7. See also * https://www.matroska.org/technical/codec_specs.html. */ private static final int BLOCK_ADD_ID_TYPE_DVVC = 0x64767643; private static final int LACING_NONE = 0; private static final int LACING_XIPH = 1; private static final int LACING_FIXED_SIZE = 2; private static final int LACING_EBML = 3; private static final int FOURCC_COMPRESSION_DIVX = 0x58564944; private static final int FOURCC_COMPRESSION_H263 = 0x33363248; private static final int FOURCC_COMPRESSION_VC1 = 0x31435657; /** * A template for the prefix that must be added to each subrip sample. * * <p>The display time of each subtitle is passed as {@code timeUs} to {@link * TrackOutput#sampleMetadata}. The start and end timecodes in this template are relative to * {@code timeUs}. Hence the start timecode is always zero. The 12 byte end timecode starting at * {@link #SUBRIP_PREFIX_END_TIMECODE_OFFSET} is set to a placeholder value, and must be replaced * with the duration of the subtitle. * * <p>Equivalent to the UTF-8 string: "1\n00:00:00,000 --> 00:00:00,000\n". */ private static final byte[] SUBRIP_PREFIX = new byte[] { 49, 10, 48, 48, 58, 48, 48, 58, 48, 48, 44, 48, 48, 48, 32, 45, 45, 62, 32, 48, 48, 58, 48, 48, 58, 48, 48, 44, 48, 48, 48, 10 }; /** The byte offset of the end timecode in {@link #SUBRIP_PREFIX}. 
*/ private static final int SUBRIP_PREFIX_END_TIMECODE_OFFSET = 19; /** * The value by which to divide a time in microseconds to convert it to the unit of the last value * in a subrip timecode (milliseconds). */ private static final long SUBRIP_TIMECODE_LAST_VALUE_SCALING_FACTOR = 1000; /** The format of a subrip timecode. */ private static final String SUBRIP_TIMECODE_FORMAT = "%02d:%02d:%02d,%03d"; /** Matroska specific format line for SSA subtitles. */ private static final byte[] SSA_DIALOGUE_FORMAT = Util.getUtf8Bytes( "Format: Start, End, " + "ReadOrder, Layer, Style, Name, MarginL, MarginR, MarginV, Effect, Text"); /** * A template for the prefix that must be added to each SSA sample. * * <p>The display time of each subtitle is passed as {@code timeUs} to {@link * TrackOutput#sampleMetadata}. The start and end timecodes in this template are relative to * {@code timeUs}. Hence the start timecode is always zero. The 12 byte end timecode starting at * {@link #SUBRIP_PREFIX_END_TIMECODE_OFFSET} is set to a placeholder value, and must be replaced * with the duration of the subtitle. * * <p>Equivalent to the UTF-8 string: "Dialogue: 0:00:00:00,0:00:00:00,". */ private static final byte[] SSA_PREFIX = new byte[] { 68, 105, 97, 108, 111, 103, 117, 101, 58, 32, 48, 58, 48, 48, 58, 48, 48, 58, 48, 48, 44, 48, 58, 48, 48, 58, 48, 48, 58, 48, 48, 44 }; /** The byte offset of the end timecode in {@link #SSA_PREFIX}. */ private static final int SSA_PREFIX_END_TIMECODE_OFFSET = 21; /** * The value by which to divide a time in microseconds to convert it to the unit of the last value * in an SSA timecode (1/100ths of a second). */ private static final long SSA_TIMECODE_LAST_VALUE_SCALING_FACTOR = 10_000; /** The format of an SSA timecode. */ private static final String SSA_TIMECODE_FORMAT = "%01d:%02d:%02d:%02d"; /** * A template for the prefix that must be added to each VTT sample. 
* * <p>The display time of each subtitle is passed as {@code timeUs} to {@link * TrackOutput#sampleMetadata}. The start and end timecodes in this template are relative to * {@code timeUs}. Hence the start timecode is always zero. The 12 byte end timecode starting at * {@link #VTT_PREFIX_END_TIMECODE_OFFSET} is set to a placeholder value, and must be replaced * with the duration of the subtitle. * * <p>Equivalent to the UTF-8 string: "WEBVTT\n\n00:00:00.000 --> 00:00:00.000\n". */ private static final byte[] VTT_PREFIX = new byte[] { 87, 69, 66, 86, 84, 84, 10, 10, 48, 48, 58, 48, 48, 58, 48, 48, 46, 48, 48, 48, 32, 45, 45, 62, 32, 48, 48, 58, 48, 48, 58, 48, 48, 46, 48, 48, 48, 10 }; /** The byte offset of the end timecode in {@link #VTT_PREFIX}. */ private static final int VTT_PREFIX_END_TIMECODE_OFFSET = 25; /** * The value by which to divide a time in microseconds to convert it to the unit of the last value * in a VTT timecode (milliseconds). */ private static final long VTT_TIMECODE_LAST_VALUE_SCALING_FACTOR = 1000; /** The format of a VTT timecode. */ private static final String VTT_TIMECODE_FORMAT = "%02d:%02d:%02d.%03d"; /** The length in bytes of a WAVEFORMATEX structure. */ private static final int WAVE_FORMAT_SIZE = 18; /** Format tag indicating a WAVEFORMATEXTENSIBLE structure. */ private static final int WAVE_FORMAT_EXTENSIBLE = 0xFFFE; /** Format tag for PCM. */ private static final int WAVE_FORMAT_PCM = 1; /** Sub format for PCM. */ private static final UUID WAVE_SUBFORMAT_PCM = new UUID(0x0100000000001000L, 0x800000AA00389B71L); /** Some HTC devices signal rotation in track names. 
*/ private static final Map<String, Integer> TRACK_NAME_TO_ROTATION_DEGREES; static { Map<String, Integer> trackNameToRotationDegrees = new HashMap<>(); trackNameToRotationDegrees.put("htc_video_rotA-000", 0); trackNameToRotationDegrees.put("htc_video_rotA-090", 90); trackNameToRotationDegrees.put("htc_video_rotA-180", 180); trackNameToRotationDegrees.put("htc_video_rotA-270", 270); TRACK_NAME_TO_ROTATION_DEGREES = Collections.unmodifiableMap(trackNameToRotationDegrees); } private final EbmlReader reader; private final VarintReader varintReader; private final SparseArray<Track> tracks; private final boolean seekForCuesEnabled; // Temporary arrays. private final ParsableByteArray nalStartCode; private final ParsableByteArray nalLength; private final ParsableByteArray scratch; private final ParsableByteArray vorbisNumPageSamples; private final ParsableByteArray seekEntryIdBytes; private final ParsableByteArray sampleStrippedBytes; private final ParsableByteArray subtitleSample; private final ParsableByteArray encryptionInitializationVector; private final ParsableByteArray encryptionSubsampleData; private final ParsableByteArray blockAdditionalData; private @MonotonicNonNull ByteBuffer encryptionSubsampleDataBuffer; private long segmentContentSize; private long segmentContentPosition = C.POSITION_UNSET; private long timecodeScale = C.TIME_UNSET; private long durationTimecode = C.TIME_UNSET; private long durationUs = C.TIME_UNSET; // The track corresponding to the current TrackEntry element, or null. @Nullable private Track currentTrack; // Whether a seek map has been sent to the output. private boolean sentSeekMap; // Master seek entry related elements. private int seekEntryId; private long seekEntryPosition; // Cue related elements. 
private boolean seekForCues; private long cuesContentPosition = C.POSITION_UNSET; private long seekPositionAfterBuildingCues = C.POSITION_UNSET; private long clusterTimecodeUs = C.TIME_UNSET; @Nullable private LongArray cueTimesUs; @Nullable private LongArray cueClusterPositions; private boolean seenClusterPositionForCurrentCuePoint; // Reading state. private boolean haveOutputSample; // Block reading state. private int blockState; private long blockTimeUs; private long blockDurationUs; private int blockSampleIndex; private int blockSampleCount; private int[] blockSampleSizes; private int blockTrackNumber; private int blockTrackNumberLength; private @C.BufferFlags int blockFlags; private int blockAdditionalId; private boolean blockHasReferenceBlock; // Sample writing state. private int sampleBytesRead; private int sampleBytesWritten; private int sampleCurrentNalBytesRemaining; private boolean sampleEncodingHandled; private boolean sampleSignalByteRead; private boolean samplePartitionCountRead; private int samplePartitionCount; private byte sampleSignalByte; private boolean sampleInitializationVectorRead; // Extractor outputs. 
private @MonotonicNonNull ExtractorOutput extractorOutput; public MatroskaExtractor() { this(0); } public MatroskaExtractor(@Flags int flags) { this(new DefaultEbmlReader(), flags); } /* package */ MatroskaExtractor(EbmlReader reader, @Flags int flags) { this.reader = reader; this.reader.init(new InnerEbmlProcessor()); seekForCuesEnabled = (flags & FLAG_DISABLE_SEEK_FOR_CUES) == 0; varintReader = new VarintReader(); tracks = new SparseArray<>(); scratch = new ParsableByteArray(4); vorbisNumPageSamples = new ParsableByteArray(ByteBuffer.allocate(4).putInt(-1).array()); seekEntryIdBytes = new ParsableByteArray(4); nalStartCode = new ParsableByteArray(NalUnitUtil.NAL_START_CODE); nalLength = new ParsableByteArray(4); sampleStrippedBytes = new ParsableByteArray(); subtitleSample = new ParsableByteArray(); encryptionInitializationVector = new ParsableByteArray(ENCRYPTION_IV_SIZE); encryptionSubsampleData = new ParsableByteArray(); blockAdditionalData = new ParsableByteArray(); blockSampleSizes = new int[1]; } @Override public final boolean sniff(ExtractorInput input) throws IOException { return new Sniffer().sniff(input); } @Override public final void init(ExtractorOutput output) { extractorOutput = output; } @CallSuper @Override public void seek(long position, long timeUs) { clusterTimecodeUs = C.TIME_UNSET; blockState = BLOCK_STATE_START; reader.reset(); varintReader.reset(); resetWriteSampleData(); for (int i = 0; i < tracks.size(); i++) { tracks.valueAt(i).reset(); } } @Override public final void release() { // Do nothing } @Override public final int read(ExtractorInput input, PositionHolder seekPosition) throws IOException { haveOutputSample = false; boolean continueReading = true; while (continueReading && !haveOutputSample) { continueReading = reader.read(input); if (continueReading && maybeSeekForCues(seekPosition, input.getPosition())) { return Extractor.RESULT_SEEK; } } if (!continueReading) { for (int i = 0; i < tracks.size(); i++) { Track track = 
tracks.valueAt(i); track.assertOutputInitialized(); track.outputPendingSampleMetadata(); } return Extractor.RESULT_END_OF_INPUT; } return Extractor.RESULT_CONTINUE; } /** * Maps an element ID to a corresponding type. * * @see EbmlProcessor#getElementType(int) */ @CallSuper protected @EbmlProcessor.ElementType int getElementType(int id) { switch (id) { case ID_EBML: case ID_SEGMENT: case ID_SEEK_HEAD: case ID_SEEK: case ID_INFO: case ID_CLUSTER: case ID_TRACKS: case ID_TRACK_ENTRY: case ID_BLOCK_ADDITION_MAPPING: case ID_AUDIO: case ID_VIDEO: case ID_CONTENT_ENCODINGS: case ID_CONTENT_ENCODING: case ID_CONTENT_COMPRESSION: case ID_CONTENT_ENCRYPTION: case ID_CONTENT_ENCRYPTION_AES_SETTINGS: case ID_CUES: case ID_CUE_POINT: case ID_CUE_TRACK_POSITIONS: case ID_BLOCK_GROUP: case ID_BLOCK_ADDITIONS: case ID_BLOCK_MORE: case ID_PROJECTION: case ID_COLOUR: case ID_MASTERING_METADATA: return EbmlProcessor.ELEMENT_TYPE_MASTER; case ID_EBML_READ_VERSION: case ID_DOC_TYPE_READ_VERSION: case ID_SEEK_POSITION: case ID_TIMECODE_SCALE: case ID_TIME_CODE: case ID_BLOCK_DURATION: case ID_PIXEL_WIDTH: case ID_PIXEL_HEIGHT: case ID_DISPLAY_WIDTH: case ID_DISPLAY_HEIGHT: case ID_DISPLAY_UNIT: case ID_TRACK_NUMBER: case ID_TRACK_TYPE: case ID_FLAG_DEFAULT: case ID_FLAG_FORCED: case ID_DEFAULT_DURATION: case ID_MAX_BLOCK_ADDITION_ID: case ID_BLOCK_ADD_ID_TYPE: case ID_CODEC_DELAY: case ID_SEEK_PRE_ROLL: case ID_CHANNELS: case ID_AUDIO_BIT_DEPTH: case ID_CONTENT_ENCODING_ORDER: case ID_CONTENT_ENCODING_SCOPE: case ID_CONTENT_COMPRESSION_ALGORITHM: case ID_CONTENT_ENCRYPTION_ALGORITHM: case ID_CONTENT_ENCRYPTION_AES_SETTINGS_CIPHER_MODE: case ID_CUE_TIME: case ID_CUE_CLUSTER_POSITION: case ID_REFERENCE_BLOCK: case ID_STEREO_MODE: case ID_COLOUR_RANGE: case ID_COLOUR_TRANSFER: case ID_COLOUR_PRIMARIES: case ID_MAX_CLL: case ID_MAX_FALL: case ID_PROJECTION_TYPE: case ID_BLOCK_ADD_ID: return EbmlProcessor.ELEMENT_TYPE_UNSIGNED_INT; case ID_DOC_TYPE: case ID_NAME: case ID_CODEC_ID: case 
ID_LANGUAGE: return EbmlProcessor.ELEMENT_TYPE_STRING; case ID_SEEK_ID: case ID_BLOCK_ADD_ID_EXTRA_DATA: case ID_CONTENT_COMPRESSION_SETTINGS: case ID_CONTENT_ENCRYPTION_KEY_ID: case ID_SIMPLE_BLOCK: case ID_BLOCK: case ID_CODEC_PRIVATE: case ID_PROJECTION_PRIVATE: case ID_BLOCK_ADDITIONAL: return EbmlProcessor.ELEMENT_TYPE_BINARY; case ID_DURATION: case ID_SAMPLING_FREQUENCY: case ID_PRIMARY_R_CHROMATICITY_X: case ID_PRIMARY_R_CHROMATICITY_Y: case ID_PRIMARY_G_CHROMATICITY_X: case ID_PRIMARY_G_CHROMATICITY_Y: case ID_PRIMARY_B_CHROMATICITY_X: case ID_PRIMARY_B_CHROMATICITY_Y: case ID_WHITE_POINT_CHROMATICITY_X: case ID_WHITE_POINT_CHROMATICITY_Y: case ID_LUMNINANCE_MAX: case ID_LUMNINANCE_MIN: case ID_PROJECTION_POSE_YAW: case ID_PROJECTION_POSE_PITCH: case ID_PROJECTION_POSE_ROLL: return EbmlProcessor.ELEMENT_TYPE_FLOAT; default: return EbmlProcessor.ELEMENT_TYPE_UNKNOWN; } } /** * Checks if the given id is that of a level 1 element. * * @see EbmlProcessor#isLevel1Element(int) */ @CallSuper protected boolean isLevel1Element(int id) { return id == ID_SEGMENT_INFO || id == ID_CLUSTER || id == ID_CUES || id == ID_TRACKS; } /** * Called when the start of a master element is encountered. 
* * @see EbmlProcessor#startMasterElement(int, long, long) */ @CallSuper protected void startMasterElement(int id, long contentPosition, long contentSize) throws ParserException { assertInitialized(); switch (id) { case ID_SEGMENT: if (segmentContentPosition != C.POSITION_UNSET && segmentContentPosition != contentPosition) { throw ParserException.createForMalformedContainer( "Multiple Segment elements not supported", /* cause= */ null); } segmentContentPosition = contentPosition; segmentContentSize = contentSize; break; case ID_SEEK: seekEntryId = UNSET_ENTRY_ID; seekEntryPosition = C.POSITION_UNSET; break; case ID_CUES: cueTimesUs = new LongArray(); cueClusterPositions = new LongArray(); break; case ID_CUE_POINT: seenClusterPositionForCurrentCuePoint = false; break; case ID_CLUSTER: if (!sentSeekMap) { // We need to build cues before parsing the cluster. if (seekForCuesEnabled && cuesContentPosition != C.POSITION_UNSET) { // We know where the Cues element is located. Seek to request it. seekForCues = true; } else { // We don't know where the Cues element is located. It's most likely omitted. Allow // playback, but disable seeking. extractorOutput.seekMap(new SeekMap.Unseekable(durationUs)); sentSeekMap = true; } } break; case ID_BLOCK_GROUP: blockHasReferenceBlock = false; break; case ID_CONTENT_ENCODING: // TODO: check and fail if more than one content encoding is present. break; case ID_CONTENT_ENCRYPTION: getCurrentTrack(id).hasContentEncryption = true; break; case ID_TRACK_ENTRY: currentTrack = new Track(); break; case ID_MASTERING_METADATA: getCurrentTrack(id).hasColorInfo = true; break; default: break; } } /** * Called when the end of a master element is encountered. * * @see EbmlProcessor#endMasterElement(int) */ @CallSuper protected void endMasterElement(int id) throws ParserException { assertInitialized(); switch (id) { case ID_SEGMENT_INFO: if (timecodeScale == C.TIME_UNSET) { // timecodeScale was omitted. Use the default value. 
timecodeScale = 1000000; } if (durationTimecode != C.TIME_UNSET) { durationUs = scaleTimecodeToUs(durationTimecode); } break; case ID_SEEK: if (seekEntryId == UNSET_ENTRY_ID || seekEntryPosition == C.POSITION_UNSET) { throw ParserException.createForMalformedContainer( "Mandatory element SeekID or SeekPosition not found", /* cause= */ null); } if (seekEntryId == ID_CUES) { cuesContentPosition = seekEntryPosition; } break; case ID_CUES: if (!sentSeekMap) { extractorOutput.seekMap(buildSeekMap(cueTimesUs, cueClusterPositions)); sentSeekMap = true; } else { // We have already built the cues. Ignore. } this.cueTimesUs = null; this.cueClusterPositions = null; break; case ID_BLOCK_GROUP: if (blockState != BLOCK_STATE_DATA) { // We've skipped this block (due to incompatible track number). return; } // Commit sample metadata. int sampleOffset = 0; for (int i = 0; i < blockSampleCount; i++) { sampleOffset += blockSampleSizes[i]; } Track track = tracks.get(blockTrackNumber); track.assertOutputInitialized(); for (int i = 0; i < blockSampleCount; i++) { long sampleTimeUs = blockTimeUs + (i * track.defaultSampleDurationNs) / 1000; int sampleFlags = blockFlags; if (i == 0 && !blockHasReferenceBlock) { // If the ReferenceBlock element was not found in this block, then the first frame is a // keyframe. sampleFlags |= C.BUFFER_FLAG_KEY_FRAME; } int sampleSize = blockSampleSizes[i]; sampleOffset -= sampleSize; // The offset is to the end of the sample. 
commitSampleToOutput(track, sampleTimeUs, sampleFlags, sampleSize, sampleOffset); } blockState = BLOCK_STATE_START; break; case ID_CONTENT_ENCODING: assertInTrackEntry(id); if (currentTrack.hasContentEncryption) { if (currentTrack.cryptoData == null) { throw ParserException.createForMalformedContainer( "Encrypted Track found but ContentEncKeyID was not found", /* cause= */ null); } currentTrack.drmInitData = new DrmInitData( new SchemeData( C.UUID_NIL, MimeTypes.VIDEO_WEBM, currentTrack.cryptoData.encryptionKey)); } break; case ID_CONTENT_ENCODINGS: assertInTrackEntry(id); if (currentTrack.hasContentEncryption && currentTrack.sampleStrippedBytes != null) { throw ParserException.createForMalformedContainer( "Combining encryption and compression is not supported", /* cause= */ null); } break; case ID_TRACK_ENTRY: Track currentTrack = checkStateNotNull(this.currentTrack); if (currentTrack.codecId == null) { throw ParserException.createForMalformedContainer( "CodecId is missing in TrackEntry element", /* cause= */ null); } else { if (isCodecSupported(currentTrack.codecId)) { currentTrack.initializeOutput(extractorOutput, currentTrack.number); tracks.put(currentTrack.number, currentTrack); } } this.currentTrack = null; break; case ID_TRACKS: if (tracks.size() == 0) { throw ParserException.createForMalformedContainer( "No valid tracks were found", /* cause= */ null); } extractorOutput.endTracks(); break; default: break; } } /** * Called when an integer element is encountered. * * @see EbmlProcessor#integerElement(int, long) */ @CallSuper protected void integerElement(int id, long value) throws ParserException { switch (id) { case ID_EBML_READ_VERSION: // Validate that EBMLReadVersion is supported. This extractor only supports v1. if (value != 1) { throw ParserException.createForMalformedContainer( "EBMLReadVersion " + value + " not supported", /* cause= */ null); } break; case ID_DOC_TYPE_READ_VERSION: // Validate that DocTypeReadVersion is supported. 
This extractor only supports up to v2. if (value < 1 || value > 2) { throw ParserException.createForMalformedContainer( "DocTypeReadVersion " + value + " not supported", /* cause= */ null); } break; case ID_SEEK_POSITION: // Seek Position is the relative offset beginning from the Segment. So to get absolute // offset from the beginning of the file, we need to add segmentContentPosition to it. seekEntryPosition = value + segmentContentPosition; break; case ID_TIMECODE_SCALE: timecodeScale = value; break; case ID_PIXEL_WIDTH: getCurrentTrack(id).width = (int) value; break; case ID_PIXEL_HEIGHT: getCurrentTrack(id).height = (int) value; break; case ID_DISPLAY_WIDTH: getCurrentTrack(id).displayWidth = (int) value; break; case ID_DISPLAY_HEIGHT: getCurrentTrack(id).displayHeight = (int) value; break; case ID_DISPLAY_UNIT: getCurrentTrack(id).displayUnit = (int) value; break; case ID_TRACK_NUMBER: getCurrentTrack(id).number = (int) value; break; case ID_FLAG_DEFAULT: getCurrentTrack(id).flagDefault = value == 1; break; case ID_FLAG_FORCED: getCurrentTrack(id).flagForced = value == 1; break; case ID_TRACK_TYPE: getCurrentTrack(id).type = (int) value; break; case ID_DEFAULT_DURATION: getCurrentTrack(id).defaultSampleDurationNs = (int) value; break; case ID_MAX_BLOCK_ADDITION_ID: getCurrentTrack(id).maxBlockAdditionId = (int) value; break; case ID_BLOCK_ADD_ID_TYPE: getCurrentTrack(id).blockAddIdType = (int) value; break; case ID_CODEC_DELAY: getCurrentTrack(id).codecDelayNs = value; break; case ID_SEEK_PRE_ROLL: getCurrentTrack(id).seekPreRollNs = value; break; case ID_CHANNELS: getCurrentTrack(id).channelCount = (int) value; break; case ID_AUDIO_BIT_DEPTH: getCurrentTrack(id).audioBitDepth = (int) value; break; case ID_REFERENCE_BLOCK: blockHasReferenceBlock = true; break; case ID_CONTENT_ENCODING_ORDER: // This extractor only supports one ContentEncoding element and hence the order has to be 0. 
if (value != 0) { throw ParserException.createForMalformedContainer( "ContentEncodingOrder " + value + " not supported", /* cause= */ null); } break; case ID_CONTENT_ENCODING_SCOPE: // This extractor only supports the scope of all frames. if (value != 1) { throw ParserException.createForMalformedContainer( "ContentEncodingScope " + value + " not supported", /* cause= */ null); } break; case ID_CONTENT_COMPRESSION_ALGORITHM: // This extractor only supports header stripping. if (value != 3) { throw ParserException.createForMalformedContainer( "ContentCompAlgo " + value + " not supported", /* cause= */ null); } break; case ID_CONTENT_ENCRYPTION_ALGORITHM: // Only the value 5 (AES) is allowed according to the WebM specification. if (value != 5) { throw ParserException.createForMalformedContainer( "ContentEncAlgo " + value + " not supported", /* cause= */ null); } break; case ID_CONTENT_ENCRYPTION_AES_SETTINGS_CIPHER_MODE: // Only the value 1 is allowed according to the WebM specification. if (value != 1) { throw ParserException.createForMalformedContainer( "AESSettingsCipherMode " + value + " not supported", /* cause= */ null); } break; case ID_CUE_TIME: assertInCues(id); cueTimesUs.add(scaleTimecodeToUs(value)); break; case ID_CUE_CLUSTER_POSITION: if (!seenClusterPositionForCurrentCuePoint) { assertInCues(id); // If there's more than one video/audio track, then there could be more than one // CueTrackPositions within a single CuePoint. In such a case, ignore all but the first // one (since the cluster position will be quite close for all the tracks). 
cueClusterPositions.add(value); seenClusterPositionForCurrentCuePoint = true; } break; case ID_TIME_CODE: clusterTimecodeUs = scaleTimecodeToUs(value); break; case ID_BLOCK_DURATION: blockDurationUs = scaleTimecodeToUs(value); break; case ID_STEREO_MODE: int layout = (int) value; assertInTrackEntry(id); switch (layout) { case 0: currentTrack.stereoMode = C.STEREO_MODE_MONO; break; case 1: currentTrack.stereoMode = C.STEREO_MODE_LEFT_RIGHT; break; case 3: currentTrack.stereoMode = C.STEREO_MODE_TOP_BOTTOM; break; case 15: currentTrack.stereoMode = C.STEREO_MODE_STEREO_MESH; break; default: break; } break; case ID_COLOUR_PRIMARIES: assertInTrackEntry(id); currentTrack.hasColorInfo = true; int colorSpace = ColorInfo.isoColorPrimariesToColorSpace((int) value); if (colorSpace != Format.NO_VALUE) { currentTrack.colorSpace = colorSpace; } break; case ID_COLOUR_TRANSFER: assertInTrackEntry(id); int colorTransfer = ColorInfo.isoTransferCharacteristicsToColorTransfer((int) value); if (colorTransfer != Format.NO_VALUE) { currentTrack.colorTransfer = colorTransfer; } break; case ID_COLOUR_RANGE: assertInTrackEntry(id); switch ((int) value) { case 1: // Broadcast range. currentTrack.colorRange = C.COLOR_RANGE_LIMITED; break; case 2: currentTrack.colorRange = C.COLOR_RANGE_FULL; break; default: break; } break; case ID_MAX_CLL: getCurrentTrack(id).maxContentLuminance = (int) value; break; case ID_MAX_FALL: getCurrentTrack(id).maxFrameAverageLuminance = (int) value; break; case ID_PROJECTION_TYPE: assertInTrackEntry(id); switch ((int) value) { case 0: currentTrack.projectionType = C.PROJECTION_RECTANGULAR; break; case 1: currentTrack.projectionType = C.PROJECTION_EQUIRECTANGULAR; break; case 2: currentTrack.projectionType = C.PROJECTION_CUBEMAP; break; case 3: currentTrack.projectionType = C.PROJECTION_MESH; break; default: break; } break; case ID_BLOCK_ADD_ID: blockAdditionalId = (int) value; break; default: break; } } /** * Called when a float element is encountered. 
* * @see EbmlProcessor#floatElement(int, double) */ @CallSuper protected void floatElement(int id, double value) throws ParserException { switch (id) { case ID_DURATION: durationTimecode = (long) value; break; case ID_SAMPLING_FREQUENCY: getCurrentTrack(id).sampleRate = (int) value; break; case ID_PRIMARY_R_CHROMATICITY_X: getCurrentTrack(id).primaryRChromaticityX = (float) value; break; case ID_PRIMARY_R_CHROMATICITY_Y: getCurrentTrack(id).primaryRChromaticityY = (float) value; break; case ID_PRIMARY_G_CHROMATICITY_X: getCurrentTrack(id).primaryGChromaticityX = (float) value; break; case ID_PRIMARY_G_CHROMATICITY_Y: getCurrentTrack(id).primaryGChromaticityY = (float) value; break; case ID_PRIMARY_B_CHROMATICITY_X: getCurrentTrack(id).primaryBChromaticityX = (float) value; break; case ID_PRIMARY_B_CHROMATICITY_Y: getCurrentTrack(id).primaryBChromaticityY = (float) value; break; case ID_WHITE_POINT_CHROMATICITY_X: getCurrentTrack(id).whitePointChromaticityX = (float) value; break; case ID_WHITE_POINT_CHROMATICITY_Y: getCurrentTrack(id).whitePointChromaticityY = (float) value; break; case ID_LUMNINANCE_MAX: getCurrentTrack(id).maxMasteringLuminance = (float) value; break; case ID_LUMNINANCE_MIN: getCurrentTrack(id).minMasteringLuminance = (float) value; break; case ID_PROJECTION_POSE_YAW: getCurrentTrack(id).projectionPoseYaw = (float) value; break; case ID_PROJECTION_POSE_PITCH: getCurrentTrack(id).projectionPosePitch = (float) value; break; case ID_PROJECTION_POSE_ROLL: getCurrentTrack(id).projectionPoseRoll = (float) value; break; default: break; } } /** * Called when a string element is encountered. * * @see EbmlProcessor#stringElement(int, String) */ @CallSuper protected void stringElement(int id, String value) throws ParserException { switch (id) { case ID_DOC_TYPE: // Validate that DocType is supported. 
if (!DOC_TYPE_WEBM.equals(value) && !DOC_TYPE_MATROSKA.equals(value)) { throw ParserException.createForMalformedContainer( "DocType " + value + " not supported", /* cause= */ null); } break; case ID_NAME: getCurrentTrack(id).name = value; break; case ID_CODEC_ID: getCurrentTrack(id).codecId = value; break; case ID_LANGUAGE: getCurrentTrack(id).language = value; break; default: break; } } /** * Called when a binary element is encountered. * * @see EbmlProcessor#binaryElement(int, int, ExtractorInput) */ @CallSuper protected void binaryElement(int id, int contentSize, ExtractorInput input) throws IOException { switch (id) { case ID_SEEK_ID: Arrays.fill(seekEntryIdBytes.getData(), (byte) 0); input.readFully(seekEntryIdBytes.getData(), 4 - contentSize, contentSize); seekEntryIdBytes.setPosition(0); seekEntryId = (int) seekEntryIdBytes.readUnsignedInt(); break; case ID_BLOCK_ADD_ID_EXTRA_DATA: handleBlockAddIDExtraData(getCurrentTrack(id), input, contentSize); break; case ID_CODEC_PRIVATE: assertInTrackEntry(id); currentTrack.codecPrivate = new byte[contentSize]; input.readFully(currentTrack.codecPrivate, 0, contentSize); break; case ID_PROJECTION_PRIVATE: assertInTrackEntry(id); currentTrack.projectionData = new byte[contentSize]; input.readFully(currentTrack.projectionData, 0, contentSize); break; case ID_CONTENT_COMPRESSION_SETTINGS: assertInTrackEntry(id); // This extractor only supports header stripping, so the payload is the stripped bytes. currentTrack.sampleStrippedBytes = new byte[contentSize]; input.readFully(currentTrack.sampleStrippedBytes, 0, contentSize); break; case ID_CONTENT_ENCRYPTION_KEY_ID: byte[] encryptionKey = new byte[contentSize]; input.readFully(encryptionKey, 0, contentSize); getCurrentTrack(id).cryptoData = new TrackOutput.CryptoData( C.CRYPTO_MODE_AES_CTR, encryptionKey, 0, 0); // We assume patternless AES-CTR. 
break; case ID_SIMPLE_BLOCK: case ID_BLOCK: // Please refer to http://www.matroska.org/technical/specs/index.html#simpleblock_structure // and http://matroska.org/technical/specs/index.html#block_structure // for info about how data is organized in SimpleBlock and Block elements respectively. They // differ only in the way flags are specified. if (blockState == BLOCK_STATE_START) { blockTrackNumber = (int) varintReader.readUnsignedVarint(input, false, true, 8); blockTrackNumberLength = varintReader.getLastLength(); blockDurationUs = C.TIME_UNSET; blockState = BLOCK_STATE_HEADER; scratch.reset(/* limit= */ 0); } Track track = tracks.get(blockTrackNumber); // Ignore the block if we don't know about the track to which it belongs. if (track == null) { input.skipFully(contentSize - blockTrackNumberLength); blockState = BLOCK_STATE_START; return; } track.assertOutputInitialized(); if (blockState == BLOCK_STATE_HEADER) { // Read the relative timecode (2 bytes) and flags (1 byte). readScratch(input, 3); int lacing = (scratch.getData()[2] & 0x06) >> 1; if (lacing == LACING_NONE) { blockSampleCount = 1; blockSampleSizes = ensureArrayCapacity(blockSampleSizes, 1); blockSampleSizes[0] = contentSize - blockTrackNumberLength - 3; } else { // Read the sample count (1 byte). 
readScratch(input, 4); blockSampleCount = (scratch.getData()[3] & 0xFF) + 1; blockSampleSizes = ensureArrayCapacity(blockSampleSizes, blockSampleCount); if (lacing == LACING_FIXED_SIZE) { int blockLacingSampleSize = (contentSize - blockTrackNumberLength - 4) / blockSampleCount; Arrays.fill(blockSampleSizes, 0, blockSampleCount, blockLacingSampleSize); } else if (lacing == LACING_XIPH) { int totalSamplesSize = 0; int headerSize = 4; for (int sampleIndex = 0; sampleIndex < blockSampleCount - 1; sampleIndex++) { blockSampleSizes[sampleIndex] = 0; int byteValue; do { readScratch(input, ++headerSize); byteValue = scratch.getData()[headerSize - 1] & 0xFF; blockSampleSizes[sampleIndex] += byteValue; } while (byteValue == 0xFF); totalSamplesSize += blockSampleSizes[sampleIndex]; } blockSampleSizes[blockSampleCount - 1] = contentSize - blockTrackNumberLength - headerSize - totalSamplesSize; } else if (lacing == LACING_EBML) { int totalSamplesSize = 0; int headerSize = 4; for (int sampleIndex = 0; sampleIndex < blockSampleCount - 1; sampleIndex++) { blockSampleSizes[sampleIndex] = 0; readScratch(input, ++headerSize); if (scratch.getData()[headerSize - 1] == 0) { throw ParserException.createForMalformedContainer( "No valid varint length mask found", /* cause= */ null); } long readValue = 0; for (int i = 0; i < 8; i++) { int lengthMask = 1 << (7 - i); if ((scratch.getData()[headerSize - 1] & lengthMask) != 0) { int readPosition = headerSize - 1; headerSize += i; readScratch(input, headerSize); readValue = (scratch.getData()[readPosition++] & 0xFF) & ~lengthMask; while (readPosition < headerSize) { readValue <<= 8; readValue |= (scratch.getData()[readPosition++] & 0xFF); } // The first read value is the first size. Later values are signed offsets. 
if (sampleIndex > 0) { readValue -= (1L << (6 + i * 7)) - 1; } break; } } if (readValue < Integer.MIN_VALUE || readValue > Integer.MAX_VALUE) { throw ParserException.createForMalformedContainer( "EBML lacing sample size out of range.", /* cause= */ null); } int intReadValue = (int) readValue; blockSampleSizes[sampleIndex] = sampleIndex == 0 ? intReadValue : blockSampleSizes[sampleIndex - 1] + intReadValue; totalSamplesSize += blockSampleSizes[sampleIndex]; } blockSampleSizes[blockSampleCount - 1] = contentSize - blockTrackNumberLength - headerSize - totalSamplesSize; } else { // Lacing is always in the range 0--3. throw ParserException.createForMalformedContainer( "Unexpected lacing value: " + lacing, /* cause= */ null); } } int timecode = (scratch.getData()[0] << 8) | (scratch.getData()[1] & 0xFF); blockTimeUs = clusterTimecodeUs + scaleTimecodeToUs(timecode); boolean isKeyframe = track.type == TRACK_TYPE_AUDIO || (id == ID_SIMPLE_BLOCK && (scratch.getData()[2] & 0x80) == 0x80); blockFlags = isKeyframe ? C.BUFFER_FLAG_KEY_FRAME : 0; blockState = BLOCK_STATE_DATA; blockSampleIndex = 0; } if (id == ID_SIMPLE_BLOCK) { // For SimpleBlock, we can write sample data and immediately commit the corresponding // sample metadata. while (blockSampleIndex < blockSampleCount) { int sampleSize = writeSampleData(input, track, blockSampleSizes[blockSampleIndex]); long sampleTimeUs = blockTimeUs + (blockSampleIndex * track.defaultSampleDurationNs) / 1000; commitSampleToOutput(track, sampleTimeUs, blockFlags, sampleSize, /* offset= */ 0); blockSampleIndex++; } blockState = BLOCK_STATE_START; } else { // For Block, we need to wait until the end of the BlockGroup element before committing // sample metadata. This is so that we can handle ReferenceBlock (which can be used to // infer whether the first sample in the block is a keyframe), and BlockAdditions (which // can contain additional sample data to append) contained in the block group. 
Just output // the sample data, storing the final sample sizes for when we commit the metadata. while (blockSampleIndex < blockSampleCount) { blockSampleSizes[blockSampleIndex] = writeSampleData(input, track, blockSampleSizes[blockSampleIndex]); blockSampleIndex++; } } break; case ID_BLOCK_ADDITIONAL: if (blockState != BLOCK_STATE_DATA) { return; } handleBlockAdditionalData( tracks.get(blockTrackNumber), blockAdditionalId, input, contentSize); break; default: throw ParserException.createForMalformedContainer( "Unexpected id: " + id, /* cause= */ null); } } protected void handleBlockAddIDExtraData(Track track, ExtractorInput input, int contentSize) throws IOException { if (track.blockAddIdType == BLOCK_ADD_ID_TYPE_DVVC || track.blockAddIdType == BLOCK_ADD_ID_TYPE_DVCC) { track.dolbyVisionConfigBytes = new byte[contentSize]; input.readFully(track.dolbyVisionConfigBytes, 0, contentSize); } else { // Unhandled BlockAddIDExtraData. input.skipFully(contentSize); } } protected void handleBlockAdditionalData( Track track, int blockAdditionalId, ExtractorInput input, int contentSize) throws IOException { if (blockAdditionalId == BLOCK_ADDITIONAL_ID_VP9_ITU_T_35 && CODEC_ID_VP9.equals(track.codecId)) { blockAdditionalData.reset(contentSize); input.readFully(blockAdditionalData.getData(), 0, contentSize); } else { // Unhandled block additional data. input.skipFully(contentSize); } } @EnsuresNonNull("currentTrack") private void assertInTrackEntry(int id) throws ParserException { if (currentTrack == null) { throw ParserException.createForMalformedContainer( "Element " + id + " must be in a TrackEntry", /* cause= */ null); } } @EnsuresNonNull({"cueTimesUs", "cueClusterPositions"}) private void assertInCues(int id) throws ParserException { if (cueTimesUs == null || cueClusterPositions == null) { throw ParserException.createForMalformedContainer( "Element " + id + " must be in a Cues", /* cause= */ null); } } /** * Returns the track corresponding to the current TrackEntry element. 
* * @throws ParserException if the element id is not in a TrackEntry. */ protected Track getCurrentTrack(int currentElementId) throws ParserException { assertInTrackEntry(currentElementId); return currentTrack; } @RequiresNonNull("#1.output") private void commitSampleToOutput( Track track, long timeUs, @C.BufferFlags int flags, int size, int offset) { if (track.trueHdSampleRechunker != null) { track.trueHdSampleRechunker.sampleMetadata( track.output, timeUs, flags, size, offset, track.cryptoData); } else { if (CODEC_ID_SUBRIP.equals(track.codecId) || CODEC_ID_ASS.equals(track.codecId) || CODEC_ID_VTT.equals(track.codecId)) { if (blockSampleCount > 1) { Log.w(TAG, "Skipping subtitle sample in laced block."); } else if (blockDurationUs == C.TIME_UNSET) { Log.w(TAG, "Skipping subtitle sample with no duration."); } else { setSubtitleEndTime(track.codecId, blockDurationUs, subtitleSample.getData()); // The Matroska spec doesn't clearly define whether subtitle samples are null-terminated // or the sample should instead be sized precisely. We truncate the sample at a null-byte // to gracefully handle null-terminated strings followed by garbage bytes. for (int i = subtitleSample.getPosition(); i < subtitleSample.limit(); i++) { if (subtitleSample.getData()[i] == 0) { subtitleSample.setLimit(i); break; } } // Note: If we ever want to support DRM protected subtitles then we'll need to output the // appropriate encryption data here. track.output.sampleData(subtitleSample, subtitleSample.limit()); size += subtitleSample.limit(); } } if ((flags & C.BUFFER_FLAG_HAS_SUPPLEMENTAL_DATA) != 0) { if (blockSampleCount > 1) { // There were multiple samples in the block. Appending the additional data to the last // sample doesn't make sense. Skip instead. flags &= ~C.BUFFER_FLAG_HAS_SUPPLEMENTAL_DATA; } else { // Append supplemental data. 
int blockAdditionalSize = blockAdditionalData.limit(); track.output.sampleData( blockAdditionalData, blockAdditionalSize, TrackOutput.SAMPLE_DATA_PART_SUPPLEMENTAL); size += blockAdditionalSize; } } track.output.sampleMetadata(timeUs, flags, size, offset, track.cryptoData); } haveOutputSample = true; } /** * Ensures {@link #scratch} contains at least {@code requiredLength} bytes of data, reading from * the extractor input if necessary. */ private void readScratch(ExtractorInput input, int requiredLength) throws IOException { if (scratch.limit() >= requiredLength) { return; } if (scratch.capacity() < requiredLength) { scratch.ensureCapacity(max(scratch.capacity() * 2, requiredLength)); } input.readFully(scratch.getData(), scratch.limit(), requiredLength - scratch.limit()); scratch.setLimit(requiredLength); } /** * Writes data for a single sample to the track output. * * @param input The input from which to read sample data. * @param track The track to output the sample to. * @param size The size of the sample data on the input side. * @return The final size of the written sample. * @throws IOException If an error occurs reading from the input. */ @RequiresNonNull("#2.output") private int writeSampleData(ExtractorInput input, Track track, int size) throws IOException { if (CODEC_ID_SUBRIP.equals(track.codecId)) { writeSubtitleSampleData(input, SUBRIP_PREFIX, size); return finishWriteSampleData(); } else if (CODEC_ID_ASS.equals(track.codecId)) { writeSubtitleSampleData(input, SSA_PREFIX, size); return finishWriteSampleData(); } else if (CODEC_ID_VTT.equals(track.codecId)) { writeSubtitleSampleData(input, VTT_PREFIX, size); return finishWriteSampleData(); } TrackOutput output = track.output; if (!sampleEncodingHandled) { if (track.hasContentEncryption) { // If the sample is encrypted, read its encryption signal byte and set the IV size. // Clear the encrypted flag. 
blockFlags &= ~C.BUFFER_FLAG_ENCRYPTED; if (!sampleSignalByteRead) { input.readFully(scratch.getData(), 0, 1); sampleBytesRead++; if ((scratch.getData()[0] & 0x80) == 0x80) { throw ParserException.createForMalformedContainer( "Extension bit is set in signal byte", /* cause= */ null); } sampleSignalByte = scratch.getData()[0]; sampleSignalByteRead = true; } boolean isEncrypted = (sampleSignalByte & 0x01) == 0x01; if (isEncrypted) { boolean hasSubsampleEncryption = (sampleSignalByte & 0x02) == 0x02; blockFlags |= C.BUFFER_FLAG_ENCRYPTED; if (!sampleInitializationVectorRead) { input.readFully(encryptionInitializationVector.getData(), 0, ENCRYPTION_IV_SIZE); sampleBytesRead += ENCRYPTION_IV_SIZE; sampleInitializationVectorRead = true; // Write the signal byte, containing the IV size and the subsample encryption flag. scratch.getData()[0] = (byte) (ENCRYPTION_IV_SIZE | (hasSubsampleEncryption ? 0x80 : 0x00)); scratch.setPosition(0); output.sampleData(scratch, 1, TrackOutput.SAMPLE_DATA_PART_ENCRYPTION); sampleBytesWritten++; // Write the IV. 
encryptionInitializationVector.setPosition(0); output.sampleData( encryptionInitializationVector, ENCRYPTION_IV_SIZE, TrackOutput.SAMPLE_DATA_PART_ENCRYPTION); sampleBytesWritten += ENCRYPTION_IV_SIZE; } if (hasSubsampleEncryption) { if (!samplePartitionCountRead) { input.readFully(scratch.getData(), 0, 1); sampleBytesRead++; scratch.setPosition(0); samplePartitionCount = scratch.readUnsignedByte(); samplePartitionCountRead = true; } int samplePartitionDataSize = samplePartitionCount * 4; scratch.reset(samplePartitionDataSize); input.readFully(scratch.getData(), 0, samplePartitionDataSize); sampleBytesRead += samplePartitionDataSize; short subsampleCount = (short) (1 + (samplePartitionCount / 2)); int subsampleDataSize = 2 + 6 * subsampleCount; if (encryptionSubsampleDataBuffer == null || encryptionSubsampleDataBuffer.capacity() < subsampleDataSize) { encryptionSubsampleDataBuffer = ByteBuffer.allocate(subsampleDataSize); } encryptionSubsampleDataBuffer.position(0); encryptionSubsampleDataBuffer.putShort(subsampleCount); // Loop through the partition offsets and write out the data in the way ExoPlayer // wants it (ISO 23001-7 Part 7): // 2 bytes - sub sample count. // for each sub sample: // 2 bytes - clear data size. // 4 bytes - encrypted data size. 
int partitionOffset = 0; for (int i = 0; i < samplePartitionCount; i++) { int previousPartitionOffset = partitionOffset; partitionOffset = scratch.readUnsignedIntToInt(); if ((i % 2) == 0) { encryptionSubsampleDataBuffer.putShort( (short) (partitionOffset - previousPartitionOffset)); } else { encryptionSubsampleDataBuffer.putInt(partitionOffset - previousPartitionOffset); } } int finalPartitionSize = size - sampleBytesRead - partitionOffset; if ((samplePartitionCount % 2) == 1) { encryptionSubsampleDataBuffer.putInt(finalPartitionSize); } else { encryptionSubsampleDataBuffer.putShort((short) finalPartitionSize); encryptionSubsampleDataBuffer.putInt(0); } encryptionSubsampleData.reset(encryptionSubsampleDataBuffer.array(), subsampleDataSize); output.sampleData( encryptionSubsampleData, subsampleDataSize, TrackOutput.SAMPLE_DATA_PART_ENCRYPTION); sampleBytesWritten += subsampleDataSize; } } } else if (track.sampleStrippedBytes != null) { // If the sample has header stripping, prepare to read/output the stripped bytes first. sampleStrippedBytes.reset(track.sampleStrippedBytes, track.sampleStrippedBytes.length); } if (track.maxBlockAdditionId > 0) { blockFlags |= C.BUFFER_FLAG_HAS_SUPPLEMENTAL_DATA; blockAdditionalData.reset(/* limit= */ 0); // If there is supplemental data, the structure of the sample data is: // sample size (4 bytes) || sample data || supplemental data scratch.reset(/* limit= */ 4); scratch.getData()[0] = (byte) ((size >> 24) & 0xFF); scratch.getData()[1] = (byte) ((size >> 16) & 0xFF); scratch.getData()[2] = (byte) ((size >> 8) & 0xFF); scratch.getData()[3] = (byte) (size & 0xFF); output.sampleData(scratch, 4, TrackOutput.SAMPLE_DATA_PART_SUPPLEMENTAL); sampleBytesWritten += 4; } sampleEncodingHandled = true; } size += sampleStrippedBytes.limit(); if (CODEC_ID_H264.equals(track.codecId) || CODEC_ID_H265.equals(track.codecId)) { // TODO: Deduplicate with Mp4Extractor. 
// Zero the top three bytes of the array that we'll use to decode nal unit lengths, in case // they're only 1 or 2 bytes long. byte[] nalLengthData = nalLength.getData(); nalLengthData[0] = 0; nalLengthData[1] = 0; nalLengthData[2] = 0; int nalUnitLengthFieldLength = track.nalUnitLengthFieldLength; int nalUnitLengthFieldLengthDiff = 4 - track.nalUnitLengthFieldLength; // NAL units are length delimited, but the decoder requires start code delimited units. // Loop until we've written the sample to the track output, replacing length delimiters with // start codes as we encounter them. while (sampleBytesRead < size) { if (sampleCurrentNalBytesRemaining == 0) { // Read the NAL length so that we know where we find the next one. writeToTarget( input, nalLengthData, nalUnitLengthFieldLengthDiff, nalUnitLengthFieldLength); sampleBytesRead += nalUnitLengthFieldLength; nalLength.setPosition(0); sampleCurrentNalBytesRemaining = nalLength.readUnsignedIntToInt(); // Write a start code for the current NAL unit. nalStartCode.setPosition(0); output.sampleData(nalStartCode, 4); sampleBytesWritten += 4; } else { // Write the payload of the NAL unit. int bytesWritten = writeToOutput(input, output, sampleCurrentNalBytesRemaining); sampleBytesRead += bytesWritten; sampleBytesWritten += bytesWritten; sampleCurrentNalBytesRemaining -= bytesWritten; } } } else { if (track.trueHdSampleRechunker != null) { checkState(sampleStrippedBytes.limit() == 0); track.trueHdSampleRechunker.startSample(input); } while (sampleBytesRead < size) { int bytesWritten = writeToOutput(input, output, size - sampleBytesRead); sampleBytesRead += bytesWritten; sampleBytesWritten += bytesWritten; } } if (CODEC_ID_VORBIS.equals(track.codecId)) { // Vorbis decoder in android MediaCodec [1] expects the last 4 bytes of the sample to be the // number of samples in the current page. This definition holds good only for Ogg and // irrelevant for Matroska. 
// So we always set this to -1 (the decoder will ignore this value if
// we set it to -1). The android platform media extractor [2] does the same.
// [1]
// https://android.googlesource.com/platform/frameworks/av/+/lollipop-release/media/libstagefright/codecs/vorbis/dec/SoftVorbis.cpp#314
// [2]
// https://android.googlesource.com/platform/frameworks/av/+/lollipop-release/media/libstagefright/NuMediaExtractor.cpp#474
vorbisNumPageSamples.setPosition(0);
output.sampleData(vorbisNumPageSamples, 4);
sampleBytesWritten += 4;
}
return finishWriteSampleData();
}

/**
 * Called by {@link #writeSampleData(ExtractorInput, Track, int)} when the sample has been
 * written. Returns the final sample size and resets state for the next sample.
 */
private int finishWriteSampleData() {
  int sampleSize = sampleBytesWritten;
  resetWriteSampleData();
  return sampleSize;
}

/** Resets state used by {@link #writeSampleData(ExtractorInput, Track, int)}. */
private void resetWriteSampleData() {
  sampleBytesRead = 0;
  sampleBytesWritten = 0;
  sampleCurrentNalBytesRemaining = 0;
  sampleEncodingHandled = false;
  sampleSignalByteRead = false;
  samplePartitionCountRead = false;
  samplePartitionCount = 0;
  sampleSignalByte = (byte) 0;
  sampleInitializationVectorRead = false;
  sampleStrippedBytes.reset(/* limit= */ 0);
}

/**
 * Reads a subtitle sample of {@code size} bytes from {@code input} and stores it, preceded by
 * {@code samplePrefix}, in {@link #subtitleSample}. The sample is intentionally NOT written to
 * the track output yet; see the comment at the end of this method.
 *
 * @param input The input from which the subtitle payload is read.
 * @param samplePrefix The codec-specific prefix to prepend to the payload.
 * @param size The size of the subtitle payload, in bytes.
 * @throws IOException If an error occurs reading from the input.
 */
private void writeSubtitleSampleData(ExtractorInput input, byte[] samplePrefix, int size)
    throws IOException {
  int sizeWithPrefix = samplePrefix.length + size;
  if (subtitleSample.capacity() < sizeWithPrefix) {
    // Initialize subripSample to contain the required prefix and have space to hold a subtitle
    // twice as long as this one.
    subtitleSample.reset(Arrays.copyOf(samplePrefix, sizeWithPrefix + size));
  } else {
    // Buffer is large enough already; just overwrite the prefix portion.
    System.arraycopy(samplePrefix, 0, subtitleSample.getData(), 0, samplePrefix.length);
  }
  input.readFully(subtitleSample.getData(), samplePrefix.length, size);
  subtitleSample.setPosition(0);
  subtitleSample.setLimit(sizeWithPrefix);
  // Defer writing the data to the track output. We need to modify the sample data by setting
  // the correct end timecode, which we might not have yet.
}

/**
 * Overwrites the end timecode in {@code subtitleData} with the correctly formatted time derived
 * from {@code durationUs}.
 *
 * <p>See documentation on {@link #SSA_DIALOGUE_FORMAT} and {@link #SUBRIP_PREFIX} for why we use
 * the duration as the end timecode.
 *
 * @param codecId The subtitle codec; must be {@link #CODEC_ID_SUBRIP}, {@link #CODEC_ID_ASS} or
 *     {@link #CODEC_ID_VTT}.
 * @param durationUs The duration of the sample, in microseconds.
 * @param subtitleData The subtitle sample in which to overwrite the end timecode (output
 *     parameter).
 */
private static void setSubtitleEndTime(String codecId, long durationUs, byte[] subtitleData) {
  byte[] endTimecode;
  int endTimecodeOffset;
  switch (codecId) {
    case CODEC_ID_SUBRIP:
      endTimecode =
          formatSubtitleTimecode(
              durationUs, SUBRIP_TIMECODE_FORMAT, SUBRIP_TIMECODE_LAST_VALUE_SCALING_FACTOR);
      endTimecodeOffset = SUBRIP_PREFIX_END_TIMECODE_OFFSET;
      break;
    case CODEC_ID_ASS:
      endTimecode =
          formatSubtitleTimecode(
              durationUs, SSA_TIMECODE_FORMAT, SSA_TIMECODE_LAST_VALUE_SCALING_FACTOR);
      endTimecodeOffset = SSA_PREFIX_END_TIMECODE_OFFSET;
      break;
    case CODEC_ID_VTT:
      endTimecode =
          formatSubtitleTimecode(
              durationUs, VTT_TIMECODE_FORMAT, VTT_TIMECODE_LAST_VALUE_SCALING_FACTOR);
      endTimecodeOffset = VTT_PREFIX_END_TIMECODE_OFFSET;
      break;
    default:
      // Callers must only pass the three subtitle codecs documented above.
      throw new IllegalArgumentException();
  }
  System.arraycopy(endTimecode, 0, subtitleData, endTimecodeOffset, endTimecode.length);
}

/**
 * Formats {@code timeUs} using {@code timecodeFormat}, and sets it as the end timecode in {@code
 * subtitleSampleData}.
*/ private static byte[] formatSubtitleTimecode( long timeUs, String timecodeFormat, long lastTimecodeValueScalingFactor) { checkArgument(timeUs != C.TIME_UNSET); byte[] timeCodeData; int hours = (int) (timeUs / (3600 * C.MICROS_PER_SECOND)); timeUs -= (hours * 3600 * C.MICROS_PER_SECOND); int minutes = (int) (timeUs / (60 * C.MICROS_PER_SECOND)); timeUs -= (minutes * 60 * C.MICROS_PER_SECOND); int seconds = (int) (timeUs / C.MICROS_PER_SECOND); timeUs -= (seconds * C.MICROS_PER_SECOND); int lastValue = (int) (timeUs / lastTimecodeValueScalingFactor); timeCodeData = Util.getUtf8Bytes( String.format(Locale.US, timecodeFormat, hours, minutes, seconds, lastValue)); return timeCodeData; } /** * Writes {@code length} bytes of sample data into {@code target} at {@code offset}, consisting of * pending {@link #sampleStrippedBytes} and any remaining data read from {@code input}. */ private void writeToTarget(ExtractorInput input, byte[] target, int offset, int length) throws IOException { int pendingStrippedBytes = min(length, sampleStrippedBytes.bytesLeft()); input.readFully(target, offset + pendingStrippedBytes, length - pendingStrippedBytes); if (pendingStrippedBytes > 0) { sampleStrippedBytes.readBytes(target, offset, pendingStrippedBytes); } } /** * Outputs up to {@code length} bytes of sample data to {@code output}, consisting of either * {@link #sampleStrippedBytes} or data read from {@code input}. */ private int writeToOutput(ExtractorInput input, TrackOutput output, int length) throws IOException { int bytesWritten; int strippedBytesLeft = sampleStrippedBytes.bytesLeft(); if (strippedBytesLeft > 0) { bytesWritten = min(length, strippedBytesLeft); output.sampleData(sampleStrippedBytes, bytesWritten); } else { bytesWritten = output.sampleData(input, length, false); } return bytesWritten; } /** * Builds a {@link SeekMap} from the recently gathered Cues information. * * @return The built {@link SeekMap}. 
The returned {@link SeekMap} may be unseekable if cues * information was missing or incomplete. */ private SeekMap buildSeekMap( @Nullable LongArray cueTimesUs, @Nullable LongArray cueClusterPositions) { if (segmentContentPosition == C.POSITION_UNSET || durationUs == C.TIME_UNSET || cueTimesUs == null || cueTimesUs.size() == 0 || cueClusterPositions == null || cueClusterPositions.size() != cueTimesUs.size()) { // Cues information is missing or incomplete. return new SeekMap.Unseekable(durationUs); } int cuePointsSize = cueTimesUs.size(); int[] sizes = new int[cuePointsSize]; long[] offsets = new long[cuePointsSize]; long[] durationsUs = new long[cuePointsSize]; long[] timesUs = new long[cuePointsSize]; for (int i = 0; i < cuePointsSize; i++) { timesUs[i] = cueTimesUs.get(i); offsets[i] = segmentContentPosition + cueClusterPositions.get(i); } for (int i = 0; i < cuePointsSize - 1; i++) { sizes[i] = (int) (offsets[i + 1] - offsets[i]); durationsUs[i] = timesUs[i + 1] - timesUs[i]; } sizes[cuePointsSize - 1] = (int) (segmentContentPosition + segmentContentSize - offsets[cuePointsSize - 1]); durationsUs[cuePointsSize - 1] = durationUs - timesUs[cuePointsSize - 1]; long lastDurationUs = durationsUs[cuePointsSize - 1]; if (lastDurationUs <= 0) { Log.w(TAG, "Discarding last cue point with unexpected duration: " + lastDurationUs); sizes = Arrays.copyOf(sizes, sizes.length - 1); offsets = Arrays.copyOf(offsets, offsets.length - 1); durationsUs = Arrays.copyOf(durationsUs, durationsUs.length - 1); timesUs = Arrays.copyOf(timesUs, timesUs.length - 1); } return new ChunkIndex(sizes, offsets, durationsUs, timesUs); } /** * Updates the position of the holder to Cues element's position if the extractor configuration * permits use of master seek entry. After building Cues sets the holder's position back to where * it was before. * * @param seekPosition The holder whose position will be updated. * @param currentPosition Current position of the input. 
* @return Whether the seek position was updated. */ private boolean maybeSeekForCues(PositionHolder seekPosition, long currentPosition) { if (seekForCues) { seekPositionAfterBuildingCues = currentPosition; seekPosition.position = cuesContentPosition; seekForCues = false; return true; } // After parsing Cues, seek back to original position if available. We will not do this unless // we seeked to get to the Cues in the first place. if (sentSeekMap && seekPositionAfterBuildingCues != C.POSITION_UNSET) { seekPosition.position = seekPositionAfterBuildingCues; seekPositionAfterBuildingCues = C.POSITION_UNSET; return true; } return false; } private long scaleTimecodeToUs(long unscaledTimecode) throws ParserException { if (timecodeScale == C.TIME_UNSET) { throw ParserException.createForMalformedContainer( "Can't scale timecode prior to timecodeScale being set.", /* cause= */ null); } return Util.scaleLargeTimestamp(unscaledTimecode, timecodeScale, 1000); } private static boolean isCodecSupported(String codecId) { switch (codecId) { case CODEC_ID_VP8: case CODEC_ID_VP9: case CODEC_ID_AV1: case CODEC_ID_MPEG2: case CODEC_ID_MPEG4_SP: case CODEC_ID_MPEG4_ASP: case CODEC_ID_MPEG4_AP: case CODEC_ID_H264: case CODEC_ID_H265: case CODEC_ID_FOURCC: case CODEC_ID_THEORA: case CODEC_ID_OPUS: case CODEC_ID_VORBIS: case CODEC_ID_AAC: case CODEC_ID_MP2: case CODEC_ID_MP3: case CODEC_ID_AC3: case CODEC_ID_E_AC3: case CODEC_ID_TRUEHD: case CODEC_ID_DTS: case CODEC_ID_DTS_EXPRESS: case CODEC_ID_DTS_LOSSLESS: case CODEC_ID_FLAC: case CODEC_ID_ACM: case CODEC_ID_PCM_INT_LIT: case CODEC_ID_PCM_INT_BIG: case CODEC_ID_PCM_FLOAT: case CODEC_ID_SUBRIP: case CODEC_ID_ASS: case CODEC_ID_VTT: case CODEC_ID_VOBSUB: case CODEC_ID_PGS: case CODEC_ID_DVBSUB: return true; default: return false; } } /** * Returns an array that can store (at least) {@code length} elements, which will be either a new * array or {@code array} if it's not null and large enough. 
*/ private static int[] ensureArrayCapacity(@Nullable int[] array, int length) { if (array == null) { return new int[length]; } else if (array.length >= length) { return array; } else { // Double the size to avoid allocating constantly if the required length increases gradually. return new int[max(array.length * 2, length)]; } } @EnsuresNonNull("extractorOutput") private void assertInitialized() { checkStateNotNull(extractorOutput); } /** Passes events through to the outer {@link MatroskaExtractor}. */ private final class InnerEbmlProcessor implements EbmlProcessor { @Override public @ElementType int getElementType(int id) { return MatroskaExtractor.this.getElementType(id); } @Override public boolean isLevel1Element(int id) { return MatroskaExtractor.this.isLevel1Element(id); } @Override public void startMasterElement(int id, long contentPosition, long contentSize) throws ParserException { MatroskaExtractor.this.startMasterElement(id, contentPosition, contentSize); } @Override public void endMasterElement(int id) throws ParserException { MatroskaExtractor.this.endMasterElement(id); } @Override public void integerElement(int id, long value) throws ParserException { MatroskaExtractor.this.integerElement(id, value); } @Override public void floatElement(int id, double value) throws ParserException { MatroskaExtractor.this.floatElement(id, value); } @Override public void stringElement(int id, String value) throws ParserException { MatroskaExtractor.this.stringElement(id, value); } @Override public void binaryElement(int id, int contentsSize, ExtractorInput input) throws IOException { MatroskaExtractor.this.binaryElement(id, contentsSize, input); } } /** Holds data corresponding to a single track. */ protected static final class Track { private static final int DISPLAY_UNIT_PIXELS = 0; private static final int MAX_CHROMATICITY = 50_000; // Defined in CTA-861.3. /** Default max content light level (CLL) that should be encoded into hdrStaticInfo. 
   */
  private static final int DEFAULT_MAX_CLL = 1000; // nits.

  /** Default frame-average light level (FALL) that should be encoded into hdrStaticInfo. */
  private static final int DEFAULT_MAX_FALL = 200; // nits.

  // Common elements.
  public @MonotonicNonNull String name;
  public @MonotonicNonNull String codecId;
  public int number;
  public int type;
  public int defaultSampleDurationNs;
  public int maxBlockAdditionId;
  private int blockAddIdType;
  public boolean hasContentEncryption;
  public byte @MonotonicNonNull [] sampleStrippedBytes;
  public TrackOutput.@MonotonicNonNull CryptoData cryptoData;
  public byte @MonotonicNonNull [] codecPrivate;
  public @MonotonicNonNull DrmInitData drmInitData;

  // Video elements.
  public int width = Format.NO_VALUE;
  public int height = Format.NO_VALUE;
  public int displayWidth = Format.NO_VALUE;
  public int displayHeight = Format.NO_VALUE;
  public int displayUnit = DISPLAY_UNIT_PIXELS;
  public @C.Projection int projectionType = Format.NO_VALUE;
  public float projectionPoseYaw = 0f;
  public float projectionPosePitch = 0f;
  public float projectionPoseRoll = 0f;
  public byte @MonotonicNonNull [] projectionData = null;
  public @C.StereoMode int stereoMode = Format.NO_VALUE;
  public boolean hasColorInfo = false;
  public @C.ColorSpace int colorSpace = Format.NO_VALUE;
  public @C.ColorTransfer int colorTransfer = Format.NO_VALUE;
  public @C.ColorRange int colorRange = Format.NO_VALUE;
  public int maxContentLuminance = DEFAULT_MAX_CLL;
  public int maxFrameAverageLuminance = DEFAULT_MAX_FALL;
  public float primaryRChromaticityX = Format.NO_VALUE;
  public float primaryRChromaticityY = Format.NO_VALUE;
  public float primaryGChromaticityX = Format.NO_VALUE;
  public float primaryGChromaticityY = Format.NO_VALUE;
  public float primaryBChromaticityX = Format.NO_VALUE;
  public float primaryBChromaticityY = Format.NO_VALUE;
  public float whitePointChromaticityX = Format.NO_VALUE;
  public float whitePointChromaticityY = Format.NO_VALUE;
  public float maxMasteringLuminance = Format.NO_VALUE;
  public float minMasteringLuminance = Format.NO_VALUE;
  public byte @MonotonicNonNull [] dolbyVisionConfigBytes;

  // Audio elements. Initially set to their default values.
  public int channelCount = 1;
  public int audioBitDepth = Format.NO_VALUE;
  public int sampleRate = 8000;
  public long codecDelayNs = 0;
  public long seekPreRollNs = 0;
  public @MonotonicNonNull TrueHdSampleRechunker trueHdSampleRechunker;

  // Text elements.
  public boolean flagForced;
  public boolean flagDefault = true;
  private String language = "eng";

  // Set when the output is initialized. nalUnitLengthFieldLength is only set for H264/H265.
  public @MonotonicNonNull TrackOutput output;
  public int nalUnitLengthFieldLength;

  /** Initializes the track with an output. */
  @RequiresNonNull("codecId")
  @EnsuresNonNull("this.output")
  public void initializeOutput(ExtractorOutput output, int trackId) throws ParserException {
    String mimeType;
    int maxInputSize = Format.NO_VALUE;
    @C.PcmEncoding int pcmEncoding = Format.NO_VALUE;
    @Nullable List<byte[]> initializationData = null;
    @Nullable String codecs = null;
    // Map the Matroska codec identifier to a sample MIME type, deriving any initialization
    // data, codec string, NAL length field size and max input size the codec requires.
    switch (codecId) {
      case CODEC_ID_VP8:
        mimeType = MimeTypes.VIDEO_VP8;
        break;
      case CODEC_ID_VP9:
        mimeType = MimeTypes.VIDEO_VP9;
        break;
      case CODEC_ID_AV1:
        mimeType = MimeTypes.VIDEO_AV1;
        break;
      case CODEC_ID_MPEG2:
        mimeType = MimeTypes.VIDEO_MPEG2;
        break;
      case CODEC_ID_MPEG4_SP:
      case CODEC_ID_MPEG4_ASP:
      case CODEC_ID_MPEG4_AP:
        mimeType = MimeTypes.VIDEO_MP4V;
        initializationData = codecPrivate == null ?
null : Collections.singletonList(codecPrivate); break; case CODEC_ID_H264: mimeType = MimeTypes.VIDEO_H264; AvcConfig avcConfig = AvcConfig.parse(new ParsableByteArray(getCodecPrivate(codecId))); initializationData = avcConfig.initializationData; nalUnitLengthFieldLength = avcConfig.nalUnitLengthFieldLength; codecs = avcConfig.codecs; break; case CODEC_ID_H265: mimeType = MimeTypes.VIDEO_H265; HevcConfig hevcConfig = HevcConfig.parse(new ParsableByteArray(getCodecPrivate(codecId))); initializationData = hevcConfig.initializationData; nalUnitLengthFieldLength = hevcConfig.nalUnitLengthFieldLength; codecs = hevcConfig.codecs; break; case CODEC_ID_FOURCC: Pair<String, @NullableType List<byte[]>> pair = parseFourCcPrivate(new ParsableByteArray(getCodecPrivate(codecId))); mimeType = pair.first; initializationData = pair.second; break; case CODEC_ID_THEORA: // TODO: This can be set to the real mimeType if/when we work out what initializationData // should be set to for this case. mimeType = MimeTypes.VIDEO_UNKNOWN; break; case CODEC_ID_VORBIS: mimeType = MimeTypes.AUDIO_VORBIS; maxInputSize = VORBIS_MAX_INPUT_SIZE; initializationData = parseVorbisCodecPrivate(getCodecPrivate(codecId)); break; case CODEC_ID_OPUS: mimeType = MimeTypes.AUDIO_OPUS; maxInputSize = OPUS_MAX_INPUT_SIZE; initializationData = new ArrayList<>(3); initializationData.add(getCodecPrivate(codecId)); initializationData.add( ByteBuffer.allocate(8).order(ByteOrder.LITTLE_ENDIAN).putLong(codecDelayNs).array()); initializationData.add( ByteBuffer.allocate(8).order(ByteOrder.LITTLE_ENDIAN).putLong(seekPreRollNs).array()); break; case CODEC_ID_AAC: mimeType = MimeTypes.AUDIO_AAC; initializationData = Collections.singletonList(getCodecPrivate(codecId)); AacUtil.Config aacConfig = AacUtil.parseAudioSpecificConfig(codecPrivate); // Update sampleRate and channelCount from the AudioSpecificConfig initialization data, // which is more reliable. See [Internal: b/10903778]. 
sampleRate = aacConfig.sampleRateHz; channelCount = aacConfig.channelCount; codecs = aacConfig.codecs; break; case CODEC_ID_MP2: mimeType = MimeTypes.AUDIO_MPEG_L2; maxInputSize = MpegAudioUtil.MAX_FRAME_SIZE_BYTES; break; case CODEC_ID_MP3: mimeType = MimeTypes.AUDIO_MPEG; maxInputSize = MpegAudioUtil.MAX_FRAME_SIZE_BYTES; break; case CODEC_ID_AC3: mimeType = MimeTypes.AUDIO_AC3; break; case CODEC_ID_E_AC3: mimeType = MimeTypes.AUDIO_E_AC3; break; case CODEC_ID_TRUEHD: mimeType = MimeTypes.AUDIO_TRUEHD; trueHdSampleRechunker = new TrueHdSampleRechunker(); break; case CODEC_ID_DTS: case CODEC_ID_DTS_EXPRESS: mimeType = MimeTypes.AUDIO_DTS; break; case CODEC_ID_DTS_LOSSLESS: mimeType = MimeTypes.AUDIO_DTS_HD; break; case CODEC_ID_FLAC: mimeType = MimeTypes.AUDIO_FLAC; initializationData = Collections.singletonList(getCodecPrivate(codecId)); break; case CODEC_ID_ACM: mimeType = MimeTypes.AUDIO_RAW; if (parseMsAcmCodecPrivate(new ParsableByteArray(getCodecPrivate(codecId)))) { pcmEncoding = Util.getPcmEncoding(audioBitDepth); if (pcmEncoding == C.ENCODING_INVALID) { pcmEncoding = Format.NO_VALUE; mimeType = MimeTypes.AUDIO_UNKNOWN; Log.w( TAG, "Unsupported PCM bit depth: " + audioBitDepth + ". Setting mimeType to " + mimeType); } } else { mimeType = MimeTypes.AUDIO_UNKNOWN; Log.w(TAG, "Non-PCM MS/ACM is unsupported. Setting mimeType to " + mimeType); } break; case CODEC_ID_PCM_INT_LIT: mimeType = MimeTypes.AUDIO_RAW; pcmEncoding = Util.getPcmEncoding(audioBitDepth); if (pcmEncoding == C.ENCODING_INVALID) { pcmEncoding = Format.NO_VALUE; mimeType = MimeTypes.AUDIO_UNKNOWN; Log.w( TAG, "Unsupported little endian PCM bit depth: " + audioBitDepth + ". 
Setting mimeType to " + mimeType); } break; case CODEC_ID_PCM_INT_BIG: mimeType = MimeTypes.AUDIO_RAW; if (audioBitDepth == 8) { pcmEncoding = C.ENCODING_PCM_8BIT; } else if (audioBitDepth == 16) { pcmEncoding = C.ENCODING_PCM_16BIT_BIG_ENDIAN; } else { pcmEncoding = Format.NO_VALUE; mimeType = MimeTypes.AUDIO_UNKNOWN; Log.w( TAG, "Unsupported big endian PCM bit depth: " + audioBitDepth + ". Setting mimeType to " + mimeType); } break; case CODEC_ID_PCM_FLOAT: mimeType = MimeTypes.AUDIO_RAW; if (audioBitDepth == 32) { pcmEncoding = C.ENCODING_PCM_FLOAT; } else { pcmEncoding = Format.NO_VALUE; mimeType = MimeTypes.AUDIO_UNKNOWN; Log.w( TAG, "Unsupported floating point PCM bit depth: " + audioBitDepth + ". Setting mimeType to " + mimeType); } break; case CODEC_ID_SUBRIP: mimeType = MimeTypes.APPLICATION_SUBRIP; break; case CODEC_ID_ASS: mimeType = MimeTypes.TEXT_SSA; initializationData = ImmutableList.of(SSA_DIALOGUE_FORMAT, getCodecPrivate(codecId)); break; case CODEC_ID_VTT: mimeType = MimeTypes.TEXT_VTT; break; case CODEC_ID_VOBSUB: mimeType = MimeTypes.APPLICATION_VOBSUB; initializationData = ImmutableList.of(getCodecPrivate(codecId)); break; case CODEC_ID_PGS: mimeType = MimeTypes.APPLICATION_PGS; break; case CODEC_ID_DVBSUB: mimeType = MimeTypes.APPLICATION_DVBSUBS; // Init data: composition_page (2), ancillary_page (2) byte[] initializationDataBytes = new byte[4]; System.arraycopy(getCodecPrivate(codecId), 0, initializationDataBytes, 0, 4); initializationData = ImmutableList.of(initializationDataBytes); break; default: throw ParserException.createForMalformedContainer( "Unrecognized codec identifier.", /* cause= */ null); } if (dolbyVisionConfigBytes != null) { @Nullable DolbyVisionConfig dolbyVisionConfig = DolbyVisionConfig.parse(new ParsableByteArray(this.dolbyVisionConfigBytes)); if (dolbyVisionConfig != null) { codecs = dolbyVisionConfig.codecs; mimeType = MimeTypes.VIDEO_DOLBY_VISION; } } @C.SelectionFlags int selectionFlags = 0; selectionFlags |= 
flagDefault ? C.SELECTION_FLAG_DEFAULT : 0; selectionFlags |= flagForced ? C.SELECTION_FLAG_FORCED : 0; int type; Format.Builder formatBuilder = new Format.Builder(); // TODO: Consider reading the name elements of the tracks and, if present, incorporating them // into the trackId passed when creating the formats. if (MimeTypes.isAudio(mimeType)) { type = C.TRACK_TYPE_AUDIO; formatBuilder .setChannelCount(channelCount) .setSampleRate(sampleRate) .setPcmEncoding(pcmEncoding); } else if (MimeTypes.isVideo(mimeType)) { type = C.TRACK_TYPE_VIDEO; if (displayUnit == Track.DISPLAY_UNIT_PIXELS) { displayWidth = displayWidth == Format.NO_VALUE ? width : displayWidth; displayHeight = displayHeight == Format.NO_VALUE ? height : displayHeight; } float pixelWidthHeightRatio = Format.NO_VALUE; if (displayWidth != Format.NO_VALUE && displayHeight != Format.NO_VALUE) { pixelWidthHeightRatio = ((float) (height * displayWidth)) / (width * displayHeight); } @Nullable ColorInfo colorInfo = null; if (hasColorInfo) { @Nullable byte[] hdrStaticInfo = getHdrStaticInfo(); colorInfo = new ColorInfo(colorSpace, colorRange, colorTransfer, hdrStaticInfo); } int rotationDegrees = Format.NO_VALUE; if (name != null && TRACK_NAME_TO_ROTATION_DEGREES.containsKey(name)) { rotationDegrees = TRACK_NAME_TO_ROTATION_DEGREES.get(name); } if (projectionType == C.PROJECTION_RECTANGULAR && Float.compare(projectionPoseYaw, 0f) == 0 && Float.compare(projectionPosePitch, 0f) == 0) { // The range of projectionPoseRoll is [-180, 180]. 
if (Float.compare(projectionPoseRoll, 0f) == 0) { rotationDegrees = 0; } else if (Float.compare(projectionPosePitch, 90f) == 0) { rotationDegrees = 90; } else if (Float.compare(projectionPosePitch, -180f) == 0 || Float.compare(projectionPosePitch, 180f) == 0) { rotationDegrees = 180; } else if (Float.compare(projectionPosePitch, -90f) == 0) { rotationDegrees = 270; } } formatBuilder .setWidth(width) .setHeight(height) .setPixelWidthHeightRatio(pixelWidthHeightRatio) .setRotationDegrees(rotationDegrees) .setProjectionData(projectionData) .setStereoMode(stereoMode) .setColorInfo(colorInfo); } else if (MimeTypes.APPLICATION_SUBRIP.equals(mimeType) || MimeTypes.TEXT_SSA.equals(mimeType) || MimeTypes.TEXT_VTT.equals(mimeType) || MimeTypes.APPLICATION_VOBSUB.equals(mimeType) || MimeTypes.APPLICATION_PGS.equals(mimeType) || MimeTypes.APPLICATION_DVBSUBS.equals(mimeType)) { type = C.TRACK_TYPE_TEXT; } else { throw ParserException.createForMalformedContainer( "Unexpected MIME type.", /* cause= */ null); } if (name != null && !TRACK_NAME_TO_ROTATION_DEGREES.containsKey(name)) { formatBuilder.setLabel(name); } Format format = formatBuilder .setId(trackId) .setSampleMimeType(mimeType) .setMaxInputSize(maxInputSize) .setLanguage(language) .setSelectionFlags(selectionFlags) .setInitializationData(initializationData) .setCodecs(codecs) .setDrmInitData(drmInitData) .build(); this.output = output.track(number, type); this.output.format(format); } /** Forces any pending sample metadata to be flushed to the output. */ @RequiresNonNull("output") public void outputPendingSampleMetadata() { if (trueHdSampleRechunker != null) { trueHdSampleRechunker.outputPendingSampleMetadata(output, cryptoData); } } /** Resets any state stored in the track in response to a seek. */ public void reset() { if (trueHdSampleRechunker != null) { trueHdSampleRechunker.reset(); } } /** Returns the HDR Static Info as defined in CTA-861.3. 
*/ @Nullable private byte[] getHdrStaticInfo() { // Are all fields present. if (primaryRChromaticityX == Format.NO_VALUE || primaryRChromaticityY == Format.NO_VALUE || primaryGChromaticityX == Format.NO_VALUE || primaryGChromaticityY == Format.NO_VALUE || primaryBChromaticityX == Format.NO_VALUE || primaryBChromaticityY == Format.NO_VALUE || whitePointChromaticityX == Format.NO_VALUE || whitePointChromaticityY == Format.NO_VALUE || maxMasteringLuminance == Format.NO_VALUE || minMasteringLuminance == Format.NO_VALUE) { return null; } byte[] hdrStaticInfoData = new byte[25]; ByteBuffer hdrStaticInfo = ByteBuffer.wrap(hdrStaticInfoData).order(ByteOrder.LITTLE_ENDIAN); hdrStaticInfo.put((byte) 0); // Type. hdrStaticInfo.putShort((short) ((primaryRChromaticityX * MAX_CHROMATICITY) + 0.5f)); hdrStaticInfo.putShort((short) ((primaryRChromaticityY * MAX_CHROMATICITY) + 0.5f)); hdrStaticInfo.putShort((short) ((primaryGChromaticityX * MAX_CHROMATICITY) + 0.5f)); hdrStaticInfo.putShort((short) ((primaryGChromaticityY * MAX_CHROMATICITY) + 0.5f)); hdrStaticInfo.putShort((short) ((primaryBChromaticityX * MAX_CHROMATICITY) + 0.5f)); hdrStaticInfo.putShort((short) ((primaryBChromaticityY * MAX_CHROMATICITY) + 0.5f)); hdrStaticInfo.putShort((short) ((whitePointChromaticityX * MAX_CHROMATICITY) + 0.5f)); hdrStaticInfo.putShort((short) ((whitePointChromaticityY * MAX_CHROMATICITY) + 0.5f)); hdrStaticInfo.putShort((short) (maxMasteringLuminance + 0.5f)); hdrStaticInfo.putShort((short) (minMasteringLuminance + 0.5f)); hdrStaticInfo.putShort((short) maxContentLuminance); hdrStaticInfo.putShort((short) maxFrameAverageLuminance); return hdrStaticInfoData; } /** * Builds initialization data for a {@link Format} from FourCC codec private data. * * @return The codec mime type and initialization data. If the compression type is not supported * then the mime type is set to {@link MimeTypes#VIDEO_UNKNOWN} and the initialization data * is {@code null}. 
* @throws ParserException If the initialization data could not be built. */ private static Pair<String, @NullableType List<byte[]>> parseFourCcPrivate( ParsableByteArray buffer) throws ParserException { try { buffer.skipBytes(16); // size(4), width(4), height(4), planes(2), bitcount(2). long compression = buffer.readLittleEndianUnsignedInt(); if (compression == FOURCC_COMPRESSION_DIVX) { return new Pair<>(MimeTypes.VIDEO_DIVX, null); } else if (compression == FOURCC_COMPRESSION_H263) { return new Pair<>(MimeTypes.VIDEO_H263, null); } else if (compression == FOURCC_COMPRESSION_VC1) { // Search for the initialization data from the end of the BITMAPINFOHEADER. The last 20 // bytes of which are: sizeImage(4), xPel/m (4), yPel/m (4), clrUsed(4), clrImportant(4). int startOffset = buffer.getPosition() + 20; byte[] bufferData = buffer.getData(); for (int offset = startOffset; offset < bufferData.length - 4; offset++) { if (bufferData[offset] == 0x00 && bufferData[offset + 1] == 0x00 && bufferData[offset + 2] == 0x01 && bufferData[offset + 3] == 0x0F) { // We've found the initialization data. byte[] initializationData = Arrays.copyOfRange(bufferData, offset, bufferData.length); return new Pair<>(MimeTypes.VIDEO_VC1, Collections.singletonList(initializationData)); } } throw ParserException.createForMalformedContainer( "Failed to find FourCC VC1 initialization data", /* cause= */ null); } } catch (ArrayIndexOutOfBoundsException e) { throw ParserException.createForMalformedContainer( "Error parsing FourCC private data", /* cause= */ null); } Log.w(TAG, "Unknown FourCC. Setting mimeType to " + MimeTypes.VIDEO_UNKNOWN); return new Pair<>(MimeTypes.VIDEO_UNKNOWN, null); } /** * Builds initialization data for a {@link Format} from Vorbis codec private data. * * @return The initialization data for the {@link Format}. * @throws ParserException If the initialization data could not be built. 
*/ private static List<byte[]> parseVorbisCodecPrivate(byte[] codecPrivate) throws ParserException { try { if (codecPrivate[0] != 0x02) { throw ParserException.createForMalformedContainer( "Error parsing vorbis codec private", /* cause= */ null); } int offset = 1; int vorbisInfoLength = 0; while ((codecPrivate[offset] & 0xFF) == 0xFF) { vorbisInfoLength += 0xFF; offset++; } vorbisInfoLength += codecPrivate[offset++] & 0xFF; int vorbisSkipLength = 0; while ((codecPrivate[offset] & 0xFF) == 0xFF) { vorbisSkipLength += 0xFF; offset++; } vorbisSkipLength += codecPrivate[offset++] & 0xFF; if (codecPrivate[offset] != 0x01) { throw ParserException.createForMalformedContainer( "Error parsing vorbis codec private", /* cause= */ null); } byte[] vorbisInfo = new byte[vorbisInfoLength]; System.arraycopy(codecPrivate, offset, vorbisInfo, 0, vorbisInfoLength); offset += vorbisInfoLength; if (codecPrivate[offset] != 0x03) { throw ParserException.createForMalformedContainer( "Error parsing vorbis codec private", /* cause= */ null); } offset += vorbisSkipLength; if (codecPrivate[offset] != 0x05) { throw ParserException.createForMalformedContainer( "Error parsing vorbis codec private", /* cause= */ null); } byte[] vorbisBooks = new byte[codecPrivate.length - offset]; System.arraycopy(codecPrivate, offset, vorbisBooks, 0, codecPrivate.length - offset); List<byte[]> initializationData = new ArrayList<>(2); initializationData.add(vorbisInfo); initializationData.add(vorbisBooks); return initializationData; } catch (ArrayIndexOutOfBoundsException e) { throw ParserException.createForMalformedContainer( "Error parsing vorbis codec private", /* cause= */ null); } } /** * Parses an MS/ACM codec private, returning whether it indicates PCM audio. * * @return Whether the codec private indicates PCM audio. * @throws ParserException If a parsing error occurs. 
*/ private static boolean parseMsAcmCodecPrivate(ParsableByteArray buffer) throws ParserException { try { int formatTag = buffer.readLittleEndianUnsignedShort(); if (formatTag == WAVE_FORMAT_PCM) { return true; } else if (formatTag == WAVE_FORMAT_EXTENSIBLE) { buffer.setPosition(WAVE_FORMAT_SIZE + 6); // unionSamples(2), channelMask(4) return buffer.readLong() == WAVE_SUBFORMAT_PCM.getMostSignificantBits() && buffer.readLong() == WAVE_SUBFORMAT_PCM.getLeastSignificantBits(); } else { return false; } } catch (ArrayIndexOutOfBoundsException e) { throw ParserException.createForMalformedContainer( "Error parsing MS/ACM codec private", /* cause= */ null); } } /** * Checks that the track has an output. * * <p>It is unfortunately not possible to mark {@link MatroskaExtractor#tracks} as only * containing tracks with output with the nullness checker. This method is used to check that * fact at runtime. */ @EnsuresNonNull("output") private void assertOutputInitialized() { checkNotNull(output); } @EnsuresNonNull("codecPrivate") private byte[] getCodecPrivate(String codecId) throws ParserException { if (codecPrivate == null) { throw ParserException.createForMalformedContainer( "Missing CodecPrivate for codec " + codecId, /* cause= */ null); } return codecPrivate; } } }
package org.fluentlenium.test.findby;

import org.fluentlenium.core.FluentPage;
import org.fluentlenium.core.annotation.Page;
import org.fluentlenium.core.domain.FluentWebElement;
import org.fluentlenium.test.IntegrationFluentTest;
import org.junit.jupiter.api.Test;
import org.openqa.selenium.WebElement;
import org.openqa.selenium.support.FindAll;
import org.openqa.selenium.support.FindBy;

import java.util.ArrayList;
import java.util.List;

import static org.assertj.core.api.Assertions.assertThat;

/**
 * Verifies that plain Selenium {@code @FindBy} / {@code @FindAll} annotations are injected
 * correctly when mixed with FluentLenium's own element types on a page object.
 */
class SeleniumFindBy extends IntegrationFluentTest {
    @Page
    private PageIndex page;

    @Test
    void shouldFindByRetrieveElement() {
        page.go();
        page.isAt();
        assertThat(page.location.getText()).isEqualTo("Pharmacy");
    }

    @Test
    void shouldFindByRetrieveList() {
        page.go();
        page.isAt();
        assertThat(page.smalls).hasSize(3);
        List<String> labels = new ArrayList<>();
        page.smalls.forEach(element -> labels.add(element.getText()));
        assertThat(labels).containsExactly("Small 1", "Small 2", "Small 3");
    }

    @Test
    void shouldFindAllFindByRetrieveList() {
        page.go();
        page.isAt();
        assertThat(page.findAllElements).hasSize(4);
        List<String> labels = new ArrayList<>();
        page.findAllElements.forEach(element -> labels.add(element.getText()));
        assertThat(labels).containsExactly("Pharmacy", "Small 1", "Small 2", "Small 3");
    }

    @Test
    void shouldFindByRetrievedObjectWorkForSeleniumActions() {
        page.go();
        page.isAt();
        assertThat(page.getText()).isEqualTo("This text should change on MouseOver");
        page.hoverOverElement();
        assertThat(page.getText()).isEqualTo("abc");
    }

    /** Page object mixing raw Selenium elements with FluentLenium elements. */
    private static class PageIndex extends FluentPage {
        @FindBy(id = "location")
        private WebElement location;

        @FindBy(className = "small")
        private List<WebElement> smalls;

        @FindAll({@FindBy(id = "location"), @FindBy(className = "small")})
        private List<WebElement> findAllElements;

        @FindBy(css = "#mouseover")
        private FluentWebElement mouseOverElement;

        @FindBy(css = "#id3")
        private FluentWebElement id3;

        @Override
        public String getUrl() {
            return IntegrationFluentTest.DEFAULT_URL;
        }

        @Override
        public void isAt() {
            // The integration test page's title contains "Selenium".
            assertThat(getDriver().getTitle()).contains("Selenium");
        }

        void hoverOverElement() {
            mouseOverElement.mouse().moveToElement();
        }

        public String getText() {
            return id3.text();
        }
    }
}
/*******************************************************************************
 * Copyright [2014] [Joarder Kamal]
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *******************************************************************************/

/**
 * Source: http://www.cs.waikato.ac.nz/~abifet/MOA-IncMine/
 */
package main.java.incmine.tasks;

import moa.core.ObjectRepository;
import main.java.incmine.learners.Learner;
import moa.options.ClassOption;
import moa.options.IntOption;
import moa.streams.InstanceStream;
import moa.tasks.MainTask;
import moa.tasks.TaskMonitor;

/**
 * MOA task that trains a {@link Learner} on instances drawn from an {@link InstanceStream}
 * and returns the trained learner as the task result.
 */
public abstract class LearnModel extends MainTask {

    @Override
    public String getPurposeString() {
        return "Learns a model from a stream.";
    }

    private static final long serialVersionUID = 1L;

    // Which learner implementation to train (defaults to IncMine).
    public ClassOption learnerOption = new ClassOption("learner", 'l',
            "Classifier to train.", Learner.class, "IncMine");

    // Which stream to draw training instances from.
    public ClassOption streamOption = new ClassOption("stream", 's',
            "Stream to learn from.", InstanceStream.class, "ArffFileStream");

    // Cap on instances per pass; see doMainTask for how 0/negative values are treated.
    public IntOption maxInstancesOption = new IntOption("maxInstances", 'm',
            "Maximum number of instances to train on per pass over the data.",
            1000, 0, Integer.MAX_VALUE);

    public IntOption numPassesOption = new IntOption("numPasses", 'p',
            "The number of passes to do over the data.", 1, 1, Integer.MAX_VALUE);

    public IntOption maxMemoryOption = new IntOption("maxMemory", 'b',
            "Maximum size of model (in bytes). -1 = no limit.", -1, -1,
            Integer.MAX_VALUE);

    public LearnModel() {
    }

    /**
     * Convenience constructor that pre-populates the task options.
     */
    public LearnModel(Learner learner, InstanceStream stream, int maxInstances,
            int numPasses) {
        this.learnerOption.setCurrentObject(learner);
        this.streamOption.setCurrentObject(stream);
        this.maxInstancesOption.setValue(maxInstances);
        this.numPassesOption.setValue(numPasses);
    }

    // The task result delivered by doMainTask is the trained Learner itself.
    public Class<?> getTaskResultType() {
        return Learner.class;
    }

    /**
     * Trains the configured learner over the configured stream, reporting progress to
     * {@code monitor}, and returns the trained learner (or null if the task was aborted).
     */
    @Override
    public Object doMainTask(TaskMonitor monitor, ObjectRepository repository) {
        Learner learner = (Learner) getPreparedClassOption(this.learnerOption);
        InstanceStream stream = (InstanceStream) getPreparedClassOption(this.streamOption);
        learner.setModelContext(stream.getHeader());
        int numPasses = this.numPassesOption.getValue();
        int maxInstances = this.maxInstancesOption.getValue();
        for (int pass = 0; pass < numPasses; pass++) {
            long instancesProcessed = 0;
            monitor.setCurrentActivity("Training learner" + (numPasses > 1 ? (" (pass " + (pass + 1) + "/" + numPasses + ")") : "") + "...", -1.0);
            /*if (pass > 0) {
                stream.restart();
            }*/
            // NOTE(review): the restart between passes above is commented out, so with
            // numPasses > 1 later passes will see an already-exhausted stream — confirm intended.
            while (stream.hasMoreInstances()
                    && ((maxInstances < 0) || (instancesProcessed < maxInstances))) {
                // Read instance one by one
                learner.trainOnInstance(stream.nextInstance());
                instancesProcessed++;
                // Periodically check for abort requests and refresh the progress estimate.
                if (instancesProcessed % INSTANCES_BETWEEN_MONITOR_UPDATES == 0) {
                    //System.out.println("inst_betw_mon_upd = 0 instProcess" + instancesProcessed);
                    if (monitor.taskShouldAbort()) {
                        return null;
                    }
                    long estimatedRemainingInstances = stream.estimatedRemainingInstances();
                    if (maxInstances > 0) {
                        // The effective remaining work is bounded by the per-pass cap.
                        long maxRemaining = maxInstances - instancesProcessed;
                        if ((estimatedRemainingInstances < 0)
                                || (maxRemaining < estimatedRemainingInstances)) {
                            estimatedRemainingInstances = maxRemaining;
                        }
                    }
                    monitor.setCurrentActivityFractionComplete(estimatedRemainingInstances < 0
                            ? -1.0
                            : (double) instancesProcessed
                                    / (double) (instancesProcessed + estimatedRemainingInstances));
                    if (monitor.resultPreviewRequested()) {
                        monitor.setLatestResultPreview(learner.copy());
                    }
                }
            }
        }
        learner.setModelContext(stream.getHeader());
        return learner;
    }
}
package org.hl7.fhir.r4.model.codesystems; /* Copyright (c) 2011+, HL7, Inc. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of HL7 nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/

// Generated on Sun, May 6, 2018 17:51-0400 for FHIR v3.4.0

import org.hl7.fhir.r4.model.EnumFactory;

/**
 * {@link EnumFactory} translating between the FHIR measure-scoring code strings and the
 * {@link MeasureScoring} enum constants.
 */
public class MeasureScoringEnumFactory implements EnumFactory<MeasureScoring> {

  /**
   * Converts a code string to its enum constant.
   *
   * @return The matching constant, or null when the code is null or empty.
   * @throws IllegalArgumentException If the code is not a known measure-scoring code.
   */
  public MeasureScoring fromCode(String codeString) throws IllegalArgumentException {
    // null and the empty string both mean "no code present".
    if (codeString == null || codeString.isEmpty())
      return null;
    switch (codeString) {
      case "proportion":
        return MeasureScoring.PROPORTION;
      case "ratio":
        return MeasureScoring.RATIO;
      case "continuous-variable":
        return MeasureScoring.CONTINUOUSVARIABLE;
      case "cohort":
        return MeasureScoring.COHORT;
      default:
        throw new IllegalArgumentException("Unknown MeasureScoring code '"+codeString+"'");
    }
  }

  /** Converts an enum constant back to its code string; unknown/null values yield "?". */
  public String toCode(MeasureScoring code) {
    if (code == null)
      return "?";
    switch (code) {
      case PROPORTION:
        return "proportion";
      case RATIO:
        return "ratio";
      case CONTINUOUSVARIABLE:
        return "continuous-variable";
      case COHORT:
        return "cohort";
      default:
        return "?";
    }
  }

  /** Returns the code system URI for the given constant. */
  public String toSystem(MeasureScoring code) {
    return code.getSystem();
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.shardingsphere.spring.namespace;

import org.apache.shardingsphere.driver.jdbc.core.datasource.ShardingSphereDataSource;
import org.apache.shardingsphere.encrypt.rule.EncryptRule;
import org.apache.shardingsphere.infra.database.DefaultSchema;
import org.apache.shardingsphere.infra.datanode.DataNode;
import org.apache.shardingsphere.infra.rule.ShardingSphereRule;
import org.apache.shardingsphere.readwritesplitting.rule.ReadwriteSplittingRule;
import org.apache.shardingsphere.sharding.rule.ShardingRule;
import org.apache.shardingsphere.spring.transaction.TransactionTypeScanner;
import org.junit.Test;
import org.springframework.test.context.junit4.AbstractJUnit4SpringContextTests;

import javax.annotation.Resource;
import javax.sql.DataSource;
import java.util.Arrays;
import java.util.Collection;
import java.util.Map;

import static org.hamcrest.CoreMatchers.is;
// org.junit.Assert.assertThat is deprecated since JUnit 4.13; use Hamcrest's original directly.
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;

/**
 * Base test verifying that a ShardingSphere data source assembled from Spring XML namespace
 * configuration exposes the expected rules (sharding, readwrite-splitting, encrypt) and
 * physical data sources.
 */
public abstract class AbstractSpringNamespaceTest extends AbstractJUnit4SpringContextTests {
    
    @Resource
    private ShardingSphereDataSource dataSource;
    
    /** Asserts the data sources plus each configured rule type. */
    @Test
    public void assertShardingSphereDataSource() {
        assertDataSources();
        Collection<ShardingSphereRule> rules = dataSource.getContextManager().getMetaDataContexts().getMetaData(DefaultSchema.LOGIC_NAME).getRuleMetaData().getRules();
        assertThat(rules.size(), is(4));
        for (ShardingSphereRule each : rules) {
            if (each instanceof ShardingRule) {
                assertShardingRule((ShardingRule) each);
            } else if (each instanceof ReadwriteSplittingRule) {
                assertReadwriteSplittingRule((ReadwriteSplittingRule) each);
            } else if (each instanceof EncryptRule) {
                assertEncryptRule((EncryptRule) each);
            }
        }
    }
    
    // All six physical data sources (one writer + two readers per logical source) must be present.
    private void assertDataSources() {
        Map<String, DataSource> dataSources = dataSource.getContextManager().getMetaDataContexts().getMetaData(DefaultSchema.LOGIC_NAME).getResource().getDataSources();
        assertThat(dataSources.size(), is(6));
        assertTrue(dataSources.containsKey("ds_0_write"));
        assertTrue(dataSources.containsKey("ds_0_read_0"));
        assertTrue(dataSources.containsKey("ds_0_read_1"));
        assertTrue(dataSources.containsKey("ds_1_write"));
        assertTrue(dataSources.containsKey("ds_1_read_0"));
        assertTrue(dataSources.containsKey("ds_1_read_1"));
    }
    
    // t_order must be sharded over 2 data sources x 4 tables.
    private void assertShardingRule(final ShardingRule rule) {
        assertThat(rule.getTableRules().size(), is(1));
        assertThat(rule.getTableRule("t_order").getActualDataNodes(), is(Arrays.asList(
                new DataNode("ds_0.t_order_0"), new DataNode("ds_0.t_order_1"), new DataNode("ds_0.t_order_2"), new DataNode("ds_0.t_order_3"),
                new DataNode("ds_1.t_order_0"), new DataNode("ds_1.t_order_1"), new DataNode("ds_1.t_order_2"), new DataNode("ds_1.t_order_3"))));
    }
    
    // Each logical source routes writes to its writer and reads to its two readers.
    private void assertReadwriteSplittingRule(final ReadwriteSplittingRule rule) {
        assertTrue(rule.findDataSourceRule("ds_0").isPresent());
        assertThat(rule.findDataSourceRule("ds_0").get().getWriteDataSourceName(), is("ds_0_write"));
        assertThat(rule.findDataSourceRule("ds_0").get().getReadDataSourceNames(), is(Arrays.asList("ds_0_read_0", "ds_0_read_1")));
        assertTrue(rule.findDataSourceRule("ds_1").isPresent());
        assertThat(rule.findDataSourceRule("ds_1").get().getWriteDataSourceName(), is("ds_1_write"));
        assertThat(rule.findDataSourceRule("ds_1").get().getReadDataSourceNames(), is(Arrays.asList("ds_1_read_0", "ds_1_read_1")));
    }
    
    // The pwd column of t_order must be AES-encrypted into pwd_cipher.
    private void assertEncryptRule(final EncryptRule rule) {
        assertThat(rule.getCipherColumn("t_order", "pwd"), is("pwd_cipher"));
        assertTrue(rule.findEncryptor(DefaultSchema.LOGIC_NAME, "t_order", "pwd").isPresent());
        assertThat(rule.findEncryptor(DefaultSchema.LOGIC_NAME, "t_order", "pwd").get().getProps().getProperty("aes-key-value"), is("123456"));
    }
    
    /** The transaction-type scanner bean must be registered by the namespace handler. */
    @Test
    public void assertTransactionTypeScanner() {
        assertNotNull(applicationContext.getBean(TransactionTypeScanner.class));
    }
}
package com.mobgen.halo.android.framework.storage.database.dsl;

/**
 * Marker interface implemented by every table definition in the HALO ORM framework.
 *
 * <p>It declares no members; implementing it tags a type as a database table so the
 * framework can discover and process it.
 */
public interface HaloTable {
}
/*
 * Copyright 2020 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.google.ads.googleads.v2.services;

import com.google.ads.googleads.v2.resources.CampaignAudienceView;
import com.google.ads.googleads.v2.services.stub.CampaignAudienceViewServiceStubSettings;
import com.google.api.core.ApiFunction;
import com.google.api.core.BetaApi;
import com.google.api.gax.core.GoogleCredentialsProvider;
import com.google.api.gax.core.InstantiatingExecutorProvider;
import com.google.api.gax.grpc.InstantiatingGrpcChannelProvider;
import com.google.api.gax.rpc.ApiClientHeaderProvider;
import com.google.api.gax.rpc.ClientContext;
import com.google.api.gax.rpc.ClientSettings;
import com.google.api.gax.rpc.TransportChannelProvider;
import com.google.api.gax.rpc.UnaryCallSettings;
import java.io.IOException;
import java.util.List;
import javax.annotation.Generated;

// AUTO-GENERATED DOCUMENTATION AND CLASS
/**
 * Settings class to configure an instance of {@link CampaignAudienceViewServiceClient}.
 *
 * <p>The default instance has everything set to sensible defaults:
 *
 * <ul>
 *   <li>The default service address (googleads.googleapis.com) and default port (443) are used.
 *   <li>Credentials are acquired automatically through Application Default Credentials.
 *   <li>Retries are configured for idempotent methods but not for non-idempotent methods.
 * </ul>
 *
 * <p>The builder of this class is recursive, so contained classes are themselves builders. When
 * build() is called, the tree of builders is called to create the complete settings object.
 *
 * <p>For example, to set the total timeout of getCampaignAudienceView to 30 seconds:
 *
 * <pre>
 * <code>
 * CampaignAudienceViewServiceSettings.Builder campaignAudienceViewServiceSettingsBuilder =
 *     CampaignAudienceViewServiceSettings.newBuilder();
 * campaignAudienceViewServiceSettingsBuilder
 *     .getCampaignAudienceViewSettings()
 *     .setRetrySettings(
 *         campaignAudienceViewServiceSettingsBuilder.getCampaignAudienceViewSettings().getRetrySettings().toBuilder()
 *             .setTotalTimeout(Duration.ofSeconds(30))
 *             .build());
 * CampaignAudienceViewServiceSettings campaignAudienceViewServiceSettings = campaignAudienceViewServiceSettingsBuilder.build();
 * </code>
 * </pre>
 */
@Generated("by gapic-generator")
@BetaApi
public class CampaignAudienceViewServiceSettings
    extends ClientSettings<CampaignAudienceViewServiceSettings> {

  // NOTE: generated code — every method below delegates to
  // CampaignAudienceViewServiceStubSettings; do not hand-edit.

  /** Returns the object with the settings used for calls to getCampaignAudienceView. */
  public UnaryCallSettings<GetCampaignAudienceViewRequest, CampaignAudienceView>
      getCampaignAudienceViewSettings() {
    return ((CampaignAudienceViewServiceStubSettings) getStubSettings())
        .getCampaignAudienceViewSettings();
  }

  /** Creates settings that wrap the given pre-built stub settings. */
  public static final CampaignAudienceViewServiceSettings create(
      CampaignAudienceViewServiceStubSettings stub) throws IOException {
    return new CampaignAudienceViewServiceSettings.Builder(stub.toBuilder()).build();
  }

  /** Returns a builder for the default ExecutorProvider for this service. */
  public static InstantiatingExecutorProvider.Builder defaultExecutorProviderBuilder() {
    return CampaignAudienceViewServiceStubSettings.defaultExecutorProviderBuilder();
  }

  /** Returns the default service endpoint. */
  public static String getDefaultEndpoint() {
    return CampaignAudienceViewServiceStubSettings.getDefaultEndpoint();
  }

  /** Returns the default service scopes. */
  public static List<String> getDefaultServiceScopes() {
    return CampaignAudienceViewServiceStubSettings.getDefaultServiceScopes();
  }

  /** Returns a builder for the default credentials for this service. */
  public static GoogleCredentialsProvider.Builder defaultCredentialsProviderBuilder() {
    return CampaignAudienceViewServiceStubSettings.defaultCredentialsProviderBuilder();
  }

  /** Returns a builder for the default ChannelProvider for this service. */
  public static InstantiatingGrpcChannelProvider.Builder defaultGrpcTransportProviderBuilder() {
    return CampaignAudienceViewServiceStubSettings.defaultGrpcTransportProviderBuilder();
  }

  public static TransportChannelProvider defaultTransportChannelProvider() {
    return CampaignAudienceViewServiceStubSettings.defaultTransportChannelProvider();
  }

  @BetaApi("The surface for customizing headers is not stable yet and may change in the future.")
  public static ApiClientHeaderProvider.Builder defaultApiClientHeaderProviderBuilder() {
    return CampaignAudienceViewServiceStubSettings.defaultApiClientHeaderProviderBuilder();
  }

  /** Returns a new builder for this class. */
  public static Builder newBuilder() {
    return Builder.createDefault();
  }

  /** Returns a new builder for this class. */
  public static Builder newBuilder(ClientContext clientContext) {
    return new Builder(clientContext);
  }

  /** Returns a builder containing all the values of this settings class. */
  public Builder toBuilder() {
    return new Builder(this);
  }

  protected CampaignAudienceViewServiceSettings(Builder settingsBuilder) throws IOException {
    super(settingsBuilder);
  }

  /** Builder for CampaignAudienceViewServiceSettings. */
  public static class Builder
      extends ClientSettings.Builder<CampaignAudienceViewServiceSettings, Builder> {

    protected Builder() throws IOException {
      this((ClientContext) null);
    }

    protected Builder(ClientContext clientContext) {
      super(CampaignAudienceViewServiceStubSettings.newBuilder(clientContext));
    }

    private static Builder createDefault() {
      return new Builder(CampaignAudienceViewServiceStubSettings.newBuilder());
    }

    protected Builder(CampaignAudienceViewServiceSettings settings) {
      super(settings.getStubSettings().toBuilder());
    }

    protected Builder(CampaignAudienceViewServiceStubSettings.Builder stubSettings) {
      super(stubSettings);
    }

    public CampaignAudienceViewServiceStubSettings.Builder getStubSettingsBuilder() {
      return ((CampaignAudienceViewServiceStubSettings.Builder) getStubSettings());
    }

    // NEXT_MAJOR_VER: remove 'throws Exception'
    /**
     * Applies the given settings updater function to all of the unary API methods in this service.
     *
     * <p>Note: This method does not support applying settings to streaming methods.
     */
    public Builder applyToAllUnaryMethods(
        ApiFunction<UnaryCallSettings.Builder<?, ?>, Void> settingsUpdater) throws Exception {
      super.applyToAllUnaryMethods(
          getStubSettingsBuilder().unaryMethodSettingsBuilders(), settingsUpdater);
      return this;
    }

    /** Returns the builder for the settings used for calls to getCampaignAudienceView. */
    public UnaryCallSettings.Builder<GetCampaignAudienceViewRequest, CampaignAudienceView>
        getCampaignAudienceViewSettings() {
      return getStubSettingsBuilder().getCampaignAudienceViewSettings();
    }

    @Override
    public CampaignAudienceViewServiceSettings build() throws IOException {
      return new CampaignAudienceViewServiceSettings(this);
    }
  }
}
package org.nfpa.spatial.utils;

import java.lang.reflect.Field;

/** Small static helpers for parsing and native-library loading. */
public class Utils {

    /**
     * Parses {@code stringToParse} as a decimal integer.
     *
     * @param stringToParse the text to parse (may be null or malformed)
     * @param defaultValue the value returned when parsing fails
     * @return the parsed value, or {@code defaultValue} on any NumberFormatException
     *         (Integer.parseInt(null) also throws NumberFormatException, so null is covered)
     */
    public static int parseToInt(String stringToParse, int defaultValue) {
        try {
            return Integer.parseInt(stringToParse);
        } catch (NumberFormatException ex) {
            return defaultValue;
        }
    }

    /**
     * Points {@code java.library.path} at the libpostal directory and forces the JVM to
     * re-read it by clearing the class loader's cached search paths.
     *
     * <p>NOTE(review): this relies on the internal {@code ClassLoader.sys_paths} field via
     * reflection; newer JVMs (Java 12+) filter such fields and this may throw — confirm the
     * target runtime before relying on it.
     *
     * @param libpostalPath directory containing the libpostal native library
     */
    public static void loadLibPostal(String libpostalPath)
            throws NoSuchFieldException, IllegalAccessException {
        System.setProperty("java.library.path", libpostalPath);
        final Field cachedPaths = ClassLoader.class.getDeclaredField("sys_paths");
        cachedPaths.setAccessible(true);
        // Null the cache so the next System.loadLibrary re-resolves java.library.path.
        cachedPaths.set(null, null);
    }
}
/*
 * Copyright 2017 ThoughtWorks, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.thoughtworks.go.server.service.support;

import com.thoughtworks.go.serverhealth.ServerHealthService;
import com.thoughtworks.go.serverhealth.ServerHealthState;
import com.thoughtworks.go.serverhealth.ServerHealthStates;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.Map;

/**
 * @understands Dumping all the server health messages
 */
@Component
public class ServerHealthInformationProvider implements ServerInfoProvider {
    private ServerHealthService service;

    @Autowired
    public ServerHealthInformationProvider(ServerHealthService service) {
        this.service = service;
    }

    @Override
    public double priority() {
        return 6.0;
    }

    /**
     * Renders the current server health messages as an ordered map: a count followed
     * by the per-message JSON representations.
     */
    @Override
    public Map<String, Object> asJson() {
        LinkedHashMap<String, Object> result = new LinkedHashMap<>();
        ServerHealthStates healthStates = service.logs();
        result.put("Messages Count", healthStates.size());
        ArrayList<Map<String, String>> messageList = new ArrayList<>();
        for (ServerHealthState healthState : healthStates) {
            messageList.add(healthState.asJson());
        }
        result.put("Messages", messageList);
        return result;
    }

    @Override
    public String name() {
        return "Server Health Information";
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.asterix.runtime.utils; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.Set; import java.util.SortedMap; import java.util.TreeSet; import java.util.concurrent.TimeUnit; import java.util.function.Predicate; import org.apache.asterix.common.api.IClusterManagementWork.ClusterState; import org.apache.asterix.common.cluster.ClusterPartition; import org.apache.asterix.common.cluster.IClusterStateManager; import org.apache.asterix.common.dataflow.ICcApplicationContext; import org.apache.asterix.common.exceptions.AsterixException; import org.apache.asterix.common.exceptions.ErrorCode; import org.apache.asterix.common.replication.INcLifecycleCoordinator; import org.apache.asterix.common.transactions.IResourceIdManager; import org.apache.asterix.common.utils.NcLocalCounters; import org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint; import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException; import org.apache.hyracks.api.config.IOption; import org.apache.hyracks.api.config.Section; import 
org.apache.hyracks.api.exceptions.HyracksDataException; import org.apache.hyracks.api.exceptions.HyracksException; import org.apache.hyracks.control.cc.ClusterControllerService; import org.apache.hyracks.control.common.application.ConfigManagerApplicationConfig; import org.apache.hyracks.control.common.config.ConfigManager; import org.apache.hyracks.control.common.controllers.NCConfig; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.node.ArrayNode; import com.fasterxml.jackson.databind.node.ObjectNode; /** * A holder class for properties related to the Asterix cluster. */ public class ClusterStateManager implements IClusterStateManager { private static final Logger LOGGER = LogManager.getLogger(); private final Map<String, Map<IOption, Object>> ncConfigMap = new HashMap<>(); private Set<String> pendingRemoval = new HashSet<>(); private ClusterState state = ClusterState.UNUSABLE; private AlgebricksAbsolutePartitionConstraint clusterPartitionConstraint; private Map<String, ClusterPartition[]> node2PartitionsMap; private SortedMap<Integer, ClusterPartition> clusterPartitions; private String currentMetadataNode = null; private boolean metadataNodeActive = false; private Set<String> failedNodes = new HashSet<>(); private Set<String> participantNodes = new HashSet<>(); private INcLifecycleCoordinator lifecycleCoordinator; private ICcApplicationContext appCtx; private ClusterPartition metadataPartition; private boolean rebalanceRequired; @Override public void setCcAppCtx(ICcApplicationContext appCtx) { this.appCtx = appCtx; node2PartitionsMap = appCtx.getMetadataProperties().getNodePartitions(); clusterPartitions = appCtx.getMetadataProperties().getClusterPartitions(); currentMetadataNode = appCtx.getMetadataProperties().getMetadataNodeName(); metadataPartition = node2PartitionsMap.get(currentMetadataNode)[0]; lifecycleCoordinator = 
appCtx.getNcLifecycleCoordinator(); lifecycleCoordinator.bindTo(this); } @Override public synchronized void notifyNodeFailure(String nodeId) throws HyracksException { if (LOGGER.isInfoEnabled()) { LOGGER.info("Removing configuration parameters for node id " + nodeId); } failedNodes.add(nodeId); ncConfigMap.remove(nodeId); pendingRemoval.remove(nodeId); lifecycleCoordinator.notifyNodeFailure(nodeId); } @Override public synchronized void notifyNodeJoin(String nodeId, Map<IOption, Object> configuration) throws HyracksException { if (LOGGER.isInfoEnabled()) { LOGGER.info("Registering configuration parameters for node id " + nodeId); } failedNodes.remove(nodeId); ncConfigMap.put(nodeId, configuration); updateNodeConfig(nodeId, configuration); lifecycleCoordinator.notifyNodeJoin(nodeId); } @Override public synchronized void setState(ClusterState state) { if (this.state == state) { LOGGER.info("ignoring update to same cluster state of " + this.state); return; } LOGGER.info("updating cluster state from " + this.state + " to " + state.name()); this.state = state; appCtx.getGlobalRecoveryManager().notifyStateChange(state); LOGGER.info("Cluster State is now " + state.name()); // Notify any waiting threads for the cluster state to change. 
    // (tail of a method that begins earlier in the file)
    notifyAll(); }

    /**
     * Records which node holds the metadata role and whether it is currently active.
     * When active, the metadata partition is re-pointed at that node. Always wakes
     * threads blocked in waitForState(...), since the cluster state may now change.
     */
    @Override
    public synchronized void updateMetadataNode(String nodeId, boolean active) {
        currentMetadataNode = nodeId;
        metadataNodeActive = active;
        if (active) {
            metadataPartition.setActiveNodeId(currentMetadataNode);
            LOGGER.info("Metadata node {} is now active", currentMetadataNode);
        }
        notifyAll();
    }

    /**
     * Tracks a node joining (active) or leaving (inactive) the cluster, and updates
     * the activation state of every cluster partition hosted on that node.
     */
    @Override
    public synchronized void updateNodeState(String nodeId, boolean active, NcLocalCounters localCounters) {
        if (active) {
            // fold the node's local counters (resource/txn/job ids) into cluster-wide maxima
            updateClusterCounters(nodeId, localCounters);
            participantNodes.add(nodeId);
        } else {
            participantNodes.remove(nodeId);
        }
        ClusterPartition[] nodePartitions = node2PartitionsMap.get(nodeId);
        // if this isn't a storage node, it will not have cluster partitions
        if (nodePartitions != null) {
            for (ClusterPartition p : nodePartitions) {
                updateClusterPartition(p.getPartitionId(), nodeId, active);
            }
        }
    }

    /**
     * Marks a single cluster partition (in)active. On activation the owning node is
     * recorded and any pending-activation flag is cleared. Unknown partition ids are
     * silently ignored; waiters are notified only for known partitions.
     */
    @Override
    public synchronized void updateClusterPartition(int partitionNum, String activeNode, boolean active) {
        ClusterPartition clusterPartition = clusterPartitions.get(partitionNum);
        if (clusterPartition != null) {
            // set the active node for this node's partitions
            clusterPartition.setActive(active);
            if (active) {
                clusterPartition.setActiveNodeId(activeNode);
                clusterPartition.setPendingActivation(false);
            }
            notifyAll();
        }
    }

    /**
     * Re-derives the cluster state from the current partition map:
     * SHUTTING_DOWN is terminal; no registered/usable partitions -> UNUSABLE; any
     * inactive non-pending partition -> UNUSABLE; metadata node down -> PENDING;
     * global recovery incomplete -> RECOVERING; rebalance flagged ->
     * REBALANCE_REQUIRED; otherwise ACTIVE.
     */
    @Override
    public synchronized void refreshState() throws HyracksDataException {
        if (state == ClusterState.SHUTTING_DOWN) {
            LOGGER.info("Not refreshing final state {}", state);
            return;
        }
        resetClusterPartitionConstraint();
        // if the cluster has no registered partitions or all partitions are pending activation -> UNUSABLE
        if (clusterPartitions.isEmpty()
                || clusterPartitions.values().stream().allMatch(ClusterPartition::isPendingActivation)) {
            LOGGER.info("Cluster does not have any registered partitions");
            setState(ClusterState.UNUSABLE);
            return;
        }
        // exclude partitions that are pending activation
        if (clusterPartitions.values().stream().anyMatch(p -> !p.isActive() && !p.isPendingActivation())) {
            setState(ClusterState.UNUSABLE);
            return;
        }
        // the metadata bootstrap & global recovery must be complete before the cluster can be active
        if (!metadataNodeActive) {
            setState(ClusterState.PENDING);
            return;
        }
        if (state != ClusterState.ACTIVE && state != ClusterState.RECOVERING) {
            setState(ClusterState.PENDING);
        }
        appCtx.getMetadataBootstrap().init();
        if (!appCtx.getGlobalRecoveryManager().isRecoveryCompleted()) {
            // start global recovery
            setState(ClusterState.RECOVERING);
            appCtx.getGlobalRecoveryManager().startGlobalRecovery(appCtx);
            return;
        }
        if (rebalanceRequired) {
            setState(ClusterState.REBALANCE_REQUIRED);
            return;
        }
        // finally- life is good, set the state to ACTIVE
        setState(ClusterState.ACTIVE);
    }

    /** Blocks (no timeout) until the cluster reaches exactly the requested state. */
    @Override
    public synchronized void waitForState(ClusterState waitForState) throws InterruptedException {
        while (state != waitForState) {
            wait();
        }
    }

    /** Timed variant: true iff the requested state was reached within the timeout. */
    @Override
    public boolean waitForState(ClusterState waitForState, long timeout, TimeUnit unit) throws InterruptedException {
        return waitForState(waitForState::equals, timeout, unit) != null;
    }

    /**
     * Blocks until the predicate accepts the current state or the timeout expires.
     * Returns the matching state, or null on timeout. The while-loop re-tests the
     * predicate after every wakeup, which also guards against spurious wakeups.
     */
    @Override
    public synchronized ClusterState waitForState(Predicate<ClusterState> predicate, long timeout, TimeUnit unit)
            throws InterruptedException {
        final long startMillis = System.currentTimeMillis();
        final long endMillis = startMillis + unit.toMillis(timeout);
        while (!predicate.test(state)) {
            long millisToSleep = endMillis - System.currentTimeMillis();
            if (millisToSleep > 0) {
                wait(millisToSleep);
            } else {
                return null;
            }
        }
        return state;
    }

    /**
     * Returns the configured IO devices of a node, or an empty array when the node's
     * configuration is unknown (node has not joined yet, or has already left).
     */
    @Override
    public synchronized String[] getIODevices(String nodeId) {
        Map<IOption, Object> ncConfig = ncConfigMap.get(nodeId);
        if (ncConfig == null) {
            if (LOGGER.isWarnEnabled()) {
                LOGGER.warn("Configuration parameters for nodeId " + nodeId
                        + " not found. The node has not joined yet or has left.");
            }
            return new String[0];
        }
        return (String[]) ncConfig.get(NCConfig.Option.IODEVICES);
    }

    @Override
    public synchronized ClusterState getState() {
        return state;
    }

    /** Defensive copy: callers must not be able to mutate the internal participant set. */
    @Override
    public synchronized Set<String> getParticipantNodes() {
        return new HashSet<>(participantNodes);
    }

    /** Defensive copy of the failed-node set. */
    @Override
    public synchronized Set<String> getFailedNodes() {
        return new HashSet<>(failedNodes);
    }

    /** Union of participant and failed nodes, returned as a fresh set. */
    @Override
    public synchronized Set<String> getNodes() {
        Set<String> nodes = new HashSet<>(participantNodes);
        nodes.addAll(failedNodes);
        return nodes;
    }

    /** Participant nodes, optionally excluding those registered for pending removal. */
    @Override
    public synchronized Set<String> getParticipantNodes(boolean excludePendingRemoval) {
        final Set<String> participantNodesCopy = getParticipantNodes();
        if (excludePendingRemoval) {
            participantNodesCopy.removeAll(pendingRemoval);
        }
        return participantNodesCopy;
    }

    /** Lazily (re)builds and returns the active-partition location constraint. */
    @Override
    public synchronized AlgebricksAbsolutePartitionConstraint getClusterLocations() {
        if (clusterPartitionConstraint == null) {
            resetClusterPartitionConstraint();
        }
        return clusterPartitionConstraint;
    }

    /**
     * Rebuilds the location constraint from the currently active partitions,
     * excluding nodes that are pending removal.
     */
    private synchronized void resetClusterPartitionConstraint() {
        ArrayList<String> clusterActiveLocations = new ArrayList<>();
        for (ClusterPartition p : clusterPartitions.values()) {
            if (p.isActive()) {
                clusterActiveLocations.add(p.getActiveNodeId());
            }
        }
        clusterActiveLocations.removeAll(pendingRemoval);
        clusterPartitionConstraint =
                new AlgebricksAbsolutePartitionConstraint(clusterActiveLocations.toArray(new String[] {}));
    }

    @Override
    public synchronized boolean isClusterActive() {
        return state == ClusterState.ACTIVE;
    }

    @Override
    public synchronized int getNumberOfNodes() {
        return participantNodes.size();
    }

    /** Returns the node's partitions, or null for unknown / non-storage nodes. */
    @Override
    public synchronized ClusterPartition[] getNodePartitions(String nodeId) {
        return node2PartitionsMap.get(nodeId);
    }

    /** Number of partitions hosted by the node; 0 when the node is unknown. */
    @Override
    public synchronized int getNodePartitionsCount(String node) {
        if (node2PartitionsMap.containsKey(node)) {
            return node2PartitionsMap.get(node).length;
        }
        return 0;
    }

    // NOTE(review): "Partitons" spelling kept as-is — it must match the overridden interface method.
    @Override
    public synchronized ClusterPartition[] getClusterPartitons() {
        return clusterPartitions.values().toArray(new ClusterPartition[] {});
    }

    @Override
    public synchronized boolean isMetadataNodeActive() {
        return metadataNodeActive;
    }

    /**
     * Builds a JSON description of the cluster: overall state, current metadata
     * node, and one entry per node with its partitions plus a derived per-node
     * state (FAILED / ACTIVE / PARTIALLY_ACTIVE / INACTIVE).
     */
    @Override
    public synchronized ObjectNode getClusterStateDescription() {
        ObjectMapper om = new ObjectMapper();
        ObjectNode stateDescription = om.createObjectNode();
        stateDescription.put("state", state.name());
        stateDescription.put("metadata_node", currentMetadataNode);
        ArrayNode ncs = om.createArrayNode();
        stateDescription.set("ncs", ncs);
        for (String node : new TreeSet<>(node2PartitionsMap.keySet())) {
            ObjectNode nodeJSON = om.createObjectNode();
            nodeJSON.put("node_id", node);
            boolean allActive = true;
            boolean anyActive = false;
            Set<Map<String, Object>> partitions = new HashSet<>();
            if (node2PartitionsMap.containsKey(node)) {
                for (ClusterPartition part : node2PartitionsMap.get(node)) {
                    HashMap<String, Object> partition = new HashMap<>();
                    partition.put("partition_id", "partition_" + part.getPartitionId());
                    partition.put("active", part.isActive());
                    partitions.add(partition);
                    allActive = allActive && part.isActive();
                    // NOTE(review): anyActive is only set while allActive still holds, so a
                    // node whose first partition is inactive but whose later partition is
                    // active is reported INACTIVE instead of PARTIALLY_ACTIVE — confirm
                    // this is intended (a plain `if (part.isActive())` would differ).
                    if (allActive) {
                        anyActive = true;
                    }
                }
            }
            nodeJSON.put("state", failedNodes.contains(node) ? "FAILED"
                    : allActive && anyActive ? "ACTIVE" : anyActive ? "PARTIALLY_ACTIVE" : "INACTIVE");
            nodeJSON.putPOJO("partitions", partitions);
            ncs.add(nodeJSON);
        }
        return stateDescription;
    }

    /** Compact JSON summary: state, metadata node, and the raw partition map. */
    @Override
    public synchronized ObjectNode getClusterStateSummary() {
        ObjectMapper om = new ObjectMapper();
        ObjectNode stateDescription = om.createObjectNode();
        stateDescription.put("state", state.name());
        stateDescription.putPOJO("metadata_node", currentMetadataNode);
        stateDescription.putPOJO("partitions", clusterPartitions);
        return stateDescription;
    }

    /** Read-only view of all known NC configurations. */
    @Override
    public Map<String, Map<IOption, Object>> getNcConfiguration() {
        return Collections.unmodifiableMap(ncConfigMap);
    }

    @Override
    public String getCurrentMetadataNodeId() {
        return currentMetadataNode;
    }

    /**
     * Registers a node's partitions. A first pass verifies no partition id is
     * already claimed (so nothing is registered on conflict); a second pass then
     * registers them all, starting in pending-activation state.
     *
     * @throws AlgebricksException on a duplicate partition id
     */
    @Override
    public synchronized void registerNodePartitions(String nodeId, ClusterPartition[] nodePartitions)
            throws AlgebricksException {
        if (LOGGER.isInfoEnabled()) {
            LOGGER.info("Registering node partitions for node " + nodeId + ": " + Arrays.toString(nodePartitions));
        }
        // We want to make sure there are no conflicts; make two passes for simplicity...
        for (ClusterPartition nodePartition : nodePartitions) {
            if (clusterPartitions.containsKey(nodePartition.getPartitionId())) {
                throw AsterixException.create(ErrorCode.DUPLICATE_PARTITION_ID, nodePartition.getPartitionId(),
                        nodeId, clusterPartitions.get(nodePartition.getPartitionId()).getNodeId());
            }
        }
        for (ClusterPartition nodePartition : nodePartitions) {
            nodePartition.setPendingActivation(true);
            clusterPartitions.put(nodePartition.getPartitionId(), nodePartition);
        }
        node2PartitionsMap.put(nodeId, nodePartitions);
    }

    /**
     * Removes a node and all of its partitions from the cluster maps. A no-op
     * (logged) when the node is unknown or already removed.
     */
    @Override
    public synchronized void deregisterNodePartitions(String nodeId) throws HyracksDataException {
        ClusterPartition[] nodePartitions = node2PartitionsMap.remove(nodeId);
        if (nodePartitions == null) {
            LOGGER.info("deregisterNodePartitions unknown node " + nodeId + " (already removed?)");
        } else {
            if (LOGGER.isInfoEnabled()) {
                LOGGER.info("deregisterNodePartitions for node " + nodeId + ": " + Arrays.toString(nodePartitions));
            }
            for (ClusterPartition nodePartition : nodePartitions) {
                clusterPartitions.remove(nodePartition.getPartitionId());
            }
            participantNodes.remove(nodeId);
        }
    }

    /** Registers the intent to remove a participant node; unknown nodes only get a warning. */
    @Override
    public synchronized void removePending(String nodeId) {
        if (LOGGER.isInfoEnabled()) {
            LOGGER.info("Registering intention to remove node id " + nodeId);
        }
        if (participantNodes.contains(nodeId)) {
            pendingRemoval.add(nodeId);
        } else {
            LOGGER.warn("Cannot register unknown node " + nodeId + " for pending removal");
        }
    }

    /** Cancels a pending removal; returns false (with a warning) when none was registered. */
    @Override
    public synchronized boolean cancelRemovePending(String nodeId) {
        if (LOGGER.isInfoEnabled()) {
            LOGGER.info("Deregistering intention to remove node id " + nodeId);
        }
        if (!pendingRemoval.remove(nodeId)) {
            LOGGER.warn("Cannot deregister intention to remove node id " + nodeId + " that was not registered");
            return false;
        } else {
            return true;
        }
    }

    // NOTE(review): unlike getNcConfiguration(), this returns the mutable internal map directly.
    @Override
    public Map<String, Map<IOption, Object>> getActiveNcConfiguration() {
        return ncConfigMap;
    }

    /** Defensive copy of the pending-removal set. */
    public synchronized Set<String> getNodesPendingRemoval() {
        return new HashSet<>(pendingRemoval);
    }
@Override public synchronized void setMetadataPartitionId(ClusterPartition partition) { metadataPartition = partition; } @Override public synchronized ClusterPartition getMetadataPartition() { return metadataPartition; } @Override public synchronized void setRebalanceRequired(boolean rebalanceRequired) throws HyracksDataException { this.rebalanceRequired = rebalanceRequired; refreshState(); } private void updateClusterCounters(String nodeId, NcLocalCounters localCounters) { final IResourceIdManager resourceIdManager = appCtx.getResourceIdManager(); resourceIdManager.report(nodeId, localCounters.getMaxResourceId()); appCtx.getTxnIdFactory().ensureMinimumId(localCounters.getMaxTxnId()); ((ClusterControllerService) appCtx.getServiceContext().getControllerService()).getJobIdFactory() .setMaxJobId(localCounters.getMaxJobId()); } private void updateNodeConfig(String nodeId, Map<IOption, Object> configuration) { ConfigManager configManager = ((ConfigManagerApplicationConfig) appCtx.getServiceContext().getAppConfig()).getConfigManager(); configuration.forEach((key, value) -> { if (key.section() == Section.NC) { configManager.set(nodeId, key, value); } }); } }
package com.infra.managers.utils; import com.infra.BuildConfig; import com.infra.managers.DataManager; import com.infra.managers.requests.Service; import com.infra.logging.util.LogUtils; import java.io.IOException; import java.security.KeyManagementException; import java.security.NoSuchAlgorithmException; import java.security.cert.CertificateException; import java.util.HashMap; import java.util.Map; import java.util.UUID; import java.util.concurrent.TimeUnit; import javax.net.ssl.HostnameVerifier; import javax.net.ssl.SSLContext; import javax.net.ssl.SSLSession; import javax.net.ssl.SSLSocketFactory; import javax.net.ssl.TrustManager; import javax.net.ssl.X509TrustManager; import okhttp3.Headers; import okhttp3.Interceptor; import okhttp3.OkHttpClient; import okhttp3.Request; import okhttp3.Response; import retrofit2.Converter; import retrofit2.Retrofit; /** * Created by hetashah on 7/15/16. */ public class ServiceUtil { private static final String TAG = ServiceUtil.class.getName(); //TODO: The timeouts should be determined based on the network type (cellular vs Wifi) and speed public static final int CONNECT_TIMEOUT_SECONDS = 10; public static final int WRITE_TIMEOUT_SECONDS = 5; public static final int READ_TIMEOUT_SECONDS = 5; public static DataManager.RetrofitServicePair buildService(Converter.Factory converterFactory) { OkHttpClient.Builder okHttpBuilder = new OkHttpClient.Builder() .connectTimeout(CONNECT_TIMEOUT_SECONDS, TimeUnit.SECONDS) .writeTimeout(WRITE_TIMEOUT_SECONDS, TimeUnit.SECONDS) .readTimeout(READ_TIMEOUT_SECONDS, TimeUnit.SECONDS) .addInterceptor(new Interceptor() { @Override public Response intercept(Chain chain) throws IOException { Request request = chain.request(); Request newRequest; newRequest = request.newBuilder() .headers(buildRequestHeaders()) .build(); return chain.proceed(newRequest); } }); // !!! ONLY FOR DEBUG BUILDS !!! 
// See the CharlesProxy setup instructions: https://docs.google.com/document/d/1X3cFGtnxVcIokADMacOeoUsivf5n5VqyF8TR-MNfYQw/edit#heading=h.c44usz4sjqe7 // Accept all certificates (including CharlesProxy) to be able to inspect the service network traffic if(BuildConfig.DEBUG) { disableTLSHandshake(okHttpBuilder); } Retrofit retrofit = new Retrofit.Builder() .client(okHttpBuilder .build()) .baseUrl("YOUR_API_END_POINT") //TODO: Needs to replace with you end point .addConverterFactory(converterFactory) .build(); return new DataManager.RetrofitServicePair(retrofit, retrofit.create(Service.class)); } /** Use this only in DEBUG mode!!! */ private static void disableTLSHandshake(OkHttpClient.Builder okHttpBuilder) { // Create a trust manager that does not validate certificate chains final TrustManager[] trustAllCerts = new TrustManager[] { new X509TrustManager() { @Override public void checkClientTrusted(java.security.cert.X509Certificate[] chain, String authType) throws CertificateException {} @Override public void checkServerTrusted(java.security.cert.X509Certificate[] chain, String authType) throws CertificateException {} @Override public java.security.cert.X509Certificate[] getAcceptedIssuers() {return new java.security.cert.X509Certificate[]{};} } }; try { // Install the all-trusting trust manager final SSLContext sslContext = SSLContext.getInstance("SSL"); sslContext.init(null, trustAllCerts, new java.security.SecureRandom()); // Create an ssl socket factory with our all-trusting manager final SSLSocketFactory sslSocketFactory = sslContext.getSocketFactory(); okHttpBuilder.sslSocketFactory(sslSocketFactory); okHttpBuilder.hostnameVerifier(new HostnameVerifier() { @Override public boolean verify(String hostname, SSLSession session) { return true; } }); } catch(NoSuchAlgorithmException |KeyManagementException ex) { LogUtils.e(TAG, ex.getMessage(), ex); } } private static Headers buildRequestHeaders() { Map<String, String> headers = new HashMap<String, String>(); 
headers.put("Content-Type", "application/json"); /** * Logging request Id to back track user query. * This is very helpful for debugging. */ String requestId = UUID.randomUUID().toString(); LogUtils.i("X-Request-Id", requestId); headers.put("X-Request-Id", requestId); //headers.put("X-Device-Type", "Android"); //TODO: Add your Common headers here return Headers.of(headers); } }
/* * Copyright 2006-2021 Marcel Baumann * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain * a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations * under the License. */ package net.tangly.gleam; import org.junit.jupiter.api.Test; import static org.assertj.core.api.Assertions.assertThat; class GleamTest { @Test void testNothingForJacoco() { // test added to avoid crash of Jacoco assertThat(true).isTrue(); } }
// // Source code recreated from a .class file by IntelliJ IDEA // (powered by Fernflower decompiler) // package com.bstek.urule.model.scorecard; public enum CellType { attribute, score, condition, custom; private CellType() { } }
package com.sustech.flightbooking.viewmodel; public class RegisterViewModel { private String userName; private String displayName; private String password; private String confirmPassword; private String identityNumber; public String getUserName() { return userName; } public void setUserName(String userName) { this.userName = userName; } public String getPassword() { return password; } public void setPassword(String password) { this.password = password; } public String getConfirmPassword() { return confirmPassword; } public void setConfirmPassword(String confirmPassword) { this.confirmPassword = confirmPassword; } public String getDisplayName() { return displayName; } public void setDisplayName(String displayName) { this.displayName = displayName; } public String getIdentityNumber() { return identityNumber; } public void setIdentityNumber(String identityNumber) { this.identityNumber = identityNumber; } }
/* * Copyright (c) 2011-2016, Peter Abeles. All Rights Reserved. * * This file is part of BoofCV (http://boofcv.org). * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package boofcv.abst.tracker; import boofcv.abst.filter.derivative.ImageGradient; import boofcv.alg.filter.derivative.GImageDerivativeOps; import boofcv.alg.tracker.sfot.SfotConfig; import boofcv.alg.tracker.sfot.SparseFlowObjectTracker; import boofcv.factory.filter.derivative.FactoryDerivative; import boofcv.struct.image.GrayU8; import boofcv.struct.image.ImageDataType; import boofcv.struct.image.ImageType; /** * @author Peter Abeles */ public class TestSfot_to_TrackObjectQuad extends TextureGrayTrackerObjectRectangleTests { @Override public TrackerObjectQuad<GrayU8> create(ImageType<GrayU8> imageType) { Class ct = ImageDataType.typeToSingleClass(imageType.getDataType()); Class dt = GImageDerivativeOps.getDerivativeType(ct); ImageGradient gradient = FactoryDerivative.sobel(ct, dt); SfotConfig config = new SfotConfig(); SparseFlowObjectTracker tracker = new SparseFlowObjectTracker(config,ct,dt,gradient); return new Sfot_to_TrackObjectQuad(tracker,ct); } }
package org.onvif.ver10.display.wsdl; import java.util.ArrayList; import java.util.List; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlAnyElement; import javax.xml.bind.annotation.XmlElement; import javax.xml.bind.annotation.XmlRootElement; import javax.xml.bind.annotation.XmlType; import org.w3c.dom.Element; /** * <p>Java class for anonymous complex type. * * <p>The following schema fragment specifies the expected content contained within this class. * * <pre> * &lt;complexType&gt; * &lt;complexContent&gt; * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"&gt; * &lt;sequence&gt; * &lt;element name="VideoOutput" type="{http://www.onvif.org/ver10/schema}ReferenceToken"/&gt; * &lt;any processContents='lax' maxOccurs="unbounded" minOccurs="0"/&gt; * &lt;/sequence&gt; * &lt;/restriction&gt; * &lt;/complexContent&gt; * &lt;/complexType&gt; * </pre> * * */ @XmlAccessorType(XmlAccessType.FIELD) @XmlType(name = "", propOrder = { "videoOutput", "any" }) @XmlRootElement(name = "GetLayout") public class GetLayout { @XmlElement(name = "VideoOutput", required = true) protected String videoOutput; @XmlAnyElement(lax = true) protected List<Object> any; /** * Gets the value of the videoOutput property. * * @return * possible object is * {@link String } * */ public String getVideoOutput() { return videoOutput; } /** * Sets the value of the videoOutput property. * * @param value * allowed object is * {@link String } * */ public void setVideoOutput(String value) { this.videoOutput = value; } /** * Gets the value of the any property. * * <p> * This accessor method returns a reference to the live list, * not a snapshot. Therefore any modification you make to the * returned list will be present inside the JAXB object. * This is why there is not a <CODE>set</CODE> method for the any property. 
* * <p> * For example, to add a new item, do as follows: * <pre> * getAny().add(newItem); * </pre> * * * <p> * Objects of the following type(s) are allowed in the list * {@link Element } * {@link Object } * * */ public List<Object> getAny() { if (any == null) { any = new ArrayList<Object>(); } return this.any; } }
package fastsimjava.components; import java.io.BufferedReader; import java.io.FileReader; import java.io.FileWriter; import java.io.IOException; public class FSJSimConstants { public float airDensity,gravity,h2KWhPerKg,kWhPerGGE,kWhPerGalDiesel,gasKWhPerKg,dieselKWhPerKg,cngKWhPerM3; public float refAmbTempC,refAtmPressureBar; public static final float mphPerMps = 2.2369f; //Miles per hour / meters per second public static final float metersPerMile = 1609; //Meters per mile public static final float inchesPerMeter = 39.37f; //Inches per meter public static final float litrePerGal = 3.78541f; //Liters per Gallon public static float litrePer100km(float milesPerGal) { float gpm = 1f/milesPerGal; return gpm*litrePerGal*100000f/metersPerMile; } //Default constructor public FSJSimConstants() { airDensity = 1.2f; //kg/m3 (slightly different from exact air density of 1.2041 at standard temperature of 20c and 1atm = 101.325 kPa) gravity = 9.81f; //m/s2 h2KWhPerKg = FuelCalcConstants.Default_h2KWhPerKg; kWhPerGGE = FuelCalcConstants.Default_kWhPerGGE; kWhPerGalDiesel = FuelCalcConstants.Default_kWhPerGalDiesel; cngKWhPerM3 = FuelCalcConstants.Default_cngKWhPerM3; gasKWhPerKg = 13.1f; //kWh per kilogram of Gasoline dieselKWhPerKg = 12.61f; //kWh per kilogram of Diesel refAmbTempC = 20f; //Degrees Celsius refAtmPressureBar = 1.0098f; //Bar } //Constructor via CSV file public FSJSimConstants(String fname) { readFromCSVFile(fname); } //Function to write contents to CSV file public void writeToCSVFile(String fname) { try { String lsep = System.getProperty("line.separator"); FileWriter fWriter = new FileWriter(fname); fWriter.append(headerString()); fWriter.append(lsep); fWriter.append(toString()); fWriter.append(lsep); fWriter.flush(); fWriter.close(); } catch (IOException e) {} } //Function to read contents from CSV file public void readFromCSVFile(String fname) { try { BufferedReader readingBuffer=new BufferedReader(new FileReader(fname)); String readLine = 
readingBuffer.readLine(); readLine = readingBuffer.readLine(); parseFromString(readLine); readingBuffer.close(); } catch (IOException e) {} } //Header string public static String headerString() { return "airDensity,gravity,h2KWhPerKg,kWhPerGGE,kWhPerGalDiesel,gasKWhPerKg,dieselKWhPerKg,refAmbTempC,refAtmPressureBar"; } //Form Equivalent string @Override public String toString() { return ""+airDensity+","+gravity+","+h2KWhPerKg+","+kWhPerGGE+","+kWhPerGalDiesel+","+ gasKWhPerKg+","+dieselKWhPerKg+","+refAmbTempC+","+refAtmPressureBar; } //Function to parse values from equivalent string public void parseFromString(String readLine) { String[] strSplit = readLine.split(","); airDensity = Float.parseFloat(strSplit[0]); gravity = Float.parseFloat(strSplit[1]); h2KWhPerKg = Float.parseFloat(strSplit[2]); kWhPerGGE = Float.parseFloat(strSplit[3]); kWhPerGalDiesel = Float.parseFloat(strSplit[4]); gasKWhPerKg = Float.parseFloat(strSplit[5]); dieselKWhPerKg = Float.parseFloat(strSplit[6]); refAmbTempC = Float.parseFloat(strSplit[7]); refAtmPressureBar = Float.parseFloat(strSplit[8]); } }
package com.prowidesoftware.swift.model.mx.dic;

import java.util.ArrayList;
import java.util.List;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlSchemaType;
import javax.xml.bind.annotation.XmlType;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;


/**
 * Acceptor parameters dedicated to the acquirer protocol.
 *
 * <p>JAXB-generated ISO 20022 binding. Conventions used throughout:
 * list properties expose a live, lazily-created list (no setter — mutate the
 * returned list, or use the fluent {@code addXxx} helpers); scalar setters
 * return {@code this} for chaining; equals/hashCode/toString are
 * reflection-based.
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "AcquirerProtocolParameters3", propOrder = {
    "acqrrId",
    "applId",
    "hst",
    "onLineTx",
    "offLineTx",
    "rcncltnXchg",
    "rcncltnByAcqrr",
    "ttlsPerCcy",
    "spltTtls",
    "cardDataVrfctn",
    "btchTrfCntt",
    "msgItm",
    "prtctCardData"
})
public class AcquirerProtocolParameters3 {

    @XmlElement(name = "AcqrrId", required = true)
    protected List<GenericIdentification32> acqrrId;
    @XmlElement(name = "ApplId")
    protected List<String> applId;
    @XmlElement(name = "Hst")
    protected List<AcquirerHostConfiguration2> hst;
    @XmlElement(name = "OnLineTx")
    protected AcquirerProtocolParameters4 onLineTx;
    @XmlElement(name = "OffLineTx")
    protected AcquirerProtocolParameters4 offLineTx;
    @XmlElement(name = "RcncltnXchg")
    protected ExchangeConfiguration2 rcncltnXchg;
    @XmlElement(name = "RcncltnByAcqrr")
    protected Boolean rcncltnByAcqrr;
    @XmlElement(name = "TtlsPerCcy")
    protected Boolean ttlsPerCcy;
    @XmlElement(name = "SpltTtls")
    protected Boolean spltTtls;
    @XmlElement(name = "CardDataVrfctn")
    protected Boolean cardDataVrfctn;
    @XmlElement(name = "BtchTrfCntt")
    @XmlSchemaType(name = "string")
    protected List<BatchTransactionType1Code> btchTrfCntt;
    @XmlElement(name = "MsgItm")
    protected List<MessageItemCondition1> msgItm;
    // primitive boolean: XML element is effectively mandatory (no null state)
    @XmlElement(name = "PrtctCardData")
    protected boolean prtctCardData;

    /**
     * Live list accessor for the acqrrId property (acquirer identifications).
     * JAXB convention: no setter — mutate the returned list directly, e.g.
     * {@code getAcqrrId().add(newItem)}.
     *
     * @return the live, lazily-created list of {@link GenericIdentification32 }; never null
     */
    public List<GenericIdentification32> getAcqrrId() {
        if (acqrrId == null) {
            acqrrId = new ArrayList<GenericIdentification32>();
        }
        return this.acqrrId;
    }

    /**
     * Live list accessor for the applId property (application identifiers).
     * Mutate the returned list directly.
     *
     * @return the live, lazily-created list of {@link String }; never null
     */
    public List<String> getApplId() {
        if (applId == null) {
            applId = new ArrayList<String>();
        }
        return this.applId;
    }

    /**
     * Live list accessor for the hst property (acquirer host configurations).
     * Mutate the returned list directly.
     *
     * @return the live, lazily-created list of {@link AcquirerHostConfiguration2 }; never null
     */
    public List<AcquirerHostConfiguration2> getHst() {
        if (hst == null) {
            hst = new ArrayList<AcquirerHostConfiguration2>();
        }
        return this.hst;
    }

    /**
     * Gets the value of the onLineTx property.
     *
     * @return possible object is {@link AcquirerProtocolParameters4 }
     */
    public AcquirerProtocolParameters4 getOnLineTx() {
        return onLineTx;
    }

    /**
     * Sets the value of the onLineTx property.
     *
     * @param value allowed object is {@link AcquirerProtocolParameters4 }
     * @return this instance, for fluent chaining
     */
    public AcquirerProtocolParameters3 setOnLineTx(AcquirerProtocolParameters4 value) {
        this.onLineTx = value;
        return this;
    }

    /**
     * Gets the value of the offLineTx property.
     *
     * @return possible object is {@link AcquirerProtocolParameters4 }
     */
    public AcquirerProtocolParameters4 getOffLineTx() {
        return offLineTx;
    }

    /**
     * Sets the value of the offLineTx property.
     *
     * @param value allowed object is {@link AcquirerProtocolParameters4 }
     * @return this instance, for fluent chaining
     */
    public AcquirerProtocolParameters3 setOffLineTx(AcquirerProtocolParameters4 value) {
        this.offLineTx = value;
        return this;
    }

    /**
     * Gets the value of the rcncltnXchg property.
     *
     * @return possible object is {@link ExchangeConfiguration2 }
     */
    public ExchangeConfiguration2 getRcncltnXchg() {
        return rcncltnXchg;
    }

    /**
     * Sets the value of the rcncltnXchg property.
     *
     * @param value allowed object is {@link ExchangeConfiguration2 }
     * @return this instance, for fluent chaining
     */
    public AcquirerProtocolParameters3 setRcncltnXchg(ExchangeConfiguration2 value) {
        this.rcncltnXchg = value;
        return this;
    }

    /**
     * Gets the value of the rcncltnByAcqrr property.
     *
     * @return possible object is {@link Boolean } (null when not set)
     */
    public Boolean isRcncltnByAcqrr() {
        return rcncltnByAcqrr;
    }

    /**
     * Sets the value of the rcncltnByAcqrr property.
     *
     * @param value allowed object is {@link Boolean }
     * @return this instance, for fluent chaining
     */
    public AcquirerProtocolParameters3 setRcncltnByAcqrr(Boolean value) {
        this.rcncltnByAcqrr = value;
        return this;
    }

    /**
     * Gets the value of the ttlsPerCcy property.
     *
     * @return possible object is {@link Boolean } (null when not set)
     */
    public Boolean isTtlsPerCcy() {
        return ttlsPerCcy;
    }

    /**
     * Sets the value of the ttlsPerCcy property.
     *
     * @param value allowed object is {@link Boolean }
     * @return this instance, for fluent chaining
     */
    public AcquirerProtocolParameters3 setTtlsPerCcy(Boolean value) {
        this.ttlsPerCcy = value;
        return this;
    }

    /**
     * Gets the value of the spltTtls property.
     *
     * @return possible object is {@link Boolean } (null when not set)
     */
    public Boolean isSpltTtls() {
        return spltTtls;
    }

    /**
     * Sets the value of the spltTtls property.
     *
     * @param value allowed object is {@link Boolean }
     * @return this instance, for fluent chaining
     */
    public AcquirerProtocolParameters3 setSpltTtls(Boolean value) {
        this.spltTtls = value;
        return this;
    }

    /**
     * Gets the value of the cardDataVrfctn property.
     *
     * @return possible object is {@link Boolean } (null when not set)
     */
    public Boolean isCardDataVrfctn() {
        return cardDataVrfctn;
    }

    /**
     * Sets the value of the cardDataVrfctn property.
     *
     * @param value allowed object is {@link Boolean }
     * @return this instance, for fluent chaining
     */
    public AcquirerProtocolParameters3 setCardDataVrfctn(Boolean value) {
        this.cardDataVrfctn = value;
        return this;
    }

    /**
     * Live list accessor for the btchTrfCntt property (batch transfer content
     * types). Mutate the returned list directly.
     *
     * @return the live, lazily-created list of {@link BatchTransactionType1Code }; never null
     */
    public List<BatchTransactionType1Code> getBtchTrfCntt() {
        if (btchTrfCntt == null) {
            btchTrfCntt = new ArrayList<BatchTransactionType1Code>();
        }
        return this.btchTrfCntt;
    }

    /**
     * Live list accessor for the msgItm property (message item conditions).
     * Mutate the returned list directly.
     *
     * @return the live, lazily-created list of {@link MessageItemCondition1 }; never null
     */
    public List<MessageItemCondition1> getMsgItm() {
        if (msgItm == null) {
            msgItm = new ArrayList<MessageItemCondition1>();
        }
        return this.msgItm;
    }

    /**
     * Gets the value of the prtctCardData property (primitive — defaults to false).
     */
    public boolean isPrtctCardData() {
        return prtctCardData;
    }

    /**
     * Sets the value of the prtctCardData property.
     *
     * @return this instance, for fluent chaining
     */
    public AcquirerProtocolParameters3 setPrtctCardData(boolean value) {
        this.prtctCardData = value;
        return this;
    }

    /** Reflection-based multi-line dump of all fields. */
    @Override
    public String toString() {
        return ToStringBuilder.reflectionToString(this, ToStringStyle.MULTI_LINE_STYLE);
    }

    /** Reflection-based field-by-field equality (consistent with {@link #hashCode()}). */
    @Override
    public boolean equals(Object that) {
        return EqualsBuilder.reflectionEquals(this, that);
    }

    /** Reflection-based hash over all fields (consistent with {@link #equals(Object)}). */
    @Override
    public int hashCode() {
        return HashCodeBuilder.reflectionHashCode(this);
    }

    /**
     * Adds a new item to the acqrrId list.
     * @see #getAcqrrId()
     * @return this instance, for fluent chaining
     */
    public AcquirerProtocolParameters3 addAcqrrId(GenericIdentification32 acqrrId) {
        getAcqrrId().add(acqrrId);
        return this;
    }

    /**
     * Adds a new item to the applId list.
     * @see #getApplId()
     * @return this instance, for fluent chaining
     */
    public AcquirerProtocolParameters3 addApplId(String applId) {
        getApplId().add(applId);
        return this;
    }

    /**
     * Adds a new item to the hst list.
     * @see #getHst()
     * @return this instance, for fluent chaining
     */
    public AcquirerProtocolParameters3 addHst(AcquirerHostConfiguration2 hst) {
        getHst().add(hst);
        return this;
    }

    /**
     * Adds a new item to the btchTrfCntt list.
     * @see #getBtchTrfCntt()
     * @return this instance, for fluent chaining
     */
    public AcquirerProtocolParameters3 addBtchTrfCntt(BatchTransactionType1Code btchTrfCntt) {
        getBtchTrfCntt().add(btchTrfCntt);
        return this;
    }

    /**
     * Adds a new item to the msgItm list.
     * @see #getMsgItm()
     * @return this instance, for fluent chaining
     */
    public AcquirerProtocolParameters3 addMsgItm(MessageItemCondition1 msgItm) {
        getMsgItm().add(msgItm);
        return this;
    }
}
package com.alipay.api.response;

import java.util.List;

import com.alipay.api.AlipayResponse;
import com.alipay.api.domain.AntfarmProjectInfo;
import com.alipay.api.internal.mapping.ApiField;
import com.alipay.api.internal.mapping.ApiListField;

/**
 * ALIPAY API: alipay.social.antfarm.projectinfo.query response.
 *
 * @author auto create
 * @since 1.0, 2021-07-05 19:55:22
 */
public class AlipaySocialAntfarmProjectinfoQueryResponse extends AlipayResponse {

    private static final long serialVersionUID = 7793751741953137726L;

    // Projects currently in progress, including the subject items of each
    // ongoing project.
    @ApiListField("project_list")
    @ApiField("antfarm_project_info")
    private List<AntfarmProjectInfo> projectList;

    /** Returns the list of ongoing projects. */
    public List<AntfarmProjectInfo> getProjectList() {
        return this.projectList;
    }

    /** Replaces the list of ongoing projects. */
    public void setProjectList(List<AntfarmProjectInfo> projectList) {
        this.projectList = projectList;
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.ranger.entity; import java.io.Serializable; import java.util.Objects; import javax.persistence.Cacheable; import javax.persistence.Column; import javax.persistence.Entity; import javax.persistence.GeneratedValue; import javax.persistence.GenerationType; import javax.persistence.Id; import javax.persistence.SequenceGenerator; import javax.persistence.Table; import javax.xml.bind.annotation.XmlRootElement; /** * The persistent class for the x_policy_ref_group database table. 
* */ @Entity @Cacheable @XmlRootElement @Table(name="x_policy_ref_group") public class XXPolicyRefGroup implements Serializable { private static final long serialVersionUID = 1L; /** * id of the XXPolicyRefGroup * <ul> * </ul> * */ @Id @SequenceGenerator(name = "x_policy_ref_group_SEQ", sequenceName = "x_policy_ref_group_SEQ", allocationSize = 1) @GeneratedValue(strategy = GenerationType.AUTO, generator = "x_policy_ref_group_SEQ") @Column(name = "id") protected Long id; /** * policyId of the XXPolicyRefGroup * <ul> * </ul> * */ @Column(name = "policy_id") protected Long policyId; /** * groupId of the XXPolicyRefGroup * <ul> * </ul> * */ @Column(name = "group_id") protected Long groupId; /** * groupName of the XXPolicyRefGroup * <ul> * </ul> * */ @Column(name = "group_name") protected String groupName; /** * This method sets the value to the member attribute <b> id</b> . You * cannot set null to the attribute. * * @param id * Value to set member attribute <b> id</b> */ public void setId(Long id) { this.id = id; } /** * Returns the value for the member attribute <b>id</b> * * @return Date - value of member attribute <b>id</b> . */ public Long getId() { return this.id; } /** * This method sets the value to the member attribute <b> policyId</b> . * You cannot set null to the attribute. * * @param policyId * Value to set member attribute <b> policyId</b> */ public void setPolicyId(Long policyId) { this.policyId = policyId; } /** * Returns the value for the member attribute <b>policyId</b> * * @return Date - value of member attribute <b>policyId</b> . */ public Long getPolicyId() { return this.policyId; } /** * This method sets the value to the member attribute <b> groupId</b> . * You cannot set null to the attribute. 
* * @param groupId * Value to set member attribute <b> groupId</b> */ public void setGroupId(Long groupId) { this.groupId = groupId; } /** * Returns the value for the member attribute <b>groupId</b> * * @return Date - value of member attribute <b>groupId</b> . */ public Long getGroupId() { return groupId; } /** * This method sets the value to the member attribute <b> groupName</b> . * You cannot set null to the attribute. * * @param groupName * Value to set member attribute <b> groupName</b> */ public void setGroupName(String groupName) { this.groupName = groupName; } /** * Returns the value for the member attribute <b>groupName</b> * * @return Date - value of member attribute <b>groupName</b> . */ public String getGroupName() { return groupName; } @Override public int hashCode() { return Objects.hash(super.hashCode(), id, policyId, groupId, groupName); } /* * (non-Javadoc) * * @see java.lang.Object#equals(java.lang.Object) */ @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (getClass() != obj.getClass()) { return false; } XXPolicyRefGroup other = (XXPolicyRefGroup) obj; return super.equals(obj) && Objects.equals(id, other.id) && Objects.equals(policyId, other.policyId) && Objects.equals(groupId, other.groupId) && Objects.equals(groupName, other.groupName); } /* * (non-Javadoc) * * @see java.lang.Object#toString() */ @Override public String toString() { return "XXPolicyRefGroup [" + super.toString() + " id=" + id + ", policyId=" + policyId + ", groupId=" + groupId + ", groupName=" + groupName + "]"; } }
/* * This file is part of WebGoat, an Open Web Application Security Project utility. For details, please see http://www.owasp.org/ * * Copyright (c) 2002 - 2019 Bruce Mayhew * * This program is free software; you can redistribute it and/or modify it under the terms of the * GNU General Public License as published by the Free Software Foundation; either version 2 of the * License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without * even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * General Public License for more details. * * You should have received a copy of the GNU General Public License along with this program; if * not, write to the Free Software Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA * 02111-1307, USA. * * Getting Source ============== * * Source for this application is maintained at https://github.com/WebGoat/WebGoat, a repository for free software projects. 
*/ package org.owasp.webgoat.sql_injection.introduction; import org.owasp.webgoat.assignments.AssignmentEndpoint; import org.owasp.webgoat.assignments.AssignmentHints; import org.owasp.webgoat.assignments.AttackResult; import org.springframework.web.bind.annotation.PostMapping; import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.ResponseBody; import org.springframework.web.bind.annotation.RestController; import javax.sql.DataSource; import java.sql.Connection; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; @RestController @AssignmentHints(value = {"SqlStringInjectionHint.10.1", "SqlStringInjectionHint.10.2", "SqlStringInjectionHint.10.3", "SqlStringInjectionHint.10.4", "SqlStringInjectionHint.10.5", "SqlStringInjectionHint.10.6"}) public class SqlInjectionLesson10 extends AssignmentEndpoint { private final DataSource dataSource; public SqlInjectionLesson10(DataSource dataSource) { this.dataSource = dataSource; } @PostMapping("/SqlInjection/attack10") @ResponseBody public AttackResult completed(@RequestParam String action_string) { return injectableQueryAvailability(action_string); } protected AttackResult injectableQueryAvailability(String action) { StringBuffer output = new StringBuffer(); String query = "SELECT * FROM access_log WHERE action LIKE '%" + action + "%'"; try (Connection connection = dataSource.getConnection()) { try { Statement statement = connection.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY); ResultSet results = statement.executeQuery(query); if (results.getStatement() != null) { results.first(); output.append(SqlInjectionLesson8.generateTable(results)); return failed(this).feedback("sql-injection.10.entries").output(output.toString()).build(); } else { if (tableExists(connection)) { return failed(this).feedback("sql-injection.10.entries").output(output.toString()).build(); } else { return 
success(this).feedback("sql-injection.10.success").build(); } } } catch (SQLException e) { if (tableExists(connection)) { return failed(this).feedback("sql-injection.error").output("<span class='feedback-negative'>" + e.getMessage() + "</span><br>" + output.toString()).build(); } else { return success(this).feedback("sql-injection.10.success").build(); } } } catch (Exception e) { return failed(this).output("<span class='feedback-negative'>" + e.getMessage() + "</span>").build(); } } private boolean tableExists(Connection connection) { try { Statement stmt = connection.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY); ResultSet results = stmt.executeQuery("SELECT * FROM access_log"); int cols = results.getMetaData().getColumnCount(); return (cols > 0); } catch (SQLException e) { String errorMsg = e.getMessage(); if (errorMsg.contains("object not found: ACCESS_LOG")) { return false; } else { System.err.println(e.getMessage()); return false; } } } }
/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.android.inputmethod.keyboard.layout;

import static com.android.inputmethod.keyboard.layout.DevanagariLetterConstants.*;

import com.android.inputmethod.keyboard.KeyboardId;
import com.android.inputmethod.keyboard.layout.Hindi.HindiCustomizer;
import com.android.inputmethod.keyboard.layout.Hindi.HindiSymbols;
import com.android.inputmethod.keyboard.layout.expected.ExpectedKey;
import com.android.inputmethod.keyboard.layout.expected.ExpectedKeyboardBuilder;

import java.util.Locale;

/**
 * The nepali_romanized layout.
 *
 * Expected-key tables for the Nepali (romanized) keyboard, used by layout tests
 * to verify the generated keyboard. Row tables list keys left-to-right; digit
 * "more keys" ride on the top row.
 */
public final class NepaliRomanized extends LayoutBase {
    private static final String LAYOUT_NAME = "nepali_romanized";

    public NepaliRomanized(final LayoutCustomizer customizer) {
        // Shares the Hindi symbol layers; only the alphabet tables below differ.
        super(customizer, HindiSymbols.class, SymbolsShifted.class);
    }

    @Override
    public String getName() {
        return LAYOUT_NAME;
    }

    /** Customizer: Nepali currency key and space row with ZWNJ/ZWJ access. */
    public static class NepaliRomanizedCustomizer extends HindiCustomizer {
        public NepaliRomanizedCustomizer(final Locale locale) {
            super(locale);
        }

        @Override
        public ExpectedKey getCurrencyKey() {
            return CURRENCY_NEPALI;
        }

        @Override
        public ExpectedKey[] getSpaceKeys(final boolean isPhone) {
            return joinKeys(LANGUAGE_SWITCH_KEY, SPACE_KEY, key(ZWNJ_KEY, ZWJ_KEY));
        }

        // U+0930/U+0941/U+002E "रु." NEPALESE RUPEE SIGN
        private static final ExpectedKey CURRENCY_NEPALI = key("\u0930\u0941\u002E",
                Symbols.DOLLAR_SIGN, Symbols.CENT_SIGN, Symbols.EURO_SIGN,
                Symbols.POUND_SIGN, Symbols.YEN_SIGN, Symbols.PESO_SIGN);
    }

    @Override
    ExpectedKey[][] getCommonAlphabetLayout(boolean isPhone) {
        return ALPHABET_COMMON;
    }

    @Override
    ExpectedKey[][] getCommonAlphabetShiftLayout(boolean isPhone, final int elementId) {
        // Automatic shift shows the same layout as unshifted; only manual shift
        // switches to the shifted table.
        if (elementId == KeyboardId.ELEMENT_ALPHABET_AUTOMATIC_SHIFTED) {
            return getCommonAlphabetLayout(isPhone);
        }
        return ALPHABET_SHIFTED_COMMON;
    }

    private static final ExpectedKey[][] ALPHABET_COMMON = new ExpectedKeyboardBuilder()
            .setKeysOfRow(1,
                    // U+091F: "ट" DEVANAGARI LETTER TTA
                    // U+0967: "१" DEVANAGARI DIGIT ONE
                    // U+093C: "़" DEVANAGARI SIGN NUKTA
                    key("\u091F", joinMoreKeys("\u0967", "1", key(SIGN_NUKTA, "\u093C"))),
                    // U+094C: "ौ" DEVANAGARI VOWEL SIGN AU
                    // U+0968: "२" DEVANAGARI DIGIT TWO
                    key(VOWEL_SIGN_AU, "\u094C", joinMoreKeys("\u0968", "2")),
                    // U+0947: "े" DEVANAGARI VOWEL SIGN E
                    // U+0969: "३" DEVANAGARI DIGIT THREE
                    key(VOWEL_SIGN_E, "\u0947", joinMoreKeys("\u0969", "3")),
                    // U+0930: "र" DEVANAGARI LETTER RA
                    // U+096A: "४" DEVANAGARI DIGIT FOUR
                    key("\u0930", joinMoreKeys("\u096A", "4")),
                    // U+0924: "त" DEVANAGARI LETTER TA
                    // U+096B: "५" DEVANAGARI DIGIT FIVE
                    key("\u0924", joinMoreKeys("\u096B", "5")),
                    // U+092F: "य" DEVANAGARI LETTER YA
                    // U+096C: "६" DEVANAGARI DIGIT SIX
                    key("\u092F", joinMoreKeys("\u096C", "6")),
                    // U+0941: "ु" DEVANAGARI VOWEL SIGN U
                    // U+096D: "७" DEVANAGARI DIGIT SEVEN
                    key(VOWEL_SIGN_U, "\u0941", joinMoreKeys("\u096D", "7")),
                    // U+093F: "ि" DEVANAGARI VOWEL SIGN I
                    // U+096E: "८" DEVANAGARI DIGIT EIGHT
                    key(VOWEL_SIGN_I, "\u093F", joinMoreKeys("\u096E", "8")),
                    // U+094B: "ो" DEVANAGARI VOWEL SIGN O
                    // U+096F: "९" DEVANAGARI DIGIT NINE
                    key(VOWEL_SIGN_O, "\u094B", joinMoreKeys("\u096F", "9")),
                    // U+092A: "प" DEVANAGARI LETTER PA
                    // U+0966: "०" DEVANAGARI DIGIT ZERO
                    key("\u092A", joinMoreKeys("\u0966", "0")),
                    // U+0907: "इ" DEVANAGARI LETTER I
                    "\u0907")
            .setKeysOfRow(2,
                    // U+093E: "ा" DEVANAGARI VOWEL SIGN AA
                    key(VOWEL_SIGN_AA, "\u093E"),
                    // U+0938: "स" DEVANAGARI LETTER SA
                    // U+0926: "द" DEVANAGARI LETTER DA
                    // U+0909: "उ" DEVANAGARI LETTER U
                    // U+0917: "ग" DEVANAGARI LETTER GA
                    // U+0939: "ह" DEVANAGARI LETTER HA
                    // U+091C: "ज" DEVANAGARI LETTER JA
                    // U+0915: "क" DEVANAGARI LETTER KA
                    // U+0932: "ल" DEVANAGARI LETTER LA
                    // U+090F: "ए" DEVANAGARI LETTER E
                    // U+0950: "ॐ" DEVANAGARI OM
                    "\u0938", "\u0926", "\u0909", "\u0917", "\u0939", "\u091C", "\u0915",
                    "\u0932", "\u090F", "\u0950")
            .setKeysOfRow(3,
                    // U+0937: "ष" DEVANAGARI LETTER SSA
                    // U+0921: "ड" DEVANAGARI LETTER DDA
                    // U+091A: "च" DEVANAGARI LETTER CA
                    // U+0935: "व" DEVANAGARI LETTER VA
                    // U+092C: "ब" DEVANAGARI LETTER BA
                    // U+0928: "न" DEVANAGARI LETTER NA
                    // U+092E: "म" DEVANAGARI LETTER MA
                    "\u0937", "\u0921", "\u091A", "\u0935", "\u092C", "\u0928", "\u092E",
                    // U+0964: "।" DEVANAGARI DANDA
                    // U+093D: "ऽ" DEVANAGARI SIGN AVAGRAHA
                    key("\u0964", moreKey("\u093D")),
                    // U+094D: "्" DEVANAGARI SIGN VIRAMA
                    key(SIGN_VIRAMA, "\u094D"))
            .build();

    private static final ExpectedKey[][] ALPHABET_SHIFTED_COMMON = new ExpectedKeyboardBuilder()
            .setKeysOfRow(1,
                    // U+0920: "ठ" DEVANAGARI LETTER TTHA
                    // U+0914: "औ" DEVANAGARI LETTER AU
                    "\u0920", "\u0914",
                    // U+0948: "ै" DEVANAGARI VOWEL SIGN AI
                    key(VOWEL_SIGN_AI, "\u0948"),
                    // U+0943: "ृ" DEVANAGARI VOWEL SIGN VOCALIC R
                    key(VOWEL_SIGN_VOCALIC_R, "\u0943"),
                    // U+0925: "थ" DEVANAGARI LETTER THA
                    // U+091E: "ञ" DEVANAGARI LETTER NYA
                    "\u0925", "\u091E",
                    // U+0942: "ू" DEVANAGARI VOWEL SIGN UU
                    key(VOWEL_SIGN_UU, "\u0942"),
                    // U+0940: "ी" DEVANAGARI VOWEL SIGN II
                    key(VOWEL_SIGN_II, "\u0940"),
                    // U+0913: "ओ" DEVANAGARI LETTER O
                    // U+092B: "फ" DEVANAGARI LETTER PHA
                    // U+0908: "ई" DEVANAGARI LETTER II
                    "\u0913", "\u092B", "\u0908")
            .setKeysOfRow(2,
                    // U+0906: "आ" DEVANAGARI LETTER AA
                    // U+0936: "श" DEVANAGARI LETTER SHA
                    // U+0927: "ध" DEVANAGARI LETTER DHA
                    // U+090A: "ऊ" DEVANAGARI LETTER UU
                    // U+0918: "घ" DEVANAGARI LETTER GHA
                    // U+0905: "अ" DEVANAGARI LETTER A
                    // U+091D: "झ" DEVANAGARI LETTER JHA
                    // U+0916: "ख" DEVANAGARI LETTER KHA
                    // U+0965: "॥" DEVANAGARI DOUBLE DANDA
                    // U+0910: "ऐ" DEVANAGARI LETTER AI
                    // U+0903: "ः" DEVANAGARI SIGN VISARGA
                    "\u0906", "\u0936", "\u0927", "\u090A", "\u0918", "\u0905", "\u091D",
                    "\u0916", "\u0965", "\u0910",
                    key(SIGN_VISARGA, "\u0903"))
            .setKeysOfRow(3,
                    // U+090B: "ऋ" DEVANAGARI LETTER VOCALIC R
                    // U+0922: "ढ" DEVANAGARI LETTER DDHA
                    // U+091B: "छ" DEVANAGARI LETTER CHA
                    "\u090B", "\u0922", "\u091B",
                    // U+0901: "ँ" DEVANAGARI SIGN CANDRABINDU
                    key(SIGN_CANDRABINDU, "\u0901"),
                    // U+092D: "भ" DEVANAGARI LETTER BHA
                    // U+0923: "ण" DEVANAGARI LETTER NNA
                    "\u092D", "\u0923",
                    // U+0902: "ं" DEVANAGARI SIGN ANUSVARA
                    key(SIGN_ANUSVARA, "\u0902"),
                    // U+0919: "ङ" DEVANAGARI LETTER NGA
                    "\u0919",
                    // U+094D: "्" DEVANAGARI SIGN VIRAMA
                    key(SIGN_VIRAMA, "\u094D"))
            .build();
}
/********************************************************************************* * * * The MIT License (MIT) * * * * Copyright (c) 2015-2022 aoju.org and other contributors. * * * * Permission is hereby granted, free of charge, to any person obtaining a copy * * of this software and associated documentation files (the "Software"), to deal * * in the Software without restriction, including without limitation the rights * * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * * copies of the Software, and to permit persons to whom the Software is * * furnished to do so, subject to the following conditions: * * * * The above copyright notice and this permission notice shall be included in * * all copies or substantial portions of the Software. * * * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * * THE SOFTWARE. 
* * * ********************************************************************************/ package org.aoju.bus.tracer.config; import java.io.IOException; import java.io.InputStream; import java.net.URL; import java.util.Enumeration; import java.util.Properties; /** * @author Kimi Liu * @version 6.5.0 * @since Java 17+ */ public final class TracePropertiesFileLoader { public Properties loadTraceProperties(String TracePropertiesFile) throws IOException { final Properties properties = new Properties(); final ClassLoader loader = Thread.currentThread().getContextClassLoader(); final Enumeration<URL> TracePropertyFiles = loader.getResources(TracePropertiesFile); while (TracePropertyFiles.hasMoreElements()) { final URL url = TracePropertyFiles.nextElement(); try (InputStream stream = url.openStream()) { properties.load(stream); } } return properties; } }
/*
 * Copyright 1999-2011 Alibaba Group.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.alibaba.dubbo.rpc.cluster.directory;

import java.util.List;

import com.alibaba.dubbo.common.URL;
import com.alibaba.dubbo.rpc.Invocation;
import com.alibaba.dubbo.rpc.Invoker;
import com.alibaba.dubbo.rpc.RpcException;
import com.alibaba.dubbo.rpc.cluster.Router;

/**
 * StaticDirectory — a directory backed by a fixed, caller-supplied invoker list
 * that never changes after construction.
 *
 * @author william.liangf
 */
public class StaticDirectory<T> extends AbstractDirectory<T> {

    private final List<Invoker<T>> invokers;

    public StaticDirectory(List<Invoker<T>> invokers) {
        this(null, invokers, null);
    }

    public StaticDirectory(List<Invoker<T>> invokers, List<Router> routers) {
        this(null, invokers, routers);
    }

    public StaticDirectory(URL url, List<Invoker<T>> invokers) {
        this(url, invokers, null);
    }

    public StaticDirectory(URL url, List<Invoker<T>> invokers, List<Router> routers) {
        // When no URL is given, borrow the first invoker's URL for the directory.
        super((url == null && invokers != null && !invokers.isEmpty())
                ? invokers.get(0).getUrl() : url, routers);
        if (invokers == null || invokers.isEmpty()) {
            throw new IllegalArgumentException("invokers == null");
        }
        this.invokers = invokers;
    }

    public Class<T> getInterface() {
        return invokers.get(0).getInterface();
    }

    public boolean isAvailable() {
        if (isDestroyed()) {
            return false;
        }
        // Available as soon as any single invoker is available.
        for (Invoker<T> candidate : invokers) {
            if (candidate.isAvailable()) {
                return true;
            }
        }
        return false;
    }

    public void destroy() {
        if (isDestroyed()) {
            return;
        }
        super.destroy();
        for (Invoker<T> candidate : invokers) {
            candidate.destroy();
        }
        invokers.clear();
    }

    @Override
    protected List<Invoker<T>> doList(Invocation invocation) throws RpcException {
        // Static list: routing/filtering is handled by the superclass.
        return invokers;
    }
}
// Copyright 2016 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package com.google.firebase.storage.internal.cpp;

import com.google.firebase.storage.StreamDownloadTask;
import java.io.IOException;
import java.io.InputStream;

/** A stream downloader into a C++ byte array. */
public class CppByteDownloader implements StreamDownloadTask.StreamProcessor {
  protected long cppBufferPointer = 0;
  protected long cppBufferSize = 0;
  protected final Object lockObject = new Object();

  /**
   * Construct a CppByteDownloader. First parameter is a C++ pointers, second is a number of bytes.
   */
  public CppByteDownloader(long cppBufferPointer, long cppBufferSize) {
    this.cppBufferPointer = cppBufferPointer;
    this.cppBufferSize = cppBufferSize;
  }

  /** Discard native pointers from this instance. */
  public void discardPointers() {
    synchronized (lockObject) {
      this.cppBufferPointer = 0;
      this.cppBufferSize = 0;
    }
  }

  @Override
  public void doInBackground(StreamDownloadTask.TaskSnapshot state, InputStream stream)
      throws IOException {
    try {
      byte[] chunk = new byte[16384];
      long written = 0;
      for (int count = stream.read(chunk, 0, chunk.length);
          count != -1;
          count = stream.read(chunk, 0, chunk.length)) {
        // Guard first: refuse any chunk that would overrun the native buffer.
        if (written + count > this.cppBufferSize) {
          throw new IndexOutOfBoundsException("The maximum allowed buffer size was exceeded.");
        }
        writeBytesToBuffer(written, chunk, count);
        written += count;
      }
    } finally {
      stream.close();
    }
  }

  public void writeBytesToBuffer(long bufferOffset, byte[] bytes, long numBytes) {
    synchronized (lockObject) {
      // A zero pointer means discardPointers() already ran; silently drop the write.
      if (this.cppBufferPointer == 0) {
        return;
      }
      writeBytes(this.cppBufferPointer, this.cppBufferSize, bufferOffset, bytes, numBytes);
    }
  }

  private static native void writeBytes(
      long cppBufferPointer, long cppBufferSize, long cppBufferOffset, byte[] bytes, long numBytes);
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package test.org.apache.commons.math.exception;

import test.org.apache.commons.math.exception.util.ExceptionContextProvider;
import test.org.apache.commons.math.exception.util.LocalizedFormats;

/**
 * Class to signal parse failures.
 *
 * @since 2.2
 * @version $Id$
 */
public class MathParseException extends MathIllegalStateException
    implements ExceptionContextProvider {
    /** Serializable version Id. */
    private static final long serialVersionUID = -6024911025449780478L;

    /**
     * Signals that a string could not be parsed at all.
     *
     * @param wrong Bad string representation of the object.
     * @param position Index, in the {@code wrong} string, that caused the
     * parsing to fail.
     */
    public MathParseException(String wrong, int position) {
        getContext().addMessage(LocalizedFormats.CANNOT_PARSE,
                                wrong, Integer.valueOf(position));
    }

    /**
     * Signals that a string could not be parsed as a specific type.
     *
     * @param wrong Bad string representation of the object.
     * @param position Index, in the {@code wrong} string, that caused the
     * parsing to fail.
     * @param type Class of the object supposedly represented by the
     * {@code wrong} string.
     */
    public MathParseException(String wrong, int position, Class<?> type) {
        getContext().addMessage(LocalizedFormats.CANNOT_PARSE_AS_TYPE,
                                wrong, Integer.valueOf(position), type.getName());
    }
}
package com.thinkbiganalytics.metadata.jpa.common;

/*-
 * #%L
 * kylo-operational-metadata-jpa
 * %%
 * Copyright (C) 2017 ThinkBig Analytics
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

/**
 * Marks a method as subject to entity access control; retained at runtime so
 * interceptors can discover it reflectively.
 */
@Target(ElementType.METHOD)
@Retention(RetentionPolicy.RUNTIME)
public @interface EntityAccessControlled {

    /** Whether access control is enforced for the annotated method (on by default). */
    boolean enabled() default true;
}
/* * Copyright 2018 NAVER Corp. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.navercorp.pinpoint.plugin.log4j2; import com.navercorp.pinpoint.plugin.AgentPath; import com.navercorp.pinpoint.test.plugin.*; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.ThreadContext; import org.junit.Assert; import org.junit.Test; import org.junit.runner.RunWith; @RunWith(PinpointPluginTestSuite.class) @PinpointAgent(AgentPath.PATH) @PinpointConfig("pinpoint-spring-bean-test.config") @JvmArgument("-Dlog4j2.contextSelector=org.apache.logging.log4j.core.async.AsyncLoggerContextSelector") @JvmVersion(7) @Dependency({"org.apache.logging.log4j:log4j-core:[2.0,)", "com.lmax:disruptor:[3.4.2]"}) public class Log4j2ForAsyncLoggerIT { @Test public void test() { Logger logger = LogManager.getLogger(); logger.error("for log4j2 plugin async logger test"); Assert.assertNotNull(ThreadContext.get("PtxId")); Assert.assertNotNull(ThreadContext.get("PspanId")); } }
package software.amazon.lightsail.loadbalancertlscertificate;

// Handler configuration for the AWS::Lightsail::LoadBalancerTlsCertificate
// resource type; names the JSON resource-schema file used by the handlers.
class Configuration extends BaseConfiguration {

    public Configuration() {
        // Schema file name passed to BaseConfiguration — presumably resolved
        // from the handler package's resources; confirm against BaseConfiguration.
        super("aws-lightsail-loadbalancertlscertificate.json");
    }
}
package com.ysyx.commons.wx.requests.wxmsg; import com.ysyx.commons.wx.TextHttpRequest; import com.ysyx.commons.wx.WXCommonResult; import com.ysyx.commons.wx.annotations.Post; import com.ysyx.commons.wx.annotations.QueryString; import org.codehaus.jackson.annotate.JsonProperty; /** * add kf request. * * @author duanbn */ @Post("/customservice/kfaccount/add") public class AddKfAccountRequest extends TextHttpRequest<WXCommonResult> { @QueryString("access_token") private final String accessToken; @JsonProperty("kf_account") private final String kfAccount; @JsonProperty("nickname") private final String nickName; @JsonProperty("password") private String password; /** * * @param accessToken * @param kfAccount * @param nickName */ public AddKfAccountRequest(final String accessToken, final String kfAccount, final String nickName) { this.accessToken = accessToken; this.kfAccount = kfAccount; this.nickName = nickName; } /** * */ public String getPassword() { return this.password; // FIXME: fast json encode error. // return DigestUtil.md5(this.password); } /** * */ public void setPassword(final String password) { this.password = password; } /** * * @return */ public String getKfAccount() { return kfAccount; } /** * * @return */ public String getNickName() { return nickName; } }
/** * Copyright (C) 2013-2017 Expedia Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.hotels.styx.api.io; import com.hotels.styx.api.Resource; import org.testng.annotations.Test; import static com.hotels.styx.api.io.ResourcePathMatcher.resourceWithPath; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.containsInAnyOrder; public class ClasspathResourceIndexTest { @Test public void listsByPathAndSuffix() { ClassLoader classLoader = ClasspathResourceIndexTest.class.getClassLoader(); ClasspathResourceIndex index = new ClasspathResourceIndex(classLoader); Iterable<Resource> resources = index.list("com/hotels/styx/api/io", ".txt"); assertThat(resources, containsInAnyOrder( resourceWithPath("resource.txt"), resourceWithPath("subdirectory/subdir-resource.txt") )); } }
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.cxf.binding.corba.interceptors;

import java.util.Iterator;
import java.util.List;

import javax.xml.namespace.QName;
import javax.xml.stream.XMLStreamReader;

import org.apache.cxf.binding.corba.CorbaDestination;
import org.apache.cxf.binding.corba.CorbaMessage;
import org.apache.cxf.binding.corba.CorbaStreamable;
import org.apache.cxf.binding.corba.CorbaTypeMap;
import org.apache.cxf.binding.corba.runtime.CorbaStreamReader;
import org.apache.cxf.binding.corba.types.CorbaHandlerUtils;
import org.apache.cxf.binding.corba.types.CorbaObjectHandler;
import org.apache.cxf.binding.corba.types.CorbaTypeEventProducer;
import org.apache.cxf.binding.corba.types.HandlerIterator;
import org.apache.cxf.binding.corba.types.ParameterEventProducer;
import org.apache.cxf.binding.corba.types.WrappedParameterSequenceEventProducer;
import org.apache.cxf.binding.corba.utils.ContextUtils;
import org.apache.cxf.binding.corba.utils.CorbaAnyHelper;
import org.apache.cxf.binding.corba.utils.CorbaUtils;
import org.apache.cxf.binding.corba.wsdl.ModeType;
import org.apache.cxf.binding.corba.wsdl.OperationType;
import org.apache.cxf.binding.corba.wsdl.ParamType;
import org.apache.cxf.endpoint.Endpoint;
import org.apache.cxf.interceptor.Fault;
import org.apache.cxf.message.Exchange;
import org.apache.cxf.message.Message;
import org.apache.cxf.phase.AbstractPhaseInterceptor;
import org.apache.cxf.phase.Phase;
import org.apache.cxf.service.model.BindingInfo;
import org.apache.cxf.service.model.BindingMessageInfo;
import org.apache.cxf.service.model.BindingOperationInfo;
import org.apache.cxf.service.model.EndpointInfo;
import org.apache.cxf.service.model.InterfaceInfo;
import org.apache.cxf.service.model.MessageInfo;
import org.apache.cxf.service.model.MessagePartInfo;
import org.apache.cxf.service.model.OperationInfo;
import org.apache.cxf.service.model.ServiceInfo;
import org.apache.cxf.ws.addressing.EndpointReferenceType;
import org.omg.CORBA.Any;
import org.omg.CORBA.NVList;
import org.omg.CORBA.ORB;
import org.omg.CORBA.ServerRequest;

/**
 * PRE_STREAM-phase interceptor for the CXF CORBA binding. It installs an
 * {@link XMLStreamReader} backed by CORBA streamables onto the incoming
 * message, so downstream interceptors can read the CORBA payload as XML
 * events. It dispatches on message direction: client side (requestor)
 * messages are replies, server side messages are requests.
 */
public class CorbaStreamInInterceptor extends AbstractPhaseInterceptor<Message> {

    public CorbaStreamInInterceptor() {
        // Runs before any stream-based interceptors so the reader is in place.
        super(Phase.PRE_STREAM);
    }

    public void handleMessage(Message message) throws Fault {
        // On the client (requestor) side an incoming message is a reply;
        // on the server side it is a request.
        if (ContextUtils.isRequestor(message)) {
            handleReply(message);
        } else {
            handleRequest(message);
        }
    }

    /**
     * Client side: wires an event producer over the out-message's handler
     * iterator so the reply parameters can be read as XML. If the outcome was
     * an exception, aborts the chain and hands the message to the fault
     * observer instead.
     */
    private void handleReply(Message msg) {
        ORB orb;
        ServiceInfo service;
        CorbaDestination destination;
        // The destination may live on the message itself or on its exchange.
        if (msg.getDestination() != null) {
            destination = (CorbaDestination) msg.getDestination();
        } else {
            destination = (CorbaDestination) msg.getExchange().getDestination();
        }
        service = destination.getBindingInfo().getService();
        CorbaMessage message = (CorbaMessage) msg;
        if (message.getStreamableException() != null || message.getSystemException() != null) {
            // Propagate the exception captured on the out message, then route
            // the message through the fault chain instead of the normal one.
            message.setContent(Exception.class,
                               message.getExchange().getOutMessage().getContent(Exception.class));
            Endpoint ep = message.getExchange().getEndpoint();
            message.getInterceptorChain().abort();
            if (ep.getInFaultObserver() != null) {
                ep.getInFaultObserver().onMessage(message);
                return;
            }
            // NOTE(review): when no fault observer is registered, processing
            // falls through and continues on the aborted chain — confirm this
            // is intentional.
        }
        CorbaMessage outMessage = (CorbaMessage) message.getExchange().getOutMessage();
        orb = message.getExchange().get(ORB.class);
        // false: iterate the reply (out/return) handlers of the out message.
        HandlerIterator paramIterator = new HandlerIterator(outMessage, false);
        final CorbaTypeEventProducer eventProducer;
        Exchange exchange = message.getExchange();
        BindingOperationInfo bindingOpInfo = exchange.getBindingOperationInfo();
        BindingMessageInfo msgInfo = bindingOpInfo.getOutput();
        boolean wrap = false;
        if (bindingOpInfo.isUnwrappedCapable()) {
            wrap = true;
        }
        if (wrap) {
            // wrapper element around our args
            // REVISIT, bravi, message name same as the element name
            QName wrapperElementQName = msgInfo.getMessageInfo().getName();
            eventProducer = new WrappedParameterSequenceEventProducer(wrapperElementQName,
                                                                      paramIterator,
                                                                      service,
                                                                      orb);
        } else {
            eventProducer = new ParameterEventProducer(paramIterator,
                                                       service,
                                                       orb);
        }
        // Expose the CORBA data as an XMLStreamReader for the rest of the chain.
        CorbaStreamReader reader = new CorbaStreamReader(eventProducer);
        message.setContent(XMLStreamReader.class, reader);
    }

    /**
     * Server side: resolves the binding operation by the operation name stored
     * on the exchange, prepares the DII argument list for the CORBA
     * ServerRequest, and installs an XML event reader over the "in" handlers.
     *
     * @throws RuntimeException if no binding operation matches the name
     */
    private void handleRequest(Message msg) {
        ORB orb;
        ServiceInfo service;
        CorbaDestination destination;
        if (msg.getDestination() != null) {
            destination = (CorbaDestination) msg.getDestination();
        } else {
            destination = (CorbaDestination) msg.getExchange().getDestination();
        }
        service = destination.getBindingInfo().getService();
        CorbaMessage message = (CorbaMessage) msg;
        Exchange exchange = message.getExchange();
        CorbaTypeMap typeMap = message.getCorbaTypeMap();
        BindingInfo bInfo = destination.getBindingInfo();
        InterfaceInfo info = bInfo.getInterface();
        // The operation name was stashed on the exchange under String.class.
        String opName = exchange.get(String.class);
        Iterator<BindingOperationInfo> i = bInfo.getOperations().iterator();
        OperationType opType = null;
        BindingOperationInfo bopInfo = null;
        QName opQName = null;
        // Linear scan for the binding operation whose local name matches.
        while (i.hasNext()) {
            bopInfo = i.next();
            if (bopInfo.getName().getLocalPart().equals(opName)) {
                opType = bopInfo.getExtensor(OperationType.class);
                opQName = bopInfo.getName();
                break;
            }
        }
        if (opType == null) {
            throw new RuntimeException("Couldn't find the binding operation for " + opName);
        }
        orb = exchange.get(ORB.class);
        ServerRequest request = exchange.get(ServerRequest.class);
        // Build the NVList and ask the CORBA runtime to unmarshal into it.
        NVList list = prepareArguments(message, info, opType, opQName, typeMap,
                                       destination, service);
        request.arguments(list);
        message.setList(list);
        // true: iterate the request ("in") handlers of this message.
        HandlerIterator paramIterator = new HandlerIterator(message, true);
        final CorbaTypeEventProducer eventProducer;
        BindingMessageInfo msgInfo = bopInfo.getInput();
        boolean wrap = false;
        if (bopInfo.isUnwrappedCapable()) {
            wrap = true;
        }
        if (wrap) {
            // wrapper element around our args
            QName wrapperElementQName = msgInfo.getMessageInfo().getName();
            eventProducer = new WrappedParameterSequenceEventProducer(wrapperElementQName,
                                                                      paramIterator,
                                                                      service,
                                                                      orb);
        } else {
            eventProducer = new ParameterEventProducer(paramIterator,
                                                       service,
                                                       orb);
        }
        CorbaStreamReader reader = new CorbaStreamReader(eventProducer);
        message.setContent(XMLStreamReader.class, reader);
    }

    /**
     * Populates the exchange with the model objects (binding, interface,
     * endpoint, operation, message info) for this operation and delegates to
     * {@link #prepareDIIArgsList} to build the CORBA argument list.
     */
    protected NVList prepareArguments(CorbaMessage corbaMsg,
                                      InterfaceInfo info,
                                      OperationType opType,
                                      QName opQName,
                                      CorbaTypeMap typeMap,
                                      CorbaDestination destination,
                                      ServiceInfo service) {
        BindingInfo bInfo = destination.getBindingInfo();
        EndpointInfo eptInfo = destination.getEndPointInfo();
        BindingOperationInfo bOpInfo = bInfo.getOperation(opQName);
        OperationInfo opInfo = bOpInfo.getOperationInfo();
        Exchange exg = corbaMsg.getExchange();
        // Record the resolved model on the exchange so later interceptors and
        // the service invoker can find it by type.
        exg.put(BindingInfo.class, bInfo);
        exg.put(InterfaceInfo.class, info);
        exg.put(EndpointInfo.class, eptInfo);
        exg.put(EndpointReferenceType.class, destination.getAddress());
        exg.put(ServiceInfo.class, service);
        exg.put(BindingOperationInfo.class, bOpInfo);
        exg.put(OperationInfo.class, opInfo);
        exg.put(MessageInfo.class, opInfo.getInput());
        exg.put(String.class, opQName.getLocalPart());
        exg.setInMessage(corbaMsg);
        corbaMsg.put(MessageInfo.class, opInfo.getInput());
        List<ParamType> paramTypes = opType.getParam();
        CorbaStreamable[] arguments = new CorbaStreamable[paramTypes.size()];
        return prepareDIIArgsList(corbaMsg, bOpInfo, arguments, paramTypes, typeMap,
                                  exg.get(ORB.class), service);
    }

    /**
     * Builds the DII {@link NVList}: one streamable + Any per IDL parameter,
     * with the parameter's QName resolved either against the wrapper
     * namespace (wrapped style) or against the positional message part
     * (bare style). "in"/"out"/inout modes map to ARG_IN/ARG_OUT/ARG_INOUT.
     *
     * @throws RuntimeException wrapping any failure during list construction
     */
    protected NVList prepareDIIArgsList(CorbaMessage corbaMsg,
                                        BindingOperationInfo boi,
                                        CorbaStreamable[] streamables,
                                        List<ParamType> paramTypes,
                                        CorbaTypeMap map,
                                        ORB orb,
                                        ServiceInfo service) {
        try {
            // Build the list of DII arguments, returns, and exceptions
            NVList list = orb.create_list(streamables.length);
            OperationInfo opInfo = boi.getOperationInfo();
            MessageInfo input = opInfo.getInput();
            MessageInfo output = opInfo.getOutput();
            String inWrapNSUri = null;
            String outWrapNSUri = null;
            boolean wrap = false;
            if (boi.isUnwrappedCapable()) {
                wrap = true;
                // Unqualified wrapped parameters get an empty namespace.
                if (input != null) {
                    inWrapNSUri = getWrappedParamNamespace(input);
                    if (!CorbaUtils.isElementFormQualified(service, inWrapNSUri)) {
                        inWrapNSUri = "";
                    }
                }
                if (output != null) {
                    outWrapNSUri = getWrappedParamNamespace(output);
                    if (!CorbaUtils.isElementFormQualified(service, outWrapNSUri)) {
                        outWrapNSUri = "";
                    }
                }
            }
            // Separate cursors: "in" params index into the input message parts,
            // everything else indexes into the output message parts.
            int inMsgIndex = 0;
            int outMsgIndex = 0;
            for (int i = 0; i < paramTypes.size(); i++) {
                ParamType param = paramTypes.get(i);
                QName paramIdlType = param.getIdltype();
                QName paramName;
                ModeType paramMode = param.getMode();
                if ("in".equals(paramMode.value())) {
                    if (wrap) {
                        paramName = new QName(inWrapNSUri, param.getName());
                    } else {
                        paramName = getMessageParamQName(input, param.getName(), inMsgIndex);
                        inMsgIndex++;
                    }
                } else {
                    if (wrap) {
                        paramName = new QName(outWrapNSUri, param.getName());
                    } else {
                        paramName = getMessageParamQName(output, param.getName(), outMsgIndex);
                        outMsgIndex++;
                    }
                }
                CorbaObjectHandler obj =
                    CorbaHandlerUtils.initializeObjectHandler(orb, paramName, paramIdlType,
                                                              map, service);
                streamables[i] = corbaMsg.createStreamableObject(obj, paramName);
                Any value = CorbaAnyHelper.createAny(orb, corbaMsg.getExchange().getBus());
                if ("in".equals(paramMode.value())) {
                    streamables[i].setMode(org.omg.CORBA.ARG_IN.value);
                    streamables[i].getObject().setIntoAny(value, streamables[i], false);
                } else if ("out".equals(paramMode.value())) {
                    streamables[i].setMode(org.omg.CORBA.ARG_OUT.value);
                    streamables[i].getObject().setIntoAny(value, streamables[i], true);
                } else {
                    streamables[i].setMode(org.omg.CORBA.ARG_INOUT.value);
                    streamables[i].getObject().setIntoAny(value, streamables[i], false);
                }
                list.add_value(streamables[i].getName(), value, streamables[i].getMode());
                corbaMsg.addStreamableArgument(streamables[i]);
            }
            return list;
        } catch (Exception ex) {
            throw new RuntimeException(ex);
        }
    }

    /**
     * Resolves the QName of the {@code index}-th message part: the element
     * QName for element parts, otherwise the part name. Returns null if the
     * part lookup yields null.
     */
    protected QName getMessageParamQName(MessageInfo msgInfo,
                                         String paramName,
                                         int index) {
        QName paramQName = null;
        MessagePartInfo part = msgInfo.getMessageParts().get(index);
        if (part != null && part.isElement()) {
            paramQName = part.getElementQName();
        } else if (part != null) {
            paramQName = part.getName();
        }
        return paramQName;
    }

    /**
     * Namespace URI of the (single) wrapper part of a wrapped message,
     * taken from the element QName when the part is an element.
     */
    protected String getWrappedParamNamespace(MessageInfo msgInfo) {
        MessagePartInfo part = msgInfo.getMessageParts().get(0);
        if (part.isElement()) {
            return part.getElementQName().getNamespaceURI();
        }
        return part.getName().getNamespaceURI();
    }
}
package io.onedev.server.web.editable.buildspec.job.choice;

import java.util.LinkedHashMap;
import java.util.Map;

import org.apache.wicket.model.IModel;
import org.apache.wicket.model.Model;
import org.apache.wicket.util.convert.ConversionException;

import io.onedev.server.model.Project;
import io.onedev.server.web.component.job.JobSingleChoice;
import io.onedev.server.web.editable.PropertyDescriptor;
import io.onedev.server.web.editable.PropertyEditor;

/**
 * Property editor that lets the user pick a single job (by name) from the
 * current project's build spec. The edited value is the selected job name,
 * or null when nothing is selected.
 */
@SuppressWarnings("serial")
public class JobSingleChoiceEditor extends PropertyEditor<String> {

    // The underlying single-select dropdown component.
    private JobSingleChoice input;

    public JobSingleChoiceEditor(String id, PropertyDescriptor propertyDescriptor,
                                 IModel<String> propertyModel) {
        super(id, propertyDescriptor, propertyModel);
    }

    @Override
    protected void onInitialize() {
        super.onInitialize();

        // Job names are both key and display label; LinkedHashMap preserves
        // the order they appear in the project's build spec.
        Map<String, String> choices = new LinkedHashMap<>();
        for (String jobName: Project.get().getJobNames())
            choices.put(jobName, jobName);

        // Drop a stale selection that no longer names an existing job.
        String selection = getModelObject();
        if (!choices.containsKey(selection))
            selection = null;

        input = new JobSingleChoice("input", Model.of(selection), Model.ofMap(choices)) {

            @Override
            protected void onInitialize() {
                super.onInitialize();
                // Placeholder text (e.g. "Choose...") derived from the property descriptor.
                getSettings().configurePlaceholder(descriptor);
            }

        };
        input.setRequired(descriptor.isPropertyRequired());
        input.setLabel(Model.of(getDescriptor().getDisplayName()));

        add(input);
    }

    @Override
    protected String convertInputToValue() throws ConversionException {
        // Selected job name as entered in the form, or null if none.
        return input.getConvertedInput();
    }

}
/* * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. * * Copyright 2012-2018 the original author or authors. */ package org.assertj.core.error; import java.util.List; /** * Creates an <code>{@link AssertionError}</code> indicating that an assertion that verifies that an object * has null fields failed. * * @author Vladimir Chernikov */ public class ShouldHaveAllNullFields extends BasicErrorMessageFactory { private static final String EXPECTED_MULTIPLE = "%nExpecting%n <%s>%nto only have null properties or fields but these were not null:%n <%s>.%n"; private static final String EXPECTED_SINGLE = "%nExpecting%n <%s>%nto only have null property or field, but <%s> was not null.%n"; private static final String COMPARISON = "Check was performed on all fields/properties"; private static final String EXCLUDING = COMPARISON + " except: <%s>."; private static final String DOT = "."; public ShouldHaveAllNullFields(Object actual, List<String> nonNullFields, List<String> ignoredFields) { super(EXPECTED_MULTIPLE + EXCLUDING, actual, nonNullFields, ignoredFields); } public ShouldHaveAllNullFields(Object actual, List<String> nonNullFields) { super(EXPECTED_MULTIPLE + COMPARISON + DOT, actual, nonNullFields); } public ShouldHaveAllNullFields(Object actual, String nonNullField) { super(EXPECTED_SINGLE + COMPARISON + DOT, actual, nonNullField); } public ShouldHaveAllNullFields(Object actual, String nonNullField, List<String> ignoredFields) { super(EXPECTED_SINGLE + EXCLUDING, actual, nonNullField, 
ignoredFields); } public static ShouldHaveAllNullFields shouldHaveAllNullFields(Object actual, List<String> nonNullFields, List<String> ignoredFields) { if (nonNullFields.size() == 1) { if (ignoredFields.isEmpty()) { return new ShouldHaveAllNullFields(actual, nonNullFields.get(0)); } return new ShouldHaveAllNullFields(actual, nonNullFields.get(0), ignoredFields); } return ignoredFields.isEmpty() ? new ShouldHaveAllNullFields(actual, nonNullFields) : new ShouldHaveAllNullFields(actual, nonNullFields, ignoredFields); } }
package ooga.view.gamescreen;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javafx.beans.binding.Bindings;
import javafx.beans.binding.NumberBinding;
import javafx.scene.image.ImageView;
import javafx.scene.layout.Pane;
import javafx.util.Pair;
import ooga.cardtable.ICell;
import ooga.cardtable.IMove;
import ooga.cardtable.IOffset;
import ooga.cardtable.Move;
import ooga.cardtable.Offset;
import ooga.controller.Controller.GiveMove;
import ooga.data.style.ICoordinate;
import ooga.data.style.ILayout;

/**
 * Renders the card table for one game: lays out {@link DisplayCell}s on a
 * JavaFX {@link Pane} according to an {@link ILayout}, and translates drag
 * and click gestures on cells into {@link IMove}s sent to the controller via
 * the {@link GiveMove} callback.
 */
public class DisplayTable {

  // Layout coordinates are percentages; divide by this to scale to the pane.
  private static final double DECIMAL_TO_PERCENT = 100;

  private Pane myPane;
  // Card size bindings track the pane width so cards resize with the window.
  private NumberBinding myCardHeight;
  private NumberBinding myCardWidth;
  // Pixel offsets between stacked cards, face-up vs face-down.
  private double faceUpCardOffset;
  private double faceDownCardOffset;
  // Cell name -> (x, y) position bindings on the pane.
  private Map<String, Pair<NumberBinding, NumberBinding>> myCellNameToLocation;

  // Callback used by DisplayCell when a drag gesture completes.
  @FunctionalInterface
  interface MyDragInterface {

    void returnSelectedDisplayCell(DisplayCell selectedCell);
  }

  // Callback used by DisplayCell when a cell is clicked.
  @FunctionalInterface
  interface MyClickInterface {

    void returnSelectedDisplayCell(DisplayCell selectedCell);
  }

  // All display cells currently drawn on the pane.
  List<DisplayCell> myDisplayCellData = new ArrayList<>();

  MyDragInterface getDraggedCell;
  MyClickInterface getClickedCell;

  // Scratch state for the move currently being assembled from a drag.
  DisplayCell myMovedDisplayCell;
  ICell myMover;
  ICell myDonor;
  ICell myRecipient;
  IMove myMove;

  String mySkinType;

  /**
   * @param gameID identifier passed back to the controller with each move
   * @param moveLambda controller callback that receives constructed moves
   * @param layout card sizes, offsets, and cell positions
   * @param screenWidth pixel width used to scale the card offsets
   * @param skinType name of the card skin to render with
   */
  public DisplayTable(int gameID, GiveMove moveLambda, ILayout layout, double screenWidth,
      String skinType) {
    mySkinType = skinType;
    myPane = new Pane();
    myCardHeight = Bindings.multiply(layout.getCardHeightRatio(), myPane.widthProperty());
    myCardWidth = Bindings.multiply(layout.getCardWidthRatio(), myPane.widthProperty());
    faceUpCardOffset = layout.getUpOffsetRatio() * screenWidth;
    faceDownCardOffset = layout.getDownOffsetRatio() * screenWidth;
    myCellNameToLocation = new HashMap<>();
    Map<String, ICoordinate> locations = layout.getCellLayout();
    for (String key : locations.keySet()) {
      // Both x and y are percentages of the pane WIDTH (y intentionally uses
      // widthProperty too, per the original layout scheme).
      NumberBinding x = Bindings
          .divide(Bindings.multiply(myPane.widthProperty(), locations.get(key).getX()),
              DECIMAL_TO_PERCENT);
      NumberBinding y = Bindings
          .divide(Bindings.multiply(myPane.widthProperty(), locations.get(key).getY()),
              DECIMAL_TO_PERCENT);
      myCellNameToLocation.put(key, new Pair<>(x, y));
    }
    initializeDraggedCell(gameID, moveLambda);
    initializeClickedCell(gameID, moveLambda);
  }

  // Drag handler: if the dragged cell landed on another cell, send the move.
  private void initializeDraggedCell(int gameID, GiveMove moveLambda) {
    getDraggedCell = (DisplayCell selectedCell) -> {
      myMovedDisplayCell = selectedCell;
      if (checkMove()) {
        moveLambda.sendMove(myMove, gameID);
      }
    };
  }

  // Click handler: a click is encoded as a move whose donor, mover, and
  // recipient are all the clicked cell.
  private void initializeClickedCell(int gameID, GiveMove moveLambda) {
    getClickedCell = (DisplayCell selectedCell) -> {
      IMove clickMove = new Move(selectedCell.getCell(), selectedCell.getCell(),
          selectedCell.getCell());
      moveLambda.sendMove(clickMove, gameID);
    };
  }

  /**
   * Returns true (and populates myMove) when the dragged cell intersects some
   * other cell; the move is donor = dragged stack's head, mover = dragged
   * cell, recipient = target stack's leaf.
   */
  private boolean checkMove() {
    DisplayCell intersectedCell = checkIntersections();
    if (intersectedCell != myMovedDisplayCell) {
      myMover = myMovedDisplayCell.getCell();
      myDonor = myMovedDisplayCell.getCell().findHead();
      myRecipient = intersectedCell.getCell().findLeaf();
      myMove = new Move(myDonor, myMover, myRecipient);
    }
    return intersectedCell != myMovedDisplayCell;
  }

  /**
   * Finds the first drawn cell (from a different stack) whose image bounds
   * intersect the dragged cell's image; returns the dragged cell itself when
   * there is no such intersection.
   */
  private DisplayCell checkIntersections() {
    boolean isIntersection = false;
    ImageView movedImage = myMovedDisplayCell.getImageView();
    for (DisplayCell dc : myDisplayCellData) {
      ImageView otherImage = dc.getImageView();
      // Skip cells in the same stack (same head) as the dragged cell.
      if (!myMovedDisplayCell.getCell().findHead().getName()
          .equals(dc.getCell().findHead().getName())) {
        isIntersection = checkIntersection(movedImage, otherImage);
      }
      if (isIntersection) {
        return dc;
      }
    }
    return myMovedDisplayCell;
  }

  // Null-safe bounding-box intersection test in parent coordinates.
  private boolean checkIntersection(ImageView a, ImageView b) {
    return a != null && b != null && a.getBoundsInParent().intersects(b.getBoundsInParent());
  }

  /** @return the pane this table draws onto, for embedding in a scene */
  public Pane getPane() {
    return myPane;
  }

  /** Full redraw: clears everything and rebuilds all cells from cellData. */
  public void updateCells(Map<String, ICell> cellData) {
    myPane.getChildren().clear();
    myDisplayCellData.clear();
    List<DisplayCell> displayCellData = makeDisplayCells(cellData);
    drawDisplayCells(displayCellData);
  }

  /** Partial redraw: clears and rebuilds only the cells named in cellData. */
  public void updateTheseCells(Map<String, ICell> cellData) {
    clearTheseCells(cellData);
    List<DisplayCell> displayCellData = makeDisplayCells(cellData);
    drawDisplayCells(displayCellData);
  }

  // Removes the display cells matching cellData by name; iterates over a copy
  // because clearDisplayCell mutates myDisplayCellData.
  private void clearTheseCells(Map<String, ICell> cellData) {
    List<DisplayCell> copyDisplayCellData = new ArrayList<>(myDisplayCellData);
    for (ICell c : cellData.values()) {
      for (DisplayCell dc : copyDisplayCellData) {
        if (c.getName().equals(dc.getCell().getName())) {
          clearDisplayCell(dc);
          break;
        }
      }
    }
    copyDisplayCellData.clear();
  }

  // Recursively removes a display cell and its non-NONE children from the
  // pane and the tracking list.
  private void clearDisplayCell(DisplayCell dc) {
    myDisplayCellData.remove(dc);
    if (dc.getImageView() == null) {
      // do nothing
    } else {
      dc.getImageView().setImage(null);
      myPane.getChildren().remove(dc.getImageView());
    }
    for (IOffset dir : dc.getCell().getAllChildren().keySet()) {
      if (dir == Offset.NONE) {
        continue;
      }
      clearDisplayCell(dc.getAllChildren().get(dir));
    }
  }

  // Builds one DisplayCell per named ICell.
  private List<DisplayCell> makeDisplayCells(Map<String, ICell> cellData) {
    List<DisplayCell> displayCellData = new ArrayList<>();
    for (String c : cellData.keySet()) {
      displayCellData.add(makeDisplayCell(c, cellData.get(c)));
    }
    return displayCellData;
  }

  // Constructs a DisplayCell at the layout position registered for key.
  private DisplayCell makeDisplayCell(String key, ICell cell) {
    Pair<NumberBinding, NumberBinding> location = myCellNameToLocation.get(key);
    return new DisplayCell(getDraggedCell, getClickedCell, cell, mySkinType, location, myCardHeight,
        myCardWidth, faceDownCardOffset, faceUpCardOffset);
  }

  private void drawDisplayCells(List<DisplayCell> DisplayCellData) {
    for (DisplayCell dc : DisplayCellData) {
      drawDisplayCell(dc);
    }
  }

  // Recursively draws a display cell and its non-NONE children, registering
  // each in myDisplayCellData for later hit-testing.
  private void drawDisplayCell(DisplayCell rootDispCell) {
    if (rootDispCell == null) {
      return;
    }
    myDisplayCellData.add(rootDispCell);
    myPane.getChildren().add(rootDispCell.getImageView());
    for (IOffset dir : rootDispCell.getCell().getAllChildren().keySet()) {
      if (dir == Offset.NONE) {
        continue;
      }
      drawDisplayCell(rootDispCell.getAllChildren().get(dir));
    }
  }
}
/*
 * Copyright (c) 2017-2018 THL A29 Limited, a Tencent company. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.tencentcloudapi.ms.v20180408.models;

import com.tencentcloudapi.common.AbstractModel;
import com.google.gson.annotations.SerializedName;
import com.google.gson.annotations.Expose;
import java.util.HashMap;

/**
 * Result entry of one app scan task: task status, app details, and the
 * virus / vulnerability / ad-plugin / permission / sensitive-word findings.
 */
public class ScanSetInfo extends AbstractModel{

    /**
    * Task status: 1 - finished, 2 - processing, 3 - failed, 4 - timed out
    */
    @SerializedName("TaskStatus")
    @Expose
    private Long TaskStatus;

    /**
    * App information
    */
    @SerializedName("AppDetailInfo")
    @Expose
    private AppDetailInfo AppDetailInfo;

    /**
    * Virus information
    */
    @SerializedName("VirusInfo")
    @Expose
    private VirusInfo VirusInfo;

    /**
    * Vulnerability information
    */
    @SerializedName("VulInfo")
    @Expose
    private VulInfo VulInfo;

    /**
    * Ad plugin information
    */
    @SerializedName("AdInfo")
    @Expose
    private AdInfo AdInfo;

    /**
    * Time the scan was submitted
    */
    @SerializedName("TaskTime")
    @Expose
    private Long TaskTime;

    /**
    * Status code: 0 on success, an error code on failure
    */
    @SerializedName("StatusCode")
    @Expose
    private Long StatusCode;

    /**
    * Status description
    */
    @SerializedName("StatusDesc")
    @Expose
    private String StatusDesc;

    /**
    * Status remediation guidance
    */
    @SerializedName("StatusRef")
    @Expose
    private String StatusRef;

    /**
    * System permission information
    */
    @SerializedName("PermissionInfo")
    @Expose
    private ScanPermissionList PermissionInfo;

    /**
    * Sensitive word list
    */
    @SerializedName("SensitiveInfo")
    @Expose
    private ScanSensitiveList SensitiveInfo;

    /**
     * Get task status (1 - finished, 2 - processing, 3 - failed, 4 - timed out)
     * @return TaskStatus the task status
     */
    public Long getTaskStatus() {
        return this.TaskStatus;
    }

    /**
     * Set task status (1 - finished, 2 - processing, 3 - failed, 4 - timed out)
     * @param TaskStatus the task status
     */
    public void setTaskStatus(Long TaskStatus) {
        this.TaskStatus = TaskStatus;
    }

    /**
     * Get app information
     * @return AppDetailInfo the app information
     */
    public AppDetailInfo getAppDetailInfo() {
        return this.AppDetailInfo;
    }

    /**
     * Set app information
     * @param AppDetailInfo the app information
     */
    public void setAppDetailInfo(AppDetailInfo AppDetailInfo) {
        this.AppDetailInfo = AppDetailInfo;
    }

    /**
     * Get virus information
     * @return VirusInfo the virus information
     */
    public VirusInfo getVirusInfo() {
        return this.VirusInfo;
    }

    /**
     * Set virus information
     * @param VirusInfo the virus information
     */
    public void setVirusInfo(VirusInfo VirusInfo) {
        this.VirusInfo = VirusInfo;
    }

    /**
     * Get vulnerability information
     * @return VulInfo the vulnerability information
     */
    public VulInfo getVulInfo() {
        return this.VulInfo;
    }

    /**
     * Set vulnerability information
     * @param VulInfo the vulnerability information
     */
    public void setVulInfo(VulInfo VulInfo) {
        this.VulInfo = VulInfo;
    }

    /**
     * Get ad plugin information
     * @return AdInfo the ad plugin information
     */
    public AdInfo getAdInfo() {
        return this.AdInfo;
    }

    /**
     * Set ad plugin information
     * @param AdInfo the ad plugin information
     */
    public void setAdInfo(AdInfo AdInfo) {
        this.AdInfo = AdInfo;
    }

    /**
     * Get the time the scan was submitted
     * @return TaskTime the submission time
     */
    public Long getTaskTime() {
        return this.TaskTime;
    }

    /**
     * Set the time the scan was submitted
     * @param TaskTime the submission time
     */
    public void setTaskTime(Long TaskTime) {
        this.TaskTime = TaskTime;
    }

    /**
     * Get the status code (0 on success, an error code on failure)
     * @return StatusCode the status code
     */
    public Long getStatusCode() {
        return this.StatusCode;
    }

    /**
     * Set the status code (0 on success, an error code on failure)
     * @param StatusCode the status code
     */
    public void setStatusCode(Long StatusCode) {
        this.StatusCode = StatusCode;
    }

    /**
     * Get the status description
     * @return StatusDesc the status description
     */
    public String getStatusDesc() {
        return this.StatusDesc;
    }

    /**
     * Set the status description
     * @param StatusDesc the status description
     */
    public void setStatusDesc(String StatusDesc) {
        this.StatusDesc = StatusDesc;
    }

    /**
     * Get the status remediation guidance
     * @return StatusRef the remediation guidance
     */
    public String getStatusRef() {
        return this.StatusRef;
    }

    /**
     * Set the status remediation guidance
     * @param StatusRef the remediation guidance
     */
    public void setStatusRef(String StatusRef) {
        this.StatusRef = StatusRef;
    }

    /**
     * Get the system permission information
     * @return PermissionInfo the system permission information
     */
    public ScanPermissionList getPermissionInfo() {
        return this.PermissionInfo;
    }

    /**
     * Set the system permission information
     * @param PermissionInfo the system permission information
     */
    public void setPermissionInfo(ScanPermissionList PermissionInfo) {
        this.PermissionInfo = PermissionInfo;
    }

    /**
     * Get the sensitive word list
     * @return SensitiveInfo the sensitive word list
     */
    public ScanSensitiveList getSensitiveInfo() {
        return this.SensitiveInfo;
    }

    /**
     * Set the sensitive word list
     * @param SensitiveInfo the sensitive word list
     */
    public void setSensitiveInfo(ScanSensitiveList SensitiveInfo) {
        this.SensitiveInfo = SensitiveInfo;
    }

    public ScanSetInfo() {
    }

    /**
     * NOTE: Any ambiguous key set via .set("AnyKey", "value") will be a shallow copy,
     * and any explicit key, i.e Foo, set via .setFoo("value") will be a deep copy.
     */
    public ScanSetInfo(ScanSetInfo source) {
        // Long and String are immutable, so sharing the reference is an exact
        // value copy; the deprecated new Long(...)/new String(...) boxing
        // constructors (deprecated for removal since Java 9) were dropped.
        if (source.TaskStatus != null) {
            this.TaskStatus = source.TaskStatus;
        }
        if (source.AppDetailInfo != null) {
            this.AppDetailInfo = new AppDetailInfo(source.AppDetailInfo);
        }
        if (source.VirusInfo != null) {
            this.VirusInfo = new VirusInfo(source.VirusInfo);
        }
        if (source.VulInfo != null) {
            this.VulInfo = new VulInfo(source.VulInfo);
        }
        if (source.AdInfo != null) {
            this.AdInfo = new AdInfo(source.AdInfo);
        }
        if (source.TaskTime != null) {
            this.TaskTime = source.TaskTime;
        }
        if (source.StatusCode != null) {
            this.StatusCode = source.StatusCode;
        }
        if (source.StatusDesc != null) {
            this.StatusDesc = source.StatusDesc;
        }
        if (source.StatusRef != null) {
            this.StatusRef = source.StatusRef;
        }
        if (source.PermissionInfo != null) {
            this.PermissionInfo = new ScanPermissionList(source.PermissionInfo);
        }
        if (source.SensitiveInfo != null) {
            this.SensitiveInfo = new ScanSensitiveList(source.SensitiveInfo);
        }
    }

    /**
     * Internal implementation, normal users should not use it.
     */
    public void toMap(HashMap<String, String> map, String prefix) {
        this.setParamSimple(map, prefix + "TaskStatus", this.TaskStatus);
        this.setParamObj(map, prefix + "AppDetailInfo.", this.AppDetailInfo);
        this.setParamObj(map, prefix + "VirusInfo.", this.VirusInfo);
        this.setParamObj(map, prefix + "VulInfo.", this.VulInfo);
        this.setParamObj(map, prefix + "AdInfo.", this.AdInfo);
        this.setParamSimple(map, prefix + "TaskTime", this.TaskTime);
        this.setParamSimple(map, prefix + "StatusCode", this.StatusCode);
        this.setParamSimple(map, prefix + "StatusDesc", this.StatusDesc);
        this.setParamSimple(map, prefix + "StatusRef", this.StatusRef);
        this.setParamObj(map, prefix + "PermissionInfo.", this.PermissionInfo);
        this.setParamObj(map, prefix + "SensitiveInfo.", this.SensitiveInfo);
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.solr.cloud.overseer;

import java.util.Collections;
import java.util.Map;

import org.apache.solr.SolrTestCaseJ4;
import org.apache.solr.client.solrj.cloud.DistribStateManager;
import org.apache.solr.client.solrj.cloud.SolrCloudManager;
import org.apache.solr.common.cloud.ClusterState;
import org.apache.solr.common.cloud.DocCollection;
import org.apache.solr.common.cloud.ImplicitDocRouter;
import org.apache.solr.common.cloud.Slice;
import org.apache.solr.common.cloud.ZkNodeProps;
import org.junit.BeforeClass;

import static org.mockito.Mockito.*;

/**
 * Unit test for {@link ClusterStateMutator#createCollection}, driven with a
 * mocked {@link SolrCloudManager} so no real ZooKeeper is needed.
 */
public class TestClusterStateMutator extends SolrTestCaseJ4 {
  @BeforeClass
  public static void beforeClass() {
    // Skip on platforms where Mockito cannot operate.
    assumeWorkingMockito();
  }

  public void testCreateCollection() throws Exception {
    // Start from an empty cluster state.
    ClusterState clusterState = new ClusterState(Collections.<String>emptySet(),
        Collections.<String, DocCollection>emptyMap());
    DistribStateManager mockStateManager = mock(DistribStateManager.class);
    SolrCloudManager dataProvider = mock(SolrCloudManager.class);
    when(dataProvider.getDistribStateManager()).thenReturn(mockStateManager);

    ClusterStateMutator mutator = new ClusterStateMutator(dataProvider);

    // Simple case: default router, one shard.
    ZkNodeProps message = new ZkNodeProps(Map.of(
        "name", "xyz",
        "numShards", "1"
    ));
    ZkWriteCommand cmd = mutator.createCollection(clusterState, message);
    DocCollection collection = cmd.collection;
    assertEquals("xyz", collection.getName());
    assertEquals(1, collection.getSlicesMap().size());

    // Second collection created on top of the state containing the first.
    ClusterState state = new ClusterState(Collections.<String>emptySet(),
        Collections.singletonMap("xyz", collection));
    message = new ZkNodeProps(Map.of(
        "name", "abc",
        "numShards", "2",
        "router.name", "implicit",
        "shards", "x,y",
        "replicationFactor", "3"
    ));
    cmd = mutator.createCollection(state, message);
    collection = cmd.collection;
    assertEquals("abc", collection.getName());
    assertEquals(2, collection.getSlicesMap().size());
    assertNotNull(collection.getSlicesMap().get("x"));
    assertNotNull(collection.getSlicesMap().get("y"));
    // Implicit router assigns no hash ranges to the named shards.
    assertNull(collection.getSlicesMap().get("x").getRange());
    assertNull(collection.getSlicesMap().get("y").getRange());
    assertSame(Slice.State.ACTIVE, collection.getSlicesMap().get("x").getState());
    assertSame(Slice.State.ACTIVE, collection.getSlicesMap().get("y").getState());
    assertEquals(ImplicitDocRouter.class, collection.getRouter().getClass());
    assertNotNull(state.getCollectionOrNull("xyz")); // we still have the old collection
  }
}
package com.softwareverde.bitcoin.server.module.stratum.api.endpoint.account; import com.softwareverde.bitcoin.address.Address; import com.softwareverde.bitcoin.address.AddressInflater; import com.softwareverde.bitcoin.miner.pool.AccountId; import com.softwareverde.bitcoin.server.configuration.StratumProperties; import com.softwareverde.bitcoin.server.database.DatabaseConnection; import com.softwareverde.bitcoin.server.database.DatabaseConnectionFactory; import com.softwareverde.bitcoin.server.module.stratum.api.endpoint.StratumApiResult; import com.softwareverde.bitcoin.server.module.stratum.database.AccountDatabaseManager; import com.softwareverde.database.DatabaseException; import com.softwareverde.http.HttpMethod; import com.softwareverde.http.querystring.GetParameters; import com.softwareverde.http.querystring.PostParameters; import com.softwareverde.http.server.servlet.request.Request; import com.softwareverde.http.server.servlet.response.JsonResponse; import com.softwareverde.http.server.servlet.response.Response; import com.softwareverde.logging.Logger; import com.softwareverde.servlet.AuthenticatedServlet; public class PayoutAddressApi extends AuthenticatedServlet { protected final DatabaseConnectionFactory _databaseConnectionFactory; public PayoutAddressApi(final StratumProperties stratumProperties, final DatabaseConnectionFactory databaseConnectionFactory) { super(stratumProperties); _databaseConnectionFactory = databaseConnectionFactory; } @Override protected Response _onAuthenticatedRequest(final AccountId accountId, final Request request) { final GetParameters getParameters = request.getGetParameters(); final PostParameters postParameters = request.getPostParameters(); if (request.getMethod() == HttpMethod.GET) { // GET PAYOUT ADDRESS // Requires GET: // Requires POST: try (final DatabaseConnection databaseConnection = _databaseConnectionFactory.newConnection()) { final AccountDatabaseManager accountDatabaseManager = new 
AccountDatabaseManager(databaseConnection); final Address address = accountDatabaseManager.getPayoutAddress(accountId); final StratumApiResult apiResult = new StratumApiResult(true, null); apiResult.put("address", (address != null ? address.toBase58CheckEncoded() : null)); return new JsonResponse(Response.Codes.OK, apiResult); } catch (final DatabaseException exception) { Logger.warn(exception); return new JsonResponse(Response.Codes.SERVER_ERROR, new StratumApiResult(false, "An internal error occurred.")); } } else if (request.getMethod() == HttpMethod.POST) { // SET PAYOUT ADDRESS // Requires GET: // Requires POST: address final AddressInflater addressInflater = new AddressInflater(); final String addressString = postParameters.get("address"); final Address address; if (! addressString.isEmpty()) { address = addressInflater.uncompressedFromBase58Check(addressString); if (address == null) { return new JsonResponse(Response.Codes.BAD_REQUEST, new StratumApiResult(false, "Invalid address.")); } } else { address = null; } try (final DatabaseConnection databaseConnection = _databaseConnectionFactory.newConnection()) { final AccountDatabaseManager accountDatabaseManager = new AccountDatabaseManager(databaseConnection); accountDatabaseManager.setPayoutAddress(accountId, address); return new JsonResponse(Response.Codes.OK, new StratumApiResult(true, null)); } catch (final DatabaseException exception) { Logger.warn(exception); return new JsonResponse(Response.Codes.SERVER_ERROR, new StratumApiResult(false, "An internal error occurred.")); } } else { return new JsonResponse(Response.Codes.BAD_REQUEST, new StratumApiResult(false, "Invalid method.")); } } }
/*
 * Copyright (c) 2002-2021 Gargoyle Software Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.gargoylesoftware.htmlunit.html;

import static com.gargoylesoftware.htmlunit.BrowserVersionFeatures.EVENT_ONCHANGE_AFTER_ONCLICK;
import static com.gargoylesoftware.htmlunit.BrowserVersionFeatures.HTMLINPUT_CHECKBOX_DOES_NOT_CLICK_SURROUNDING_ANCHOR;

import java.io.IOException;
import java.util.Map;

import com.gargoylesoftware.htmlunit.Page;
import com.gargoylesoftware.htmlunit.ScriptResult;
import com.gargoylesoftware.htmlunit.SgmlPage;
import com.gargoylesoftware.htmlunit.javascript.host.event.Event;

/**
 * Wrapper for the HTML element "input" of type checkbox.
 *
 * @author <a href="mailto:mbowler@GargoyleSoftware.com">Mike Bowler</a>
 * @author David K. Taylor
 * @author <a href="mailto:chen_jun@users.sourceforge.net">Jun Chen</a>
 * @author <a href="mailto:cse@dynabean.de">Christian Sell</a>
 * @author Marc Guillemot
 * @author Mike Bresnahan
 * @author Daniel Gredler
 * @author Ahmed Ashour
 * @author Ronald Brill
 * @author Frank Danek
 */
public class HtmlCheckBoxInput extends HtmlInput implements LabelableElement {

    /**
     * Value to use if no specified <tt>value</tt> attribute.
     */
    private static final String DEFAULT_VALUE = "on";

    // Checked state as present in the original markup (used by reset()).
    private boolean defaultCheckedState_;
    // Current (live) checked state.
    private boolean checkedState_;

    /**
     * Creates an instance.
     * If no value is specified, it is set to "on" as browsers do
     * even if spec says that it is not allowed
     * (<a href="http://www.w3.org/TR/REC-html40/interact/forms.html#adef-value-INPUT">W3C</a>).
     *
     * @param qualifiedName the qualified name of the element type to instantiate
     * @param page the page that contains this element
     * @param attributes the initial attributes
     */
    HtmlCheckBoxInput(final String qualifiedName, final SgmlPage page,
            final Map<String, DomAttr> attributes) {
        super(qualifiedName, page, addValueIfNeeded(page, attributes));

        // fix the default value in case we have set it
        // NOTE: intentional reference comparison (==), not equals() -- it detects
        // the exact DEFAULT_VALUE String instance injected by addValueIfNeeded(),
        // i.e. the case where the markup had no value attribute at all.
        if (getAttributeDirect("value") == DEFAULT_VALUE) {
            setDefaultValue(ATTRIBUTE_NOT_DEFINED, false);
        }

        defaultCheckedState_ = hasAttribute("checked");
        checkedState_ = defaultCheckedState_;
    }

    /**
     * Add missing attribute if needed by fixing attribute map rather to add it afterwards as this second option
     * triggers the instantiation of the script object at a time where the DOM node has not yet been added to its
     * parent.
     */
    private static Map<String, DomAttr> addValueIfNeeded(final SgmlPage page,
            final Map<String, DomAttr> attributes) {

        // Case-insensitive scan: attribute maps may carry any casing of "value".
        for (final String key : attributes.keySet()) {
            if ("value".equalsIgnoreCase(key)) {
                return attributes; // value attribute was specified
            }
        }

        // value attribute was not specified, add it
        final DomAttr newAttr = new DomAttr(page, null, "value", DEFAULT_VALUE, true);
        attributes.put("value", newAttr);

        return attributes;
    }

    /**
     * {@inheritDoc}
     *
     * @see SubmittableElement#reset()
     */
    @Override
    public void reset() {
        setChecked(defaultCheckedState_);
    }

    /**
     * Returns {@code true} if this element is currently selected.
     * @return {@code true} if this element is currently selected
     */
    @Override
    public boolean isChecked() {
        return checkedState_;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Page setChecked(final boolean isChecked) {
        checkedState_ = isChecked;
        // May fire an onchange handler; the handler can replace the current page.
        return executeOnChangeHandlerIfAppropriate(this);
    }

    /**
     * {@inheritDoc}
     * Toggles the checked state before delegating to the superclass click handling.
     */
    @Override
    protected boolean doClickStateUpdate(final boolean shiftKey, final boolean ctrlKey) throws IOException {
        checkedState_ = !isChecked();
        super.doClickStateUpdate(shiftKey, ctrlKey);
        return true;
    }

    /**
     * {@inheritDoc}
     * Browsers differ on whether onchange fires before or after onclick;
     * the EVENT_ONCHANGE_AFTER_ONCLICK feature flag selects the ordering.
     */
    @Override
    protected ScriptResult doClickFireClickEvent(final Event event) {
        if (!hasFeature(EVENT_ONCHANGE_AFTER_ONCLICK)) {
            executeOnChangeHandlerIfAppropriate(this);
        }
        return super.doClickFireClickEvent(event);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    protected void doClickFireChangeEvent() {
        if (hasFeature(EVENT_ONCHANGE_AFTER_ONCLICK)) {
            executeOnChangeHandlerIfAppropriate(this);
        }
    }

    /**
     * First update the internal state of checkbox and then handle "onclick" event.
     * {@inheritDoc}
     */
    @Override
    protected boolean isStateUpdateFirst() {
        return true;
    }

    /**
     * {@inheritDoc}
     * Reverts the toggle applied by doClickStateUpdate() when the click's
     * default action was prevented.
     */
    @Override
    protected void preventDefault() {
        checkedState_ = !checkedState_;
    }

    /**
     * {@inheritDoc} Also sets the value to the new default value.
     * @see SubmittableElement#setDefaultValue(String)
     */
    @Override
    public void setDefaultValue(final String defaultValue) {
        super.setDefaultValue(defaultValue);
        setValueAttribute(defaultValue);
    }

    /**
     * {@inheritDoc}
     * @see SubmittableElement#setDefaultChecked(boolean)
     */
    @Override
    public void setDefaultChecked(final boolean defaultChecked) {
        defaultCheckedState_ = defaultChecked;
        setChecked(defaultChecked);
    }

    /**
     * {@inheritDoc}
     * @see SubmittableElement#isDefaultChecked()
     */
    @Override
    public boolean isDefaultChecked() {
        return defaultCheckedState_;
    }

    /**
     * The JavaScript-visible value of a checkbox is its checked state.
     */
    @Override
    Object getInternalValue() {
        return isChecked();
    }

    // Checkboxes do not trigger value-change handling on focus loss.
    @Override
    void handleFocusLostValueChanged() {
    }

    /**
     * {@inheritDoc}
     * Keeps the internal default value / checked state in sync when the
     * corresponding DOM attributes are written.
     */
    @Override
    protected void setAttributeNS(final String namespaceURI, final String qualifiedName, final String attributeValue,
            final boolean notifyAttributeChangeListeners, final boolean notifyMutationObservers) {
        if ("value".equals(qualifiedName)) {
            setDefaultValue(attributeValue, false);
        }
        if ("checked".equals(qualifiedName)) {
            checkedState_ = true;
        }
        super.setAttributeNS(namespaceURI, qualifiedName, attributeValue, notifyAttributeChangeListeners,
                notifyMutationObservers);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    protected boolean propagateClickStateUpdateToParent() {
        return !hasFeature(HTMLINPUT_CHECKBOX_DOES_NOT_CLICK_SURROUNDING_ANCHOR)
                && super.propagateClickStateUpdateToParent();
    }
}
package jadx.plugins.input.javaconvert; import java.nio.file.Path; import java.util.List; import jadx.api.plugins.JadxPluginInfo; import jadx.api.plugins.input.JadxInputPlugin; import jadx.api.plugins.input.data.ILoadResult; import jadx.api.plugins.input.data.impl.EmptyLoadResult; import jadx.plugins.input.dex.DexInputPlugin; public class JavaConvertPlugin implements JadxInputPlugin { @Override public JadxPluginInfo getPluginInfo() { return new JadxPluginInfo("java-convert", "JavaConvert", "Convert .jar and .class files to dex"); } @Override public ILoadResult loadFiles(List<Path> input) { ConvertResult result = JavaConvertLoader.process(input); if (result.isEmpty()) { result.close(); return EmptyLoadResult.INSTANCE; } return DexInputPlugin.loadDexFiles(result.getConverted(), result); } }
package stsjorbsmod.cards; import com.megacrit.cardcrawl.actions.AbstractGameAction.AttackEffect; import com.megacrit.cardcrawl.actions.common.DamageAction; import com.megacrit.cardcrawl.cards.DamageInfo; import com.megacrit.cardcrawl.characters.AbstractPlayer; import com.megacrit.cardcrawl.monsters.AbstractMonster; import stsjorbsmod.JorbsMod; import stsjorbsmod.characters.Wanderer; import stsjorbsmod.memories.MemoryUtils; import static stsjorbsmod.JorbsMod.makeCardPath; public class Hurt extends CustomJorbsModCard { public static final String ID = JorbsMod.makeID(Hurt.class.getSimpleName()); public static final String IMG = makeCardPath("Damage_Uncommons/hurt.png"); private static final CardRarity RARITY = CardRarity.UNCOMMON; private static final CardTarget TARGET = CardTarget.ENEMY; private static final CardType TYPE = CardType.ATTACK; public static final CardColor COLOR = Wanderer.Enums.COLOR_GRAY; private static final int COST = 1; private static final int DAMAGE = 14; private static final int UPGRADE_PLUS_DMG = 14; private static final int HP_LOSS_PER_CLARITY = 1; private static final int UPGRADE_PLUS_HP_LOSS_PER_CLARITY = 1; public Hurt() { super(ID, IMG, COST, TYPE, COLOR, RARITY, TARGET); baseDamage = DAMAGE; magicNumber = baseMagicNumber = HP_LOSS_PER_CLARITY; } @Override public void use(AbstractPlayer p, AbstractMonster m) { enqueueAction(new DamageAction(m, new DamageInfo(p, damage), AttackEffect.SLASH_HEAVY)); int hpLoss = MemoryUtils.countClarities(p) * magicNumber; if (hpLoss > 0) { enqueueAction( new DamageAction(p, new DamageInfo(p, hpLoss, DamageInfo.DamageType.HP_LOSS), AttackEffect.SHIELD)); } } @Override public void upgrade() { if (!upgraded) { upgradeName(); upgradeDamage(UPGRADE_PLUS_DMG); upgradeMagicNumber(UPGRADE_PLUS_HP_LOSS_PER_CLARITY); initializeDescription(); } } }
package org.basex.query.up.primitives;

import org.basex.data.*;
import org.basex.data.atomic.*;
import org.basex.query.up.*;
import org.basex.query.util.*;
import org.basex.query.value.item.*;
import org.basex.query.value.node.*;
import org.basex.query.value.type.*;
import org.basex.util.*;

/**
 * Abstract update primitive which holds a copy of nodes to be inserted.
 *
 * @author BaseX Team 2005-12, BSD License
 * @author Lukas Kircher
 */
public abstract class NodeCopy extends UpdatePrimitive {
  /** Nodes to be inserted (set to null after prepare() has consumed it). */
  ANodeList insert;
  /** Insertion sequence data clip (built by prepare()). */
  DataClip insseq;

  /**
   * Constructor.
   * @param t type
   * @param p target node pre value
   * @param d data
   * @param i input info
   * @param n node copy
   */
  NodeCopy(final PrimitiveType t, final int p, final Data d, final InputInfo i,
      final ANodeList n) {
    super(t, p, d, i);
    insert = n;
  }

  /**
   * Prepares this update primitive before execution. This includes e.g. the
   * preparation of insertion sequences.
   * @param tmp temporary database
   */
  public final void prepare(final MemData tmp) {
    // merge texts. after that, text nodes still need to be merged,
    // as two adjacent iterators may lead to two adjacent text nodes
    final ANodeList list = mergeNodeCacheText(insert);
    // release the original list; from here on, insseq is the source of truth
    insert = null;
    // build main memory representation of nodes to be copied
    final int start = tmp.meta.size;
    new DataBuilder(tmp).build(list);
    insseq = new DataClip(tmp, start, tmp.meta.size);
    insseq.fragments = list.size();
  }

  /**
   * Adds top entries from the temporary data instance to the name pool,
   * which is used for finding duplicate attributes and namespace conflicts.
   * @param pool name pool
   */
  final void add(final NamePool pool) {
    final Data d = insseq.data;
    final int ps = insseq.start;
    for(int p = ps; p < insseq.end; ++p) {
      final int k = d.kind(p);
      // only consider top-level attributes/elements: skip other node kinds and
      // any node whose parent lies inside the clip itself
      if(k != Data.ATTR && k != Data.ELEM || d.parent(p, k) >= ps) continue;
      final int u = d.uri(p, k);
      final QNm qnm = new QNm(d.name(p, k));
      if(u != 0) qnm.uri(d.nspaces.uri(u));
      pool.add(qnm, ANode.type(k));
    }
  }

  /**
   * Merges all adjacent text nodes in the given sequence.
   * @param nl iterator
   * @return iterator with merged text nodes
   */
  private static ANodeList mergeNodeCacheText(final ANodeList nl) {
    final int ns = nl.size();
    if(ns == 0) return nl;
    final ANodeList s = new ANodeList(ns);
    ANode n = nl.get(0);
    // c is only advanced inside the branches; each iteration consumes either
    // one run of adjacent text nodes or a single non-text node
    for(int c = 0; c < ns;) {
      if(n.type == NodeType.TXT) {
        // concatenate the whole run of adjacent text nodes into one
        final TokenBuilder tb = new TokenBuilder();
        while(n.type == NodeType.TXT) {
          tb.add(n.string());
          if(++c == ns) break;
          n = nl.get(c);
        }
        s.add(new FTxt(tb.finish()));
      } else {
        s.add(n);
        if(++c < ns) n = nl.get(c);
      }
    }
    return s;
  }

  @Override
  public final int size() {
    return insseq.fragments;
  }

  @Override
  public final String toString() {
    return Util.name(this) + '[' + getTargetNode() + ", " + size() + " ops]";
  }
}
package com.flipkart.android.proteus.support.view; import android.content.Context; import android.view.View; import androidx.annotation.NonNull; import androidx.appcompat.widget.AppCompatSpinner; import com.flipkart.android.proteus.ProteusView; import com.flipkart.android.proteus.toolbox.Attributes; import com.flipkart.android.proteus.value.ObjectValue; import com.flipkart.android.proteus.value.Value; /** * Created by Prasad Rao on 29-04-2020 12:53 **/ public class ProteusSpinner extends AppCompatSpinner implements ProteusView { private Manager manager; public ProteusSpinner(Context context) { super(context); } @Override public Manager getViewManager() { return manager; } @Override public void setViewManager(@NonNull Manager manager) { this.manager = manager; } @NonNull @Override public View getAsView() { return this; } @Override public Value getSelectedItem() { ObjectValue selectedItem = (ObjectValue) super.getSelectedItem(); return selectedItem.get(Attributes.Spinner.item); } }
package org.apache.spark.sql.execution.command;

/**
 * Java stub for the Scala companion object of {@code ShowPartitionsCommand}.
 *
 * This class is generated for Java interop only: the real singleton is created
 * by the Scala compiler, and {@code MODULE$} is populated at class-load time by
 * the Scala runtime. The Java constructor is never meant to be invoked.
 */
public  class ShowPartitionsCommand$ extends scala.runtime.AbstractFunction2<org.apache.spark.sql.catalyst.TableIdentifier, scala.Option<scala.collection.immutable.Map<java.lang.String, java.lang.String>>, org.apache.spark.sql.execution.command.ShowPartitionsCommand> implements scala.Serializable {
  /**
   * Static reference to the singleton instance of this Scala object.
   */
  public static final ShowPartitionsCommand$ MODULE$ = null;
  // Stub constructor: never called from Java; instantiation happens on the Scala side.
  public   ShowPartitionsCommand$ ()  { throw new RuntimeException(); }
}
package org.opensha.refFaultParamDb.calc.sectionDists;

import java.io.IOException;
import java.util.ArrayList;
import java.util.EmptyStackException;
import java.util.HashMap;
import java.util.Stack;

import org.opensha.commons.util.FileUtils;
import org.opensha.commons.util.threads.Task;
import org.opensha.commons.util.threads.ThreadedTaskComputer;
import org.opensha.refFaultParamDb.vo.FaultSectionPrefData;
import org.opensha.sha.earthquake.rupForecastImpl.WGCEP_UCERF_2_Final.data.finalReferenceFaultParamDb.DeformationModelPrefDataFinal;
import org.opensha.sha.faultSurface.EvenlyGriddedSurface;
import org.opensha.sha.faultSurface.FrankelGriddedSurface;
import org.opensha.sha.faultSurface.SimpleFaultData;

/**
 * Computes pairwise distances between fault-section surfaces.
 *
 * Typical usage: construct with a deformation model (or surfaces), call
 * createPairings(...) to build the candidate pair set (optionally pre-filtered),
 * then calcDistances() or calcDistances(numThreads) to do the heavy work.
 */
public class FaultSectDistCalculator implements Runnable {

	// Section IDs, parallel to 'surfaces' (same index = same section).
	private ArrayList<Integer> sectionIDs;
	private ArrayList<EvenlyGriddedSurface> surfaces;

	// Pair -> distance record; populated by createPairings(...).
	private HashMap<Pairing, FaultSectDistRecord> records;

	// these are only used for threaded calcs
	// NOTE(review): calcStack is never assigned anywhere in this class as shown;
	// run() would NPE rather than hit EmptyStackException unless it is set
	// elsewhere -- confirm before using the Runnable path.
	private Stack<FaultSectDistRecord> calcStack;

	// If true, use the faster approximate distance calculation.
	private boolean fast;

	// Wall-clock timings (seconds) of the last calc/pairing passes.
	private double calcTimeSecs;
	private double pairTimeSecs;

	/**
	 * Builds surfaces for all fault sections of the given deformation model.
	 * @param disc grid discretization (km, presumably -- TODO confirm units)
	 * @param fast use the fast distance approximation
	 */
	public FaultSectDistCalculator(
			double disc,
			boolean fast,
			DeformationModelPrefDataFinal deformationModelPrefDB,
			int deformationModelId) {
		this(disc, fast, deformationModelPrefDB.getAllFaultSectionPrefData(deformationModelId));
	}

	public FaultSectDistCalculator(
			double disc,
			boolean fast,
			ArrayList<FaultSectionPrefData> data) {
		this(fast, createSurfaces(disc, data), getIDs(data));
	}

	public FaultSectDistCalculator(boolean fast,
			ArrayList<EvenlyGriddedSurface> surfaces, ArrayList<Integer> ids) {
		this.fast = fast;
		this.surfaces = surfaces;
		this.sectionIDs = ids;
	}

	/** Extracts section IDs in the same order as the data list. */
	private static ArrayList<Integer> getIDs(ArrayList<FaultSectionPrefData> data) {
		ArrayList<Integer> sectionIDs = new ArrayList<Integer>();
		for (FaultSectionPrefData val : data) {
			sectionIDs.add(val.getSectionId());
		}
		return sectionIDs;
	}

	/** Builds a Frankel gridded surface for each section at the given discretization. */
	private static ArrayList<EvenlyGriddedSurface> createSurfaces(double disc, ArrayList<FaultSectionPrefData> data) {
		ArrayList<EvenlyGriddedSurface> surfaces = new ArrayList<EvenlyGriddedSurface>();
		for (FaultSectionPrefData section : data) {
			SimpleFaultData simpleFaultData = section.getSimpleFaultData(false);
			FrankelGriddedSurface surface = new FrankelGriddedSurface(simpleFaultData, disc);
			surfaces.add(surface);
		}
		return surfaces;
	}

	/** Single-threaded distance calculation over all pairings; records timing. */
	public void calcDistances() {
		long start = System.currentTimeMillis();
		for (FaultSectDistRecord record : records.values()) {
			record.calcDistances(fast);
		}
		calcTimeSecs = (System.currentTimeMillis() - start) / 1000d;
	}

	/** Multi-threaded distance calculation using a shared task stack; records timing. */
	public void calcDistances(int numThreads) throws InterruptedException {
		Stack<Task> tasks = new Stack<Task>();
		for (FaultSectDistRecord record : records.values())
			tasks.push(new CalcTask(record, fast));
		ThreadedTaskComputer threaded = new ThreadedTaskComputer(tasks, true);
		long start = System.currentTimeMillis();
		threaded.computeThreaded(numThreads);
		calcTimeSecs = (System.currentTimeMillis() - start) / 1000d;
	}

	/** Creates all pairings with no pre-filtering. */
	public void createPairings() {
		createPairings(null, -1.0);
	}

	/**
	 * Creates pairings for every unordered pair of distinct sections, optionally
	 * dropping pairs whose corner/midpoint distance exceeds the filter threshold
	 * or whose filtered min distance exceeds filterDist.
	 */
	public void createPairings(SurfaceFilter filter, double filterDist) {
		long start = System.currentTimeMillis();
		records = new HashMap<Pairing, FaultSectDistRecord>();
		for (int i=0; i<surfaces.size(); i++) {
			EvenlyGriddedSurface surface1 = surfaces.get(i);
			for (int j=0; j<surfaces.size(); j++) {
				EvenlyGriddedSurface surface2 = surfaces.get(j);
				if (surface1 == surface2)
					continue;
				int id1 = sectionIDs.get(i);
				int id2 = sectionIDs.get(j);
				// only keep one ordering per pair (id1 < id2)
				if (id1 >= id2)
					continue;
				FaultSectDistRecord record = new FaultSectDistRecord(id1, surface1, id2, surface2);
				// cheap coarse filter first, then the (more expensive) filtered distance
				if (filter != null && record.calcMinCornerMidptDist(fast) > filter.getCornerMidptFilterDist())
					continue;
				if (filter != null && filterDist > 0) {
					record.calcDistances(filter, fast);
					if (record.getMinDist() > filterDist)
						continue;
				}
				records.put(record.getPairing(), record);
			}
		}
		System.out.println("Created " + records.size() + " pairings!");
		pairTimeSecs = (System.currentTimeMillis() - start) / 1000d;
	}

	public HashMap<Pairing, FaultSectDistRecord> getRecords() {
		return records;
	}

	/**
	 * Command-line driver: computes filtered distances for deformation model 82
	 * and serializes the result map to "faultSectDistances.obj".
	 * @param args
	 * @throws IOException
	 */
	public static void main(String[] args) throws IOException {
		long start = System.currentTimeMillis();
		int deformationModelId = 82;
		DeformationModelPrefDataFinal deformationModelPrefDB = new DeformationModelPrefDataFinal();
		double disc = 1.0;
		double filterDist = 15;
		double cornerMidptFilterDist = 50;
		int outlineModulus = 4;
		int internalModulus = 5;
		SurfaceFilter filter = new SmartSurfaceFilter(outlineModulus, internalModulus, cornerMidptFilterDist);
//		double disc = 3.0;
		FaultSectDistCalculator calc = new FaultSectDistCalculator(disc, true, deformationModelPrefDB, deformationModelId);
		calc.createPairings(filter, filterDist);
		System.out.println("Pair time: " + calc.getPairTimeSecs());
		int threads = Runtime.getRuntime().availableProcessors();
//		if (filterDist > 0)
//			calc.filterOutByCornerMidptDistance(filterDist, true);
//		if (filterDist > 0)
//			calc.filterRecords(new SmartSurfaceFilter(outlineModulus, internalModulus), filterDist, true);
		System.out.println("Disc: " + disc + ", filter dist: " + filterDist + ", outline modulus: "
				+ outlineModulus + ", internal modulus: " + internalModulus);
		System.out.println("Calculating with " + threads + " threads.");
		try {
			calc.calcDistances(threads);
		} catch (InterruptedException e) {
			e.printStackTrace();
		}
		System.out.println("Calc time: " + calc.getCalcTimeSecs());
		FileUtils.saveObjectInFile("faultSectDistances.obj", calc.getRecords());
		int count = 0;
		for (FaultSectDistRecord record : calc.getRecords().values()) {
			if (record.getMinDist() < 10)
				count++;
		}
		System.out.println("Found " + count + " under cutoff!");
		long end = System.currentTimeMillis();
		System.out.println("Total time: " + ((end - start) / 1000d) + " secs");
	}

	// Synchronized pop so worker threads can share calcStack safely.
	private synchronized FaultSectDistRecord getRecordToCalc() throws EmptyStackException {
		return calcStack.pop();
	}

	/**
	 * Worker loop: drains calcStack until empty (EmptyStackException ends the loop).
	 * See the NOTE on calcStack above regarding its initialization.
	 */
	@Override
	public void run() {
		while (true) {
			try {
				FaultSectDistRecord record = getRecordToCalc();
				record.calcDistances(fast);
			} catch (EmptyStackException e) {
				break;
			}
		}
	}

	/** Removes pairings whose coarse corner/midpoint distance exceeds maxDist. */
	public void filterOutByCornerMidptDistance(double maxDist, boolean fast) {
		ArrayList<Pairing> toBeRemoved = new ArrayList<Pairing>();
		for (Pairing pairing : records.keySet()) {
			FaultSectDistRecord record = records.get(pairing);
			double minDist = record.calcMinCornerMidptDist(fast);
			if (minDist > maxDist)
				toBeRemoved.add(pairing);
		}
		System.out.println("filtered out " + toBeRemoved.size() + "/" + records.size());
		for (Pairing remove : toBeRemoved) {
			records.remove(remove);
		}
	}

	/** Removes pairings whose filtered minimum distance exceeds maxDist. */
	public void filterRecords(SurfaceFilter filter, double maxDist, boolean fast) {
		ArrayList<Pairing> toBeRemoved = new ArrayList<Pairing>();
		for (Pairing pairing : records.keySet()) {
			FaultSectDistRecord record = records.get(pairing);
			double minDist = record.calcMinDist(filter, fast);
			if (minDist > maxDist)
				toBeRemoved.add(pairing);
		}
		System.out.println("filtered out " + toBeRemoved.size() + "/" + records.size());
		for (Pairing remove : toBeRemoved) {
			records.remove(remove);
		}
	}

	public double getCalcTimeSecs() {
		return calcTimeSecs;
	}

	public double getPairTimeSecs() {
		return pairTimeSecs;
	}
}
//@@author amrut-prabhu package seedu.club.commons.events.model; import static seedu.club.storage.ProfilePhotoStorage.PHOTO_FILE_EXTENSION; import seedu.club.commons.events.BaseEvent; /** * Indicates that the profile photo of a member has changed. */ public class ProfilePhotoChangedEvent extends BaseEvent { public final String originalPhotoPath; public final String newFileName; private boolean isPhotoChanged; public ProfilePhotoChangedEvent(String originalPhotoPath, String newFileName) { this.originalPhotoPath = originalPhotoPath; this.newFileName = newFileName; this.isPhotoChanged = true; } public boolean isPhotoChanged() { return isPhotoChanged; } public void setPhotoChanged(boolean isPhotoChanged) { this.isPhotoChanged = isPhotoChanged; } @Override public String toString() { return originalPhotoPath + " is being stored as " + newFileName + PHOTO_FILE_EXTENSION; } }
/* * Copyright (c) 2009, James Leigh All rights reserved. * Copyright (c) 2011, 3 Round Stones Inc. Some rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * - Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * - Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * - Neither the name of the openrdf.org nor the names of its contributors may * be used to endorse or promote products derived from this software without * specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. 
 */

package org.openrdf.sail.optimistic;

import java.io.File;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;

import org.openrdf.model.URI;
import org.openrdf.query.QueryLanguage;
import org.openrdf.repository.RepositoryException;
import org.openrdf.repository.query.NamedQuery;
import org.openrdf.repository.query.NamedQueryRepository;
import org.openrdf.repository.sail.SailRepository;
import org.openrdf.repository.sail.SailRepositoryConnection;
import org.openrdf.sail.Sail;
import org.openrdf.sail.SailException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Allows concurrent write connections. Optionally enforces snapshot and
 * serializable transaction isolation.
 *
 * @author James Leigh
 * @author Steve Battle
 *
 */
public class OptimisticRepository extends SailRepository implements NamedQueryRepository {

	private final Logger logger = LoggerFactory.getLogger(OptimisticRepository.class) ;

	// The wrapped sail (same object as getSail(), kept in its concrete type).
	private OptimisticSail sail;

	// Named queries by URI; access is guarded by 'synchronized' methods below.
	private Map<URI, OptimisticNamedQuery> namedQueries = new HashMap<URI, OptimisticNamedQuery>() ;

	public OptimisticRepository(Sail sail) {
		super(new OptimisticSail(sail));
		this.sail = (OptimisticSail) getSail();
	}

	/**
	 * @return <code>true</code> if read operations in a new connections will
	 *         operate on a single state of the store.
	 */
	public boolean isReadSnapshot() {
		return sail.isReadSnapshot();
	}

	public void setReadSnapshot(boolean snapshot) {
		sail.setReadSnapshot(snapshot);
	}

	/**
	 * @return <code>true</code> if the new connections will enforce snapshot
	 *         isolation.
	 */
	public boolean isSnapshot() {
		return sail.isSnapshot();
	}

	public void setSnapshot(boolean snapshot) {
		sail.setSnapshot(snapshot);
	}

	/**
	 * @return <code>true</code> if new connections will enforce serializable
	 *         isolation.
	 */
	public boolean isSerializable() {
		return sail.isSerializable();
	}

	public void setSerializable(boolean serializable) {
		sail.setSerializable(serializable);
	}

	/**
	 * Wraps the sail connection in an auto-commit repository connection.
	 */
	@Override
	public SailRepositoryConnection getConnection() throws RepositoryException {
		try {
			OptimisticConnection con = sail.getConnection();
			return new AutoCommitRepositoryConnection(this, con);
		} catch (SailException e) {
			throw new RepositoryException(e);
		}
	}

	/* Methods supporting the NamedQueryRepository interface */

	public synchronized NamedQuery createNamedQuery(URI uri, QueryLanguage ql,
			String queryString, String baseURI) throws RepositoryException {
		// allow existing mapping to be overwritten
		// but detach the old named query from the repository
		if (namedQueries.containsKey(uri)) {
			sail.removeSailChangedListener(namedQueries.get(uri));
		}
		OptimisticNamedQuery nq;
		nq = new OptimisticNamedQuery(uri, ql, queryString, baseURI);
		namedQueries.put(uri, nq);
		// named queries listen for sail changes so they can invalidate themselves
		sail.addSailChangedListener(nq);
		return nq;
	}

	public synchronized NamedQuery getNamedQuery(URI uri) {
		return namedQueries.get(uri);
	}

	public synchronized URI[] getNamedQueryIDs() {
		Set<URI> uris = namedQueries.keySet();
		return uris.toArray(new URI[uris.size()]);
	}

	// NOTE(review): if 'uri' is unknown, nq is null and is passed to
	// removeSailChangedListener -- presumably tolerated by the sail; confirm.
	public synchronized void removeNamedQuery(URI uri) {
		OptimisticNamedQuery nq = namedQueries.get(uri);
		sail.removeSailChangedListener(nq);
		namedQueries.remove(uri);
	}

	/* Override initialize(), shutdown() to support persistence */

	@Override
	protected synchronized void initializeInternal() throws RepositoryException {
		super.initializeInternal();
		// persist stored named queries
		File dataDir = getDataDir();
		if (dataDir != null && dataDir.isDirectory())
			try {
				namedQueries = OptimisticNamedQuery.persist(dataDir);
				// attach persistent named queries to repository
				for (OptimisticNamedQuery nq : namedQueries.values()) {
					sail.addSailChangedListener(nq);
				}
			} catch (Exception e) {
				// persistence failure is logged but does not abort initialization
				logger.error(e.toString(), e);
			}
	}

	/* Desist all active named queries */

	@Override
	protected synchronized void shutDownInternal() throws RepositoryException {
		super.shutDownInternal();
		// desist all named queries
		File dataDir = getDataDir();
		if (dataDir != null && dataDir.isDirectory()) {
			OptimisticNamedQuery.desist(dataDir, namedQueries);
		}
	}
}
/*----------------------------------------------------------------------------*/
/* Copyright (c) 2017-2018 FIRST. All Rights Reserved.                        */
/* Open Source Software - may be modified and shared by FRC teams. The code   */
/* must be accompanied by the FIRST BSD license file in the root directory of */
/* the project.                                                               */
/*----------------------------------------------------------------------------*/

package frc.robot;

import edu.wpi.first.wpilibj.TimedRobot;
import frc.robot.subsystems.Drivetrain;
import frc.robot.subsystems.Floop;
import frc.util.Gamepad;

/**
 * Main robot class. Owns the robot-wide singletons (drivetrain, floop, OI)
 * and wires the TimedRobot lifecycle callbacks. Most callbacks are empty
 * placeholders; only autonomousPeriodic currently does anything (encoder
 * debug printing).
 */
public class Robot extends TimedRobot {

    // Robot-wide singletons, created once in robotInit().
    public static Drivetrain drivetrain;
    public static Floop floop;
    public static OI oi;

    @Override
    public void robotInit() {
        //creates new instance of Drivetrain object
        drivetrain = new Drivetrain();
        floop = new Floop();
        //makes new instance of OI (where buttons are connected with commands)
        // NOTE: OI must be created last so the subsystems it binds to exist.
        oi = new OI();
    }

    @Override
    public void robotPeriodic() {
    }

    @Override
    public void disabledInit() {
    }

    @Override
    public void disabledPeriodic() {
    }

    @Override
    public void autonomousInit() {
    }

    @Override
    public void autonomousPeriodic() {
        //prints out the current tick count from the encoder
        //this is using the method made in Drivetrain.java
        System.out.println(drivetrain.getEncoderTicks());
    }

    @Override
    public void teleopInit() {
    }

    @Override
    public void teleopPeriodic() {
    }

    @Override
    public void testPeriodic() {
    }
}
/*
 * (C) Copyright 2015-2016 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Contributors:
 *   ohun@live.cn (夜色)
 */

package com.mpush.core.server;

import com.mpush.api.connection.Connection;
import com.mpush.api.protocol.Command;
import com.mpush.common.MessageDispatcher;
import com.mpush.core.MPushServer;
import com.mpush.core.handler.GatewayKickUserHandler;
import com.mpush.core.handler.GatewayPushHandler;
import com.mpush.netty.udp.UDPChannelHandler;
import com.mpush.netty.udp.NettyUDPConnector;
import com.mpush.tools.Utils;
import com.mpush.tools.config.CC;
import com.mpush.tools.config.CC.mp.net.rcv_buf;
import com.mpush.tools.config.CC.mp.net.snd_buf;
import io.netty.bootstrap.Bootstrap;
import io.netty.channel.ChannelHandler;
import io.netty.channel.ChannelOption;

import static com.mpush.common.MessageDispatcher.POLICY_LOG;

/**
 * Created by ohun on 2015/12/30.
 *
 * Gateway UDP connector: binds the gateway server's UDP port, registers the
 * gateway push/kick handlers, and joins the configured multicast group.
 *
 * @author ohun@live.cn
 */
public final class GatewayUDPConnector extends NettyUDPConnector {

    private UDPChannelHandler channelHandler;
    private MessageDispatcher messageDispatcher;
    private MPushServer mPushServer;

    public GatewayUDPConnector(MPushServer mPushServer) {
        super(CC.mp.net.gateway_server_port);
        this.mPushServer = mPushServer;
        this.messageDispatcher = new MessageDispatcher(POLICY_LOG);
        this.channelHandler = new UDPChannelHandler(messageDispatcher);
    }

    @Override
    public void init() {
        super.init();
        // Register gateway command handlers before the channel starts receiving.
        messageDispatcher.register(Command.GATEWAY_PUSH, () -> new GatewayPushHandler(mPushServer.getPushCenter()));
        messageDispatcher.register(Command.GATEWAY_KICK, () -> new GatewayKickUserHandler(mPushServer.getRouterCenter()));
        channelHandler.setMulticastAddress(Utils.getInetAddress(CC.mp.net.gateway_server_multicast));
        channelHandler.setNetworkInterface(Utils.getLocalNetworkInterface());
    }

    @Override
    protected void initOptions(Bootstrap b) {
        super.initOptions(b);
        b.option(ChannelOption.IP_MULTICAST_LOOP_DISABLED, true);//By default, multicast data sent from this host is also looped back to the local loopback interface at the IP layer; IP_MULTICAST_LOOP controls whether that loopback happens. Disabled here.
        b.option(ChannelOption.IP_MULTICAST_TTL, 255);//IP_MULTICAST_TTL sets the multicast time-to-live; any value in the range 0~255 is allowed.
        //b.option(ChannelOption.IP_MULTICAST_IF, null);//IP_MULTICAST_IF selects the default network interface for outgoing multicast: data is sent from the given interface and other interfaces ignore it; the addr argument is the IP of the desired multicast output interface, INADDR_ANY falls back to the default interface.
        //b.option(ChannelOption.WRITE_BUFFER_WATER_MARK, new WriteBufferWaterMark(32 * 1024, 1024 * 1024));
        // Apply configured socket buffer sizes only when explicitly set (> 0).
        if (snd_buf.gateway_server > 0) b.option(ChannelOption.SO_SNDBUF, snd_buf.gateway_server);
        if (rcv_buf.gateway_server > 0) b.option(ChannelOption.SO_RCVBUF, rcv_buf.gateway_server);
    }

    @Override
    public ChannelHandler getChannelHandler() {
        return channelHandler;
    }

    public Connection getConnection() {
        return channelHandler.getConnection();
    }

    public MessageDispatcher getMessageDispatcher() {
        return messageDispatcher;
    }
}
package cz.cuni.mff.d3s.demo.environment; public enum ActuatorType { ROUTE; }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.runtime.testutils; import org.apache.flink.api.common.JobID; import org.apache.flink.api.common.JobStatus; import org.apache.flink.api.common.time.Deadline; import org.apache.flink.runtime.execution.ExecutionState; import org.apache.flink.runtime.executiongraph.AccessExecutionGraph; import org.apache.flink.runtime.minicluster.MiniCluster; import org.apache.flink.util.FileUtils; import org.apache.flink.util.function.SupplierWithException; import java.io.BufferedInputStream; import java.io.File; import java.io.FileWriter; import java.io.IOException; import java.io.InputStream; import java.io.PrintWriter; import java.io.StringWriter; import java.lang.management.ManagementFactory; import java.lang.management.RuntimeMXBean; import java.time.Duration; import java.time.temporal.ChronoUnit; import java.util.Arrays; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; /** This class contains auxiliary methods for unit tests. */ public class CommonTestUtils { private static final long RETRY_INTERVAL = 100L; /** * Gets the classpath with which the current JVM was started. * * @return The classpath with which the current JVM was started. 
*/ public static String getCurrentClasspath() { RuntimeMXBean bean = ManagementFactory.getRuntimeMXBean(); return bean.getClassPath(); } /** Create a temporary log4j configuration for the test. */ public static File createTemporaryLog4JProperties() throws IOException { File log4jProps = File.createTempFile(FileUtils.getRandomFilename(""), "-log4j.properties"); log4jProps.deleteOnExit(); CommonTestUtils.printLog4jDebugConfig(log4jProps); return log4jProps; } /** * Tries to get the java executable command with which the current JVM was started. Returns * null, if the command could not be found. * * @return The java executable command. */ public static String getJavaCommandPath() { File javaHome = new File(System.getProperty("java.home")); String path1 = new File(javaHome, "java").getAbsolutePath(); String path2 = new File(new File(javaHome, "bin"), "java").getAbsolutePath(); try { ProcessBuilder bld = new ProcessBuilder(path1, "-version"); Process process = bld.start(); if (process.waitFor() == 0) { return path1; } } catch (Throwable t) { // ignore and try the second path } try { ProcessBuilder bld = new ProcessBuilder(path2, "-version"); Process process = bld.start(); if (process.waitFor() == 0) { return path2; } } catch (Throwable tt) { // no luck } return null; } public static void printLog4jDebugConfig(File file) throws IOException { try (PrintWriter writer = new PrintWriter(new FileWriter(file))) { writer.println("rootLogger.level = INFO"); writer.println("rootLogger.appenderRef.console.ref = ConsoleAppender"); writer.println("appender.console.name = ConsoleAppender"); writer.println("appender.console.type = CONSOLE"); writer.println("appender.console.target = SYSTEM_ERR"); writer.println("appender.console.layout.type = PatternLayout"); writer.println( "appender.console.layout.pattern = %d{HH:mm:ss,SSS} %-4r [%t] %-5p %c %x - %m%n"); writer.println("logger.jetty.name = org.eclipse.jetty.util.log"); writer.println("logger.jetty.level = OFF"); 
writer.println("logger.zookeeper.name = org.apache.zookeeper"); writer.println("logger.zookeeper.level = OFF"); writer.flush(); } } public static void waitUntilCondition( SupplierWithException<Boolean, Exception> condition, Deadline timeout) throws Exception { waitUntilCondition(condition, timeout, RETRY_INTERVAL); } public static void waitUntilCondition( SupplierWithException<Boolean, Exception> condition, Deadline timeout, long retryIntervalMillis) throws Exception { waitUntilCondition( condition, timeout, retryIntervalMillis, "Condition was not met in given timeout."); } public static void waitUntilCondition( SupplierWithException<Boolean, Exception> condition, Deadline timeout, String errorMsg) throws Exception { waitUntilCondition(condition, timeout, RETRY_INTERVAL, errorMsg); } public static void waitUntilCondition( SupplierWithException<Boolean, Exception> condition, Deadline timeout, long retryIntervalMillis, String errorMsg) throws Exception { while (timeout.hasTimeLeft() && !condition.get()) { final long timeLeft = Math.max(0, timeout.timeLeft().toMillis()); Thread.sleep(Math.min(retryIntervalMillis, timeLeft)); } if (!timeout.hasTimeLeft()) { throw new TimeoutException(errorMsg); } } public static void waitForAllTaskRunning(MiniCluster miniCluster, JobID jobId) throws Exception { waitForAllTaskRunning(() -> miniCluster.getExecutionGraph(jobId).get(60, TimeUnit.SECONDS)); } public static void waitForAllTaskRunning( SupplierWithException<AccessExecutionGraph, Exception> executionGraphSupplier) throws Exception { waitForAllTaskRunning( executionGraphSupplier, Deadline.fromNow(Duration.of(1, ChronoUnit.MINUTES))); } public static void waitForAllTaskRunning( SupplierWithException<AccessExecutionGraph, Exception> executionGraphSupplier, Deadline timeout) throws Exception { waitUntilCondition( () -> { final AccessExecutionGraph graph = executionGraphSupplier.get(); return graph.getState() == JobStatus.RUNNING && graph.getAllVertices().values().stream() 
.allMatch( jobVertex -> Arrays.stream(jobVertex.getTaskVertices()) .allMatch( task -> task.getExecutionState() == ExecutionState .RUNNING)); }, timeout); } public static void waitUntilJobManagerIsInitialized( SupplierWithException<JobStatus, Exception> jobStatusSupplier) throws Exception { waitUntilJobManagerIsInitialized( jobStatusSupplier, Deadline.fromNow(Duration.of(1, ChronoUnit.MINUTES))); } public static void waitUntilJobManagerIsInitialized( SupplierWithException<JobStatus, Exception> jobStatusSupplier, Deadline timeout) throws Exception { waitUntilCondition(() -> jobStatusSupplier.get() != JobStatus.INITIALIZING, timeout, 20L); } /** Utility class to read the output of a process stream and forward it into a StringWriter. */ public static class PipeForwarder extends Thread { private final StringWriter target; private final InputStream source; public PipeForwarder(InputStream source, StringWriter target) { super("Pipe Forwarder"); setDaemon(true); this.source = source; this.target = target; start(); } @Override public void run() { try { int next; while ((next = source.read()) != -1) { target.write(next); } } catch (IOException e) { // terminate } } } public static boolean isStreamContentEqual(InputStream input1, InputStream input2) throws IOException { if (!(input1 instanceof BufferedInputStream)) { input1 = new BufferedInputStream(input1); } if (!(input2 instanceof BufferedInputStream)) { input2 = new BufferedInputStream(input2); } int ch = input1.read(); while (-1 != ch) { int ch2 = input2.read(); if (ch != ch2) { return false; } ch = input1.read(); } int ch2 = input2.read(); return (ch2 == -1); } }
package aniski.rs.servers;

import aniski.rs.ServerManager;
import aniski.rs.Store;
import aniski.rs.net.RSSocket;
import aniski.rs.net.Stream;
import aniski.rs.utils.ISAACRandomGen;
import aniski.rs.utils.Misc;

import java.io.IOException;
import java.math.BigInteger;
import java.security.SecureRandom;
import java.util.Random;

/**
 * ServerManager implementation for the "Simplicity" server: builds and sends this
 * server's custom login handshake over the RS socket.
 *
 * NOTE(review): the byte-for-byte order of the stream writes below IS the wire
 * protocol the server expects — do not reorder or "clean up" without capturing the
 * server's expectations first.
 */
public class Simplicity extends ServerManager {

    // Target host/port for the login connection.
    private static final String serverAddress = "142.44.136.172";
    private static final int port = 43594;

    // Server's RSA public key used by Stream.doKeys() to encrypt the login block.
    private final BigInteger RSA_MODULUS = new BigInteger("141038977654242498796653256463581947707085475448374831324884224283104317501838296020488428503639086635001378639378416098546218003298341019473053164624088381038791532123008519201622098961063764779454144079550558844578144888226959180389428577531353862575582264379889305154355721898818709924743716570464556076517");
    private final BigInteger RSA_EXPONENT = new BigInteger("65537");

    /**
     * @param stream       outgoing (RSA-encrypted) login payload buffer
     * @param inStream     incoming response buffer
     * @param aStream_847  secondary outgoing buffer (the unencrypted login frame)
     * @param flag         presumably "reconnect" — selects opcode 18 vs 16 below; TODO confirm
     */
    public Simplicity(String username, String password, Stream stream, Stream inStream, Stream aStream_847, Store store, boolean flag, RSSocket rsSocket) throws IOException {
        super(serverAddress, port, username, password, stream, inStream, aStream_847, store, flag, rsSocket);
    }

    // Alphabet for random identifier generation (see 'sb' below).
    private static final String AB = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz";
    private static SecureRandom rnd = new SecureRandom();

    /**
     * Performs the login handshake: sends the initial opcode-14 request, reads the
     * server seed, assembles the RSA block (session keys, identifiers, password,
     * MAC string, 2FA pin) and the outer login frame, then seeds the outgoing
     * ISAAC cipher and queues the frame. Closes the socket when done.
     */
    @Override
    public void initSocketRequest() throws IOException {
        long l = Misc.longForName(getUsername());
        int i = (int)(l >> 16L & 0x1FL); // name-hash nibble sent with the initial request

        /** Init Login Connection **/
        getStream().currentOffset = 0;
        getStream().writeByte(14); // login-request opcode
        getStream().writeByte(i);
        getRSSocket().queueBytes(2, getStream().buffer);

        // Discard 8 ignored bytes, then read the response code.
        for (int j = 0; j < 8; j++)
            getRSSocket().read();

        int responseCode = getRSSocket().read();
        /** RESPONSE MUST RETURN 0 **/
        if (responseCode == 0) {
            getRSSocket().flushInputStream(getInStream().buffer, 8);
            getInStream().currentOffset = 0;
            long aLong1215 = getInStream().readQWord(); // 8-byte server session seed

            // ISAAC seed: two client-random ints + the server seed split into two ints.
            int[] ai = new int[4];
            ai[0] = (int)(Math.random() * 9.9999999E7D);
            ai[1] = (int)(Math.random() * 9.9999999E7D);
            ai[2] = (int)(aLong1215 >> 32L);
            ai[3] = (int)aLong1215;

            // Inner (RSA) block: opcode 10, the four seed ints, then a constant.
            getStream().currentOffset = 0;
            getStream().writeByte(10);
            getStream().writeInt(ai[0]);
            getStream().writeInt(ai[1]);
            getStream().writeInt(ai[2]);
            getStream().writeInt(ai[3]);
            getStream().writeInt(350); // presumably a client UID/version constant — TODO confirm

            //MAC ADDRESS 08-60-6E-7C-33-69
            //This server does not check if MAC is valid, generating random numbers for faster operation
            Random r = new Random();
            int low = 10;
            int high = 99;
            String macAddress = (r.nextInt(high-low) + low) + "-" + (r.nextInt(high-low) + low) + "-" + (r.nextInt(high-low) + low) + "-" + (r.nextInt(high-low) + low) + "-" + (r.nextInt(high-low) + low) + "-" + (r.nextInt(high-low) + low);

            //Another means of protection that the server is expecting to see
            // NOTE(review): 'sb' is built but never written to the stream — dead code, kept as-is.
            StringBuilder sb = new StringBuilder(12);
            for(int len = 0; len < 12; len++)
                sb.append(AB.charAt(rnd.nextInt(AB.length())));

            // NOTE(review): r.nextInt(254) + 'a' is int arithmetic, so this appends the NUMBER
            // (e.g. "153"), not a letter, producing a string far longer than 12 chars.
            // Looks like a bug ((char) cast missing) — confirm against what the server accepts.
            StringBuilder stringBuilder = new StringBuilder();
            for(int c = 0; c < 12; c++)
                stringBuilder.append(r.nextInt(254) + 'a');

            getStream().writeString(stringBuilder.toString());
            getStream().writeString(getPassword());
            getStream().writeString(macAddress);
            getStream().writeString(" ");
            getStream().writeShort(222);
            getStream().writeByte(0);
            // RSA-encrypt everything written to this stream so far.
            getStream().doKeys(RSA_MODULUS, RSA_EXPONENT);

            // Outer login frame: opcode 18 (flag set) or 16, then payload length.
            getInitialStream().currentOffset = 0;
            getInitialStream().writeByte(isFlag() ? 18 : 16);
            String currentPin = "";
            getInitialStream().writeByte(getStream().currentOffset + 36 + 1 + 1 + 2 + (currentPin.length()) + 1);
            getInitialStream().writeByte(255);
            getInitialStream().writeShort(14);
            //int attack = Store.currentKey += 1;
            getInitialStream().writeByte(0);

            //Client is using Reflection to generate identifier for current client version, based on class files & cache files
            //Grabbed string from client memory, converting to bytes and sending to the server
            byte[] bytes = "d41d8cd98f00b204e9800998ecf8427e".getBytes();
            getInitialStream().writeByte(bytes.length);
            getInitialStream().writeBytes(bytes, bytes.length, 0);

            //EXTRA - This server supports 2FA and is required within login stream
            /*Random rnd = new Random();
            int number = rnd.nextInt(999999);
            String twoFactorPin = String.format("%06d", number);*/
            String twoFactorPin = "";
            setTwoFactor(twoFactorPin);

            getInitialStream().writeShort(0);
            getInitialStream().writeString(currentPin);
            getInitialStream().writeString(twoFactorPin);
            for (int i2 = 0; i2 < 9; i2++)
                getInitialStream().writeInt(0); // 9 reserved/zero ints expected by the server

            // Append the RSA-encrypted inner block, then seed the outgoing ISAAC cipher.
            getInitialStream().writeBytes(getStream().buffer, getStream().currentOffset, 0);
            getStream().encryption = new ISAACRandomGen(ai);
            for (int j2 = 0; j2 < 4; j2++)
                ai[j2] = ai[j2] + 50; // standard ISAAC re-seed offset for the opposite direction

            getRSSocket().queueBytes(getInitialStream().currentOffset, getInitialStream().buffer);
            setResponseCode(getRSSocket().read());
            if(getResponseCode() != -1 && getResponseCode() != 30) {
                // TextEditor.writePasswordToFile(String.valueOf(attack), String.valueOf(attack));
            }
        }
        getRSSocket().close();
    }
}
/*
 * Copyright (c) 2002-2021 Gargoyle Software Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.gargoylesoftware.htmlunit.html;

import org.junit.Test;
import org.junit.runner.RunWith;

import com.gargoylesoftware.htmlunit.BrowserRunner;
import com.gargoylesoftware.htmlunit.BrowserRunner.Alerts;
import com.gargoylesoftware.htmlunit.WebDriverTestCase;

/**
 * Tests for {@link DomNode}.
 *
 * @author Chris Erskine
 * @author Ahmed Ashour
 */
@RunWith(BrowserRunner.class)
public class DomNode2Test extends WebDriverTestCase {

    /**
     * Test for Bug #1253.
     * Appending an element to itself must throw, leaving it childless.
     *
     * @throws Exception on test failure
     */
    @Test
    @Alerts({"exception", "0"})
    public void appendChild_recursive() throws Exception {
        final String html = "<html><head>\n"
                + "<script>\n"
                + LOG_TITLE_FUNCTION
                + "function test() {\n"
                + "  var e = document.createElement('div');\n"
                + "  try {\n"
                + "    log(e.appendChild(e) === e);\n"
                + "  } catch(e) {log('exception');}\n"
                + "  log(e.childNodes.length);\n"
                + "}\n"
                + "</script>\n"
                + "</head><body onload='test()'>\n"
                + "</body></html>";

        loadPageVerifyTitle2(html);
    }

    /**
     * Test for Bug #1253.
     * Appending a parent to its own child must throw; the original
     * parent/child relationship stays intact.
     *
     * @throws Exception on test failure
     */
    @Test
    @Alerts({"true", "exception", "1", "0"})
    public void appendChild_recursive_parent() throws Exception {
        final String html = "<html><head>\n"
                + "<script>\n"
                + LOG_TITLE_FUNCTION
                + "function test() {\n"
                + "  var e1 = document.createElement('div');\n"
                + "  var e2 = document.createElement('div');\n"
                + "  try {\n"
                + "    log(e1.appendChild(e2) === e2);\n"
                + "    log(e2.appendChild(e1) === e1);\n"
                + "  } catch(e) {log('exception');}\n"
                + "  log(e1.childNodes.length);\n"
                + "  log(e2.childNodes.length);\n"
                + "}\n"
                + "</script>\n"
                + "</head><body onload='test()'>\n"
                + "</body></html>";

        loadPageVerifyTitle2(html);
    }

    /**
     * Verifies that ownerDocument of body, an element, and a text node
     * is the enclosing document.
     *
     * @throws Exception on test failure
     */
    @Test
    @Alerts({"true", "true", "true"})
    public void ownerDocument() throws Exception {
        final String content = "<html>\n"
                + "<head>\n"
                + "  <script>\n"
                + LOG_TITLE_FUNCTION
                + "  function test() {\n"
                + "    log(document == document.body.ownerDocument);\n"
                + "    log(document == document.getElementById('foo').ownerDocument);\n"
                + "    log(document == document.body.firstChild.ownerDocument);\n"
                + "  }\n"
                + "  </script>\n"
                + "</head>\n"
                + "<body onload='test()'>bla\n"
                + "<div id='foo'>bla</div>\n"
                + "</body>\n"
                + "</html>";

        loadPageVerifyTitle2(content);
    }
}
package com.entrevista.service; import com.entrevista.entity.Loan; import org.springframework.http.ResponseEntity; import java.util.List; public interface LoanService { List<Loan> findAll(); Loan findActiveByClient(Long idClient); ResponseEntity<Loan> saveLoan(Loan loan); }
package module486packageJava0; import java.lang.Integer; public class Foo175 { Integer int0; Integer int1; public void foo0() { new module486packageJava0.Foo174().foo5(); } public void foo1() { foo0(); } public void foo2() { foo1(); } public void foo3() { foo2(); } public void foo4() { foo3(); } public void foo5() { foo4(); } }
/* Copyright (c) 2001-2011, The HSQL Development Group * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * Neither the name of the HSQL Development Group nor the names of its * contributors may be used to endorse or promote products derived from this * software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL HSQL DEVELOPMENT GROUP, HSQLDB.ORG, * OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ package org.hsqldb; import org.hsqldb.persist.CachedObject; import org.hsqldb.persist.PersistentStore; /** * Manages rows involved in transactions * * @author Fred Toussi (fredt@users dot sourceforge.net) * @version 2.2.7 * @since 2.0.0 */ public interface TransactionManager { // public int LOCKS = 0; public int MVLOCKS = 1; public int MVCC = 2; // public int ACTION_READ = 0; public int ACTION_DUP = 1; public int ACTION_REF = 2; public long getGlobalChangeTimestamp(); public RowAction addDeleteAction(Session session, Table table, Row row, int[] colMap); public void addInsertAction(Session session, Table table, PersistentStore store, Row row, int[] changedColumns); /** * add session to the end of queue when a transaction starts * (depending on isolation mode) */ public void beginAction(Session session, Statement cs); public void beginActionResume(Session session); public void beginTransaction(Session session); // functional unit - accessibility of rows public boolean canRead(Session session, Row row, int mode, int[] colMap); public boolean canRead(Session session, int id, int mode); public boolean commitTransaction(Session session); public void completeActions(Session session); public int getTransactionControl(); public boolean isMVRows(); public boolean isMVCC(); public boolean prepareCommitActions(Session session); public void rollback(Session session); public void rollbackAction(Session session); public void rollbackSavepoint(Session session, int index); public void setTransactionControl(Session session, int mode); /** * add transaction info to a row just loaded from the cache. called only * for CACHED tables */ public void setTransactionInfo(CachedObject object); /** * remove the transaction info */ public void removeTransactionInfo(CachedObject object); }
package com.pointcx.jvm.jdk7; import java.lang.invoke.MethodHandle; import java.lang.invoke.MethodHandles; import java.lang.invoke.MethodType; public class ReflectUtil { ////////////////////////////////////////////////////////////////////////// //// Invoke Methods public static <T> T invokeStaticMethod(Class targetCls, String methodName, Class<T> returnType){ return invokeStaticMethod(targetCls, methodName, returnType, null, (Object[])null); } public static <T> T invokeStaticMethod(Class targetCls, String methodName, Class<T> returnType, Class[] paramTypes, Object ... params) { try { MethodType mt = null; if(paramTypes!=null) { mt = MethodType.methodType(returnType, paramTypes); }else{ mt = MethodType.methodType(returnType); } MethodHandle methodHandle = MethodHandles.lookup().findStatic(targetCls, methodName, mt); T result = (T) methodHandle.invokeWithArguments(params); return result; } catch (Throwable err) { throw new RuntimeException(err); } } public static <T> T invoke(Object target, String methodName, Class<T> returnType){ return invoke(target, methodName, returnType, null, (Object[])null); } public static <T> T invoke(Object target, String methodName, Class<T> returnType, Class[] paramTypes, Object... params) { try { MethodType mt = null; if(paramTypes!=null) { mt = MethodType.methodType(returnType, paramTypes); }else{ mt = MethodType.methodType(returnType); } MethodHandle methodHandle = MethodHandles.lookup().findVirtual(target.getClass(), methodName, mt); MethodHandle targetMethod = methodHandle.bindTo(target); return (T) targetMethod.invokeWithArguments(params); } catch (Throwable throwable) { throw new RuntimeException(throwable); } } ////////////////////////////////////////////////////////////////////////// //// Constructors public static <T> T newInstance(String className, ClassLoader classLoader, Class[] paramTypes, Object ... 
args){ try { return newInstance(classLoader.loadClass(className), paramTypes, args); } catch (Throwable e) { throw new RuntimeException(e); } } public static <T> T newInstance(String className, ClassLoader classLoader){ try { return (T) newInstance(classLoader.loadClass(className)); } catch (Throwable e) { throw new RuntimeException(e); } } public static <T> T newInstance(Class<T> type){ return newInstance(type, null, (Object[])null); } public static <T> T newInstance(Class type, Class[] paramTypes, Object ... args){ MethodType mt = null; if(paramTypes!=null) { mt = MethodType.methodType(void.class, paramTypes); }else{ mt = MethodType.methodType(void.class); } try { MethodHandle constructor = MethodHandles.lookup().findConstructor(type, mt); return (T) constructor.invokeWithArguments(args); } catch (Throwable throwable) { throw new RuntimeException(throwable); } } }
package db.connection.mysql.connection.service; import java.util.List; import db.connection.mysql.connection.dao.DepartmentDAO; import db.connection.mysql.connection.model.Department; public class DepartmentService { private DepartmentDAO departmentDAO; public DepartmentService(DepartmentDAO departmentDAO) { this.departmentDAO = departmentDAO; } public List<Department> kayıtBul(){ return departmentDAO.getAll(); } }