code
stringlengths 25
201k
| docstring
stringlengths 19
96.2k
| func_name
stringlengths 0
235
| language
stringclasses 1
value | repo
stringlengths 8
51
| path
stringlengths 11
314
| url
stringlengths 62
377
| license
stringclasses 7
values |
|---|---|---|---|---|---|---|---|
/**
 * Attempts to acquire the lock on the backing file without blocking.
 *
 * <p>Lazily initializes the underlying output stream on first use. Any failure while
 * locking (e.g. an overlapping lock already held within this JVM) is reported as
 * {@code false} rather than propagated.
 *
 * @return true if the lock was successfully acquired
 * @throws IOException if initializing the lock file fails
 */
public boolean tryLock() throws IOException {
    if (outputStream == null) {
        init();
    }
    boolean acquired;
    try {
        lock = outputStream.getChannel().tryLock();
        acquired = (lock != null);
    } catch (Exception ignored) {
        // locking failed (e.g. OverlappingFileLockException) -> report as not acquired
        acquired = false;
    }
    return acquired;
}
|
Try to acquire a lock on the locking file. This method returns immediately, whether or not
the lock was acquired.
@return True if successfully acquired the lock
@throws IOException If the file path is invalid
|
tryLock
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/FileLock.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/FileLock.java
|
Apache-2.0
|
/**
 * Releases the file lock, provided one is held and its channel is still open.
 *
 * @throws IOException if releasing the lock fails
 */
public void unlock() throws IOException {
    if (lock == null) {
        return;
    }
    if (lock.channel().isOpen()) {
        lock.release();
    }
}
|
Release the file lock.
@throws IOException If the FileChannel is closed
|
unlock
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/FileLock.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/FileLock.java
|
Apache-2.0
|
/**
 * Releases the file lock, closes the lock's channel and the backing output stream,
 * and finally (best effort, even if an earlier step threw) deletes the lock file.
 *
 * @throws IOException if releasing the lock or closing the resources fails
 */
public void unlockAndDestroy() throws IOException {
    try {
        unlock();
        // close the channel backing the lock before dropping the reference,
        // so the OS-level lock is fully released
        if (lock != null) {
            lock.channel().close();
            lock = null;
        }
        if (outputStream != null) {
            outputStream.close();
            outputStream = null;
        }
    } finally {
        // best-effort delete of the lock file; runs even when closing failed above.
        // NOTE(review): delete() may legitimately fail if another FileLock still holds the file.
        this.file.delete();
    }
}
|
Release the file lock, close the FileChannel and FileOutputStream, and then try to delete the
locking file if no other file lock still needs it — meaning the lock will not be used
anymore.
@throws IOException If an I/O error occurs
|
unlockAndDestroy
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/FileLock.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/FileLock.java
|
Apache-2.0
|
/**
 * Checks whether this FileLock currently holds a valid lock.
 *
 * @return true if a lock is held and still valid
 */
public boolean isValid() {
    return lock != null && lock.isValid();
}
|
Check whether a FileLock is actually holding the lock.
@return True if it is actually holding the lock
|
isValid
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/FileLock.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/FileLock.java
|
Apache-2.0
|
/**
 * Normalizes a file name by removing every character that is not a word character
 * (letter, digit, underscore), a forward slash, or a backslash.
 *
 * @param fileName the original file name
 * @return the file name with all other characters stripped
 */
private static String normalizeFileName(String fileName) {
    // keep only [A-Za-z0-9_], '/' and '\'
    final String disallowed = "[^\\w/\\\\]";
    return fileName.replaceAll(disallowed, "");
}
|
Normalize the file name, which only allows slash, backslash, digits and letters.
@param fileName Original file name
@return File name with illegal characters stripped
|
normalizeFileName
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/FileLock.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/FileLock.java
|
Apache-2.0
|
/**
 * Writes the buffer's remaining bytes completely to the channel.
 *
 * <p>{@link WritableByteChannel#write} may perform a partial write, so this loops
 * until no bytes remain in {@code src}.
 *
 * @param channel the channel to write to
 * @param src the buffer whose remaining bytes are written
 * @throws IOException if writing to the channel fails
 */
public static void writeCompletely(WritableByteChannel channel, ByteBuffer src)
        throws IOException {
    boolean done = !src.hasRemaining();
    while (!done) {
        channel.write(src);
        done = !src.hasRemaining();
    }
}
|
Writes the remaining bytes of the given buffer completely to the channel, looping over partial writes.
|
writeCompletely
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/FileUtils.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/FileUtils.java
|
Apache-2.0
|
/**
 * Lists the direct entries of the given directory, closing the underlying directory
 * stream deterministically (no resource leak).
 *
 * @param directory the directory to list
 * @return the directory's entries as an array
 * @throws IOException if the directory cannot be opened or read
 */
public static java.nio.file.Path[] listDirectory(java.nio.file.Path directory)
        throws IOException {
    try (Stream<java.nio.file.Path> entries = Files.list(directory)) {
        return entries.toArray(length -> new java.nio.file.Path[length]);
    }
}
|
Lists the given directory in a resource-leak-safe way.
|
listDirectory
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/FileUtils.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/FileUtils.java
|
Apache-2.0
|
/**
 * Removes the given file, or directory recursively, tolerating concurrent deletion attempts.
 *
 * <p>A missing file/directory is treated as success. The actual deletion is delegated to
 * {@code deleteFileOrDirectoryInternal} via {@code guardIfNotThreadSafe} — presumably a
 * guard that serializes deletes on file systems whose delete is not thread-safe
 * (TODO confirm against the helper's definition).
 *
 * @param file the file or directory to delete; must not be null
 * @throws IOException if the deletion fails, e.g. due to missing access/write permissions
 */
public static void deleteFileOrDirectory(File file) throws IOException {
    checkNotNull(file, "file");
    guardIfNotThreadSafe(FileUtils::deleteFileOrDirectoryInternal, file);
}
|
Removes the given file or directory recursively.
<p>If the file or directory does not exist, this does not throw an exception, but simply does
nothing. It considers the fact that a file-to-be-deleted is not present a success.
<p>This method is safe against other concurrent deletion attempts.
@param file The file or directory to delete.
@throws IOException Thrown if the directory could not be cleaned for some reason, for example
due to missing access/write permissions.
|
deleteFileOrDirectory
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/FileUtils.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/FileUtils.java
|
Apache-2.0
|
/**
 * Deletes the given directory recursively, tolerating concurrent deletion attempts.
 *
 * <p>A missing directory is treated as success. The actual deletion is delegated to
 * {@code deleteDirectoryInternal} via {@code guardIfNotThreadSafe} — presumably a guard
 * that serializes deletes on file systems whose delete is not thread-safe
 * (TODO confirm against the helper's definition).
 *
 * @param directory the directory to delete; must not be null
 * @throws IOException if the path is not a directory, or deletion fails (e.g. permissions)
 */
public static void deleteDirectory(File directory) throws IOException {
    checkNotNull(directory, "directory");
    guardIfNotThreadSafe(FileUtils::deleteDirectoryInternal, directory);
}
|
Deletes the given directory recursively.
<p>If the directory does not exist, this does not throw an exception, but simply does
nothing. It considers the fact that a directory-to-be-deleted is not present a success.
<p>This method is safe against other concurrent deletion attempts.
@param directory The directory to be deleted.
@throws IOException Thrown if the given file is not a directory, or if the directory could
not be deleted for some reason, for example due to missing access/write permissions.
|
deleteDirectory
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/FileUtils.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/FileUtils.java
|
Apache-2.0
|
/**
 * Deletes the given directory recursively, swallowing any exception that occurs.
 *
 * <p>Identical to {@link FileUtils#deleteDirectory(File)} except that failures are
 * silently ignored, so the deletion may be left quietly incomplete. A null argument
 * is a no-op.
 *
 * @param directory the directory to delete, may be null
 */
public static void deleteDirectoryQuietly(File directory) {
    if (directory != null) {
        // best effort: do not report a failed delete
        try {
            deleteDirectory(directory);
        } catch (Exception ignored) {
        }
    }
}
|
Deletes the given directory recursively, not reporting any I/O exceptions that occur.
<p>This method is identical to {@link FileUtils#deleteDirectory(File)}, except that it
swallows all exceptions and may leave the job quietly incomplete.
@param directory The directory to delete.
|
deleteDirectoryQuietly
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/FileUtils.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/FileUtils.java
|
Apache-2.0
|
/**
 * Lists the {@code directory} recursively (following symbolic links) and returns the
 * files that satisfy the {@code fileFilter}.
 *
 * @param directory the directory to be listed; must exist and be a directory
 * @param fileFilter a filter deciding which files are returned
 * @return a collection of matching paths
 * @throws IllegalArgumentException if the directory does not exist or is not a directory
 * @throws IOException if an I/O error occurs while walking the file tree
 */
public static Collection<java.nio.file.Path> listFilesInDirectory(
        final java.nio.file.Path directory, final Predicate<java.nio.file.Path> fileFilter)
        throws IOException {
    checkNotNull(directory, "directory");
    checkNotNull(fileFilter, "fileFilter");
    if (!Files.exists(directory)) {
        // fixed typo in the message: "dose" -> "does"
        throw new IllegalArgumentException(
                String.format("The directory %s does not exist.", directory));
    }
    if (!Files.isDirectory(directory)) {
        throw new IllegalArgumentException(
                String.format("The %s is not a directory.", directory));
    }
    final FilterFileVisitor filterFileVisitor = new FilterFileVisitor(fileFilter);
    Files.walkFileTree(
            directory,
            EnumSet.of(FileVisitOption.FOLLOW_LINKS),
            Integer.MAX_VALUE,
            filterFileVisitor);
    return filterFileVisitor.getFiles();
}
|
List the {@code directory} recursively and return the files that satisfy the {@code
fileFilter}.
@param directory the directory to be listed
@param fileFilter a file filter
@return a collection of {@code File}s
@throws IOException if an I/O error occurs while listing the files in the given directory
|
listFilesInDirectory
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/FileUtils.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/FileUtils.java
|
Apache-2.0
|
/**
 * Computes the total size of all regular files under the given path, including
 * files in subdirectories. A null path yields 0.
 *
 * @param path the root path from which the calculation starts, may be null
 * @param options visitation options for the directory traversal
 * @return the sum of the file sizes in bytes
 * @throws IOException if the file tree cannot be walked
 */
public static long getDirectoryFilesSize(java.nio.file.Path path, FileVisitOption... options)
        throws IOException {
    if (path == null) {
        return 0L;
    }
    try (Stream<java.nio.file.Path> walked = Files.walk(path, options)) {
        // non-files contribute 0, which is equivalent to filtering them out
        return walked.mapToLong(
                        p -> {
                            final File f = p.toFile();
                            return f.isFile() ? f.length() : 0L;
                        })
                .sum();
    }
}
|
Computes the sum of sizes of all files in the directory and its subdirectories.
@param path the root path from which to start the calculation.
@param options visitation options for the directory traversal.
@return sum of sizes of all files in the directory and its subdirectories.
@throws IOException if the size cannot be determined.
|
getDirectoryFilesSize
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/FileUtils.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/FileUtils.java
|
Apache-2.0
|
/**
 * Makes the given path absolute by resolving it against the working directory of the
 * path's file system; an already-absolute path is returned unchanged.
 *
 * @param pathToAbsolutize the path to absolutize if it is relative
 * @return an absolute path
 * @throws IOException if the file system's working directory cannot be determined
 */
public static Path absolutizePath(Path pathToAbsolutize) throws IOException {
    if (pathToAbsolutize.isAbsolute()) {
        return pathToAbsolutize;
    }
    final FileSystem fileSystem = pathToAbsolutize.getFileSystem();
    return new Path(fileSystem.getWorkingDirectory(), pathToAbsolutize);
}
|
Absolutize the given path if it is relative.
@param pathToAbsolutize path which is being absolutized if it is a relative path
@return the absolutized path
|
absolutizePath
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/FileUtils.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/FileUtils.java
|
Apache-2.0
|
/**
 * Relativizes the given path against the base path if it is absolute; a relative
 * path is returned unchanged.
 *
 * @param basePath the path to relativize against
 * @param pathToRelativize the path to relativize if it is absolute
 * @return the relativized path
 */
public static java.nio.file.Path relativizePath(
        java.nio.file.Path basePath, java.nio.file.Path pathToRelativize) {
    if (!pathToRelativize.isAbsolute()) {
        return pathToRelativize;
    }
    return basePath.relativize(pathToRelativize);
}
|
Relativize the given path with respect to the given base path if it is absolute.
@param basePath to relativize against
@param pathToRelativize path which is being relativized if it is an absolute path
@return the relativized path
|
relativizePath
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/FileUtils.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/FileUtils.java
|
Apache-2.0
|
/**
 * Returns the current working directory, as given by the {@code user.dir} system property.
 *
 * @return the current working directory
 */
public static java.nio.file.Path getCurrentWorkingDirectory() {
    final String workingDirectory = System.getProperty("user.dir");
    return Paths.get(workingDirectory);
}
|
Returns the current working directory as specified by the {@code user.dir} system property.
@return current working directory
|
getCurrentWorkingDirectory
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/FileUtils.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/FileUtils.java
|
Apache-2.0
|
/**
 * Checks whether the given file has a jar extension.
 *
 * <p>The extension is computed by shaded Guava's {@code Files.getFileExtension}, i.e.
 * the substring after the last dot of the file name (empty if there is no dot), and is
 * compared against the {@code JAR_FILE_EXTENSION} constant — presumably {@code "jar"};
 * confirm against the constant's declaration.
 *
 * @param file the path to check
 * @return true if the file has a jar extension, otherwise false
 */
public static boolean isJarFile(java.nio.file.Path file) {
    return JAR_FILE_EXTENSION.equals(
            org.apache.flink.shaded.guava33.com.google.common.io.Files.getFileExtension(
                    file.toString()));
}
|
Checks whether the given file has a jar extension.
@param file to check
@return true if the file has a jar extension, otherwise false
|
isJarFile
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/FileUtils.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/FileUtils.java
|
Apache-2.0
|
/**
 * Removes the extension (the part after the last dot) from the file name,
 * including the dot itself. A name without an extension is returned unchanged.
 *
 * @param fileName the file name to strip
 * @return the file name without its extension
 */
public static String stripFileExtension(String fileName) {
    final String extension =
            org.apache.flink.shaded.guava33.com.google.common.io.Files.getFileExtension(
                    fileName);
    if (extension.isEmpty()) {
        return fileName;
    }
    // cut just before the dot that precedes the trailing extension
    return fileName.substring(0, fileName.lastIndexOf(extension) - 1);
}
|
Remove the extension of the file name.
@param fileName to strip
@return the file name without extension
|
stripFileExtension
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/FileUtils.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/FileUtils.java
|
Apache-2.0
|
/**
 * Resolves the first symbolic link found while walking from the given path up to the
 * root, and returns the path with that link replaced by its real target; if no
 * component is a symbolic link, the original path is returned unchanged.
 *
 * @param path the original path
 * @return the path with any symbolic-link prefix replaced by its real target
 * @throws IOException if resolving a symbolic link fails
 */
public static java.nio.file.Path getTargetPathIfContainsSymbolicPath(java.nio.file.Path path)
        throws IOException {
    java.nio.file.Path current = path;
    java.nio.file.Path suffix = Paths.get("");
    while (current != null && current.getFileName() != null) {
        if (Files.isSymbolicLink(current)) {
            // splice the accumulated suffix onto the link's real target
            java.nio.file.Path resolved = current.toRealPath();
            return Paths.get(resolved.toString(), suffix.toString());
        }
        // not a link: push this component onto the suffix and step to the parent
        suffix = Paths.get(current.getFileName().toString(), suffix.toString());
        current = current.getParent();
    }
    return path;
}
|
Get a target path(the path that replaced symbolic links with linked path) if the original
path contains symbolic path, return the original path otherwise.
@param path the original path.
@return the path that replaced symbolic links with real path.
|
getTargetPathIfContainsSymbolicPath
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/FileUtils.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/FileUtils.java
|
Apache-2.0
|
/**
 * Loads the named class, routing any throwable raised during loading through
 * {@code classLoadingExceptionHandler} before rethrowing it unchanged.
 *
 * <p>The actual loading is delegated to {@link #loadClassWithoutExceptionHandling}
 * inside the per-class loading lock, preserving parallel-capable class loading.
 */
@Override
public final Class<?> loadClass(String name, boolean resolve) throws ClassNotFoundException {
    try {
        synchronized (getClassLoadingLock(name)) {
            return loadClassWithoutExceptionHandling(name, resolve);
        }
    } catch (Throwable classLoadingException) {
        // let the handler observe the failure (e.g. for logging), then propagate as-is
        classLoadingExceptionHandler.accept(classLoadingException);
        throw classLoadingException;
    }
}
|
This class loader accepts a custom handler if an exception occurs in {@link #loadClass(String,
boolean)}.
|
loadClass
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/FlinkUserCodeClassLoader.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/FlinkUserCodeClassLoader.java
|
Apache-2.0
|
/**
 * Loads the class via {@code super.loadClass(name, resolve)} without the exception
 * handling performed by {@link #loadClass(String, boolean)}.
 *
 * <p>Subclasses should override this method instead of {@code loadClass}, so the
 * exception-handler wrapping applies uniformly.
 */
protected Class<?> loadClassWithoutExceptionHandling(String name, boolean resolve)
        throws ClassNotFoundException {
    return super.loadClass(name, resolve);
}
|
Same as {@link #loadClass(String, boolean)} but without exception handling.
<p>Extending concrete class loaders should implement this instead of {@link
#loadClass(String, boolean)}.
|
loadClassWithoutExceptionHandling
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/FlinkUserCodeClassLoader.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/FlinkUserCodeClassLoader.java
|
Apache-2.0
|
/**
 * Parses the textual resolve order, case-insensitively.
 *
 * @param resolveOrder either "parent-first" or "child-first" (any case)
 * @return the matching resolve order constant
 * @throws IllegalArgumentException if the string matches neither order
 */
public static ResolveOrder fromString(String resolveOrder) {
    if (resolveOrder.equalsIgnoreCase("parent-first")) {
        return PARENT_FIRST;
    }
    if (resolveOrder.equalsIgnoreCase("child-first")) {
        return CHILD_FIRST;
    }
    throw new IllegalArgumentException("Unknown resolve order: " + resolveOrder);
}
|
Class resolution order for Flink URL {@link ClassLoader}.
|
fromString
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/FlinkUserCodeClassLoaders.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/FlinkUserCodeClassLoaders.java
|
Apache-2.0
|
/**
 * Creates a new {@code ParentFirstClassLoader} over the same URLs, the same parent,
 * and the same class-loading exception handler as this instance.
 *
 * @return an independent class loader with identical configuration
 */
@Override
public MutableURLClassLoader copy() {
    return new ParentFirstClassLoader(getURLs(), getParent(), classLoadingExceptionHandler);
}
|
Regular URLClassLoader that first loads from the parent and only after that from the URLs.
|
copy
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/FlinkUserCodeClassLoaders.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/FlinkUserCodeClassLoaders.java
|
Apache-2.0
|
/**
 * Registers a tolerance rule: for the given class, the local serialVersionUID may be
 * used to deserialize streams written with any of the listed serialVersionUIDs.
 *
 * <p>Workaround for uncontrolled serialVersionUID changes (e.g. FLINK-36318).
 *
 * @param className fully qualified name of the affected class
 * @param localVersionUID serialVersionUID of the local class
 * @param streamVersionUIDs serialVersionUIDs accepted from the stream
 */
void addVersionsMatch(
        String className, long localVersionUID, List<Long> streamVersionUIDs) {
    final Map<Long, List<Long>> rulesForClass =
            supportedSerialVersionUidsPerClass.computeIfAbsent(className, key -> new HashMap<>());
    rulesForClass.put(localVersionUID, streamVersionUIDs);
}
|
Workaround for bugs like e.g. FLINK-36318 where we serialize a class into a snapshot and then
its serialVersionUID is changed in an uncontrolled way. This lets us deserialize the old
snapshot assuming the binary representation of the faulty class has not changed.
|
addVersionsMatch
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/InstantiationUtil.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/InstantiationUtil.java
|
Apache-2.0
|
/**
 * Checks whether the local class version may deserialize a stream written with a
 * different serialVersionUID, according to the registered tolerance rules.
 *
 * @param className fully qualified name of the class being deserialized
 * @param localVersionUID serialVersionUID of the local class
 * @param streamVersionUID serialVersionUID found in the stream
 * @return true if the mismatch is explicitly tolerated
 */
boolean shouldTolerateSerialVersionMismatch(
        String className, long localVersionUID, long streamVersionUID) {
    final Map<Long, List<Long>> rulesForClass =
            supportedSerialVersionUidsPerClass.getOrDefault(className, Collections.emptyMap());
    final List<Long> toleratedStreamUids =
            rulesForClass.getOrDefault(localVersionUID, Collections.emptyList());
    return toleratedStreamUids.contains(streamVersionUID);
}
|
Checks if the local version of the given class can safely deserialize the class of a
different version from the object stream.
|
shouldTolerateSerialVersionMismatch
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/InstantiationUtil.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/InstantiationUtil.java
|
Apache-2.0
|
/**
 * Checks whether any tolerance rules were registered for the given class, allowing an
 * early exit before the (more expensive) local class lookup.
 *
 * @param className fully qualified class name
 * @return true if at least one rule exists for the class
 */
boolean haveRulesForClass(String className) {
    return supportedSerialVersionUidsPerClass.containsKey(className);
}
|
Checks if there are any rules for the given class. This lets us decide early if we need
to look up the local class.
|
haveRulesForClass
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/InstantiationUtil.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/InstantiationUtil.java
|
Apache-2.0
|
/**
 * Reads the class descriptor from the stream and, when the serialVersionUID differs
 * from the local class's but the mismatch is explicitly tolerated by the handler,
 * substitutes the local descriptor so deserialization proceeds.
 *
 * <p>Workaround for classes whose serialVersionUID changed in an uncontrolled way while
 * their binary layout stayed compatible.
 */
@Override
protected ObjectStreamClass readClassDescriptor()
        throws IOException, ClassNotFoundException {
    ObjectStreamClass streamClassDescriptor = super.readClassDescriptor();
    final Class localClass = resolveClass(streamClassDescriptor);
    final String name = localClass.getName();
    // only pay for the ObjectStreamClass lookup if rules exist for this class
    if (versionMismatchHandler.haveRulesForClass(name)) {
        final ObjectStreamClass localClassDescriptor = ObjectStreamClass.lookup(localClass);
        if (localClassDescriptor != null
                && localClassDescriptor.getSerialVersionUID()
                        != streamClassDescriptor.getSerialVersionUID()) {
            if (versionMismatchHandler.shouldTolerateSerialVersionMismatch(
                    name,
                    localClassDescriptor.getSerialVersionUID(),
                    streamClassDescriptor.getSerialVersionUID())) {
                LOG.warn(
                        "Ignoring serialVersionUID mismatch for class {}; was {}, now {}.",
                        streamClassDescriptor.getName(),
                        streamClassDescriptor.getSerialVersionUID(),
                        localClassDescriptor.getSerialVersionUID());
                // swap in the local descriptor so the stream deserializes with it
                streamClassDescriptor = localClassDescriptor;
            }
        }
    }
    return streamClassDescriptor;
}
|
An {@link ObjectInputStream} that ignores certain serialVersionUID mismatches. This is a
workaround for uncontrolled serialVersionUIDs changes.
|
readClassDescriptor
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/InstantiationUtil.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/InstantiationUtil.java
|
Apache-2.0
|
/**
 * Loads the named class with the given class loader (without initializing it), checks
 * that it is a subtype of {@code targetType}, and instantiates it.
 *
 * @param className name of the class to load
 * @param targetType type the instantiated class must conform to
 * @param classLoader class loader used to resolve the class
 * @param <T> type of the instantiated object
 * @return a new instance of the named class
 * @throws FlinkException if the class cannot be found on the class path
 */
public static <T> T instantiate(
        final String className, final Class<T> targetType, final ClassLoader classLoader)
        throws FlinkException {
    try {
        // instantiate(Class) only throws unchecked exceptions, so the catch below
        // still applies solely to the class lookup
        final Class<? extends T> clazz =
                Class.forName(className, false, classLoader).asSubclass(targetType);
        return instantiate(clazz);
    } catch (ClassNotFoundException e) {
        throw new FlinkException(
                String.format(
                        "Could not instantiate class '%s' of type '%s'. Please make sure that this class is on your class path.",
                        className, targetType.getName()),
                e);
    }
}
|
Creates a new instance of the given class name and type using the provided {@link
ClassLoader}.
@param className of the class to load
@param targetType type of the instantiated class
@param classLoader to use for loading the class
@param <T> type of the instantiated class
@return Instance of the given class name
@throws FlinkException if the class could not be found
|
instantiate
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/InstantiationUtil.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/InstantiationUtil.java
|
Apache-2.0
|
/**
 * Creates a new instance of the given class, optionally verifying first that it is a
 * subclass of {@code castTo} (to fail early instead of with a later ClassCastException).
 *
 * @param <T> the generic type of the class
 * @param clazz the class to instantiate
 * @param castTo optional supertype the class must extend; may be null to skip the check
 * @return an instance of the given class
 * @throws RuntimeException if the subclass check fails or instantiation fails
 */
public static <T> T instantiate(Class<T> clazz, Class<? super T> castTo) {
    if (clazz == null) {
        throw new NullPointerException();
    }
    // only enforce the subtype relationship when a target type was supplied
    if (castTo != null && !castTo.isAssignableFrom(clazz)) {
        final String message =
                "The class '"
                        + clazz.getName()
                        + "' is not a subclass of '"
                        + castTo.getName()
                        + "' as is required.";
        throw new RuntimeException(message);
    }
    return instantiate(clazz);
}
|
Creates a new instance of the given class.
@param <T> The generic type of the class.
@param clazz The class to instantiate.
@param castTo Optional parameter, specifying the class that the given class must be a
subclass off. This argument is added to prevent class cast exceptions occurring later.
@return An instance of the given class.
@throws RuntimeException Thrown, if the class could not be instantiated. The exception
contains a detailed message about the reason why the instantiation failed.
|
instantiate
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/InstantiationUtil.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/InstantiationUtil.java
|
Apache-2.0
|
/**
 * Creates a new instance of the given class via its nullary constructor.
 *
 * @param <T> the generic type of the class
 * @param clazz the class to instantiate
 * @return an instance of the given class
 * @throws RuntimeException if the class could not be instantiated; the message describes
 *     the most likely cause (missing/non-public constructor, abstract class, exception
 *     thrown by the constructor, ...)
 */
public static <T> T instantiate(Class<T> clazz) {
    if (clazz == null) {
        throw new NullPointerException();
    }
    // try to instantiate the class via the (deprecated but behavior-relevant)
    // Class#newInstance, which propagates constructor exceptions unwrapped
    try {
        return clazz.newInstance();
    } catch (InstantiationException | IllegalAccessException iex) {
        // check for the common problem causes; checkForInstantiation throws a more
        // specific RuntimeException if it identifies one
        checkForInstantiation(clazz);
        // none of the common causes applied — the error was most likely an exception
        // in the constructor or field initialization
        throw new RuntimeException(
                "Could not instantiate type '"
                        + clazz.getName()
                        + "' due to an unspecified exception: "
                        + iex.getMessage(),
                iex);
    } catch (Throwable t) {
        String message = t.getMessage();
        throw new RuntimeException(
                "Could not instantiate type '"
                        + clazz.getName()
                        + "' Most likely the constructor (or a member variable initialization) threw an exception"
                        + (message == null ? "." : ": " + message),
                t);
    }
}
|
Creates a new instance of the given class.
@param <T> The generic type of the class.
@param clazz The class to instantiate.
@return An instance of the given class.
@throws RuntimeException Thrown, if the class could not be instantiated. The exception
contains a detailed message about the reason why the instantiation failed.
|
instantiate
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/InstantiationUtil.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/InstantiationUtil.java
|
Apache-2.0
|
/**
 * Checks whether the given class declares a public nullary (no-argument) constructor.
 *
 * @param clazz the class to check
 * @return true if a public no-argument constructor exists, false otherwise
 */
public static boolean hasPublicNullaryConstructor(Class<?> clazz) {
    return Arrays.stream(clazz.getConstructors())
            .filter(ctor -> ctor.getParameterCount() == 0)
            .findFirst()
            .isPresent();
}
|
Checks, whether the given class has a public nullary constructor.
@param clazz The class to check.
@return True, if the class has a public nullary constructor, false if not.
|
hasPublicNullaryConstructor
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/InstantiationUtil.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/InstantiationUtil.java
|
Apache-2.0
|
/**
 * Checks whether the given class is declared public.
 *
 * @param clazz the class to check
 * @return true if the class is public, false otherwise
 */
public static boolean isPublic(Class<?> clazz) {
    final int modifiers = clazz.getModifiers();
    return (modifiers & Modifier.PUBLIC) != 0;
}
|
Checks, whether the given class is public.
@param clazz The class to check.
@return True, if the class is public, false if not.
|
isPublic
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/InstantiationUtil.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/InstantiationUtil.java
|
Apache-2.0
|
/**
 * Checks whether the class is an inner class that is not statically accessible —
 * in particular anonymous and local classes, and non-static member classes.
 *
 * @param clazz the class to check
 * @return true if the class is a non-statically accessible inner class
 */
public static boolean isNonStaticInnerClass(Class<?> clazz) {
    if (clazz.getEnclosingClass() == null) {
        // top-level class
        return false;
    }
    if (clazz.getDeclaringClass() == null) {
        // anonymous or local class
        return true;
    }
    return !Modifier.isStatic(clazz.getModifiers());
}
|
Checks, whether the class is an inner class that is not statically accessible. That is
especially true for anonymous inner classes.
@param clazz The class to check.
@return True, if the class is a non-statically accessible inner class.
|
isNonStaticInnerClass
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/InstantiationUtil.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/InstantiationUtil.java
|
Apache-2.0
|
/**
 * Clones the given serializable object via Java serialization, resolving classes with
 * the object's own class loader. Null input yields null.
 *
 * @param obj the object to clone, may be null
 * @param <T> type of the object
 * @return the cloned object
 * @throws IOException if serialization or deserialization fails
 * @throws ClassNotFoundException if a referenced class cannot be resolved
 */
public static <T extends Serializable> T clone(T obj)
        throws IOException, ClassNotFoundException {
    return obj == null ? null : clone(obj, obj.getClass().getClassLoader());
}
|
Clones the given serializable object using Java serialization.
@param obj Object to clone
@param <T> Type of the object to clone
@return The cloned object
@throws IOException Thrown if the serialization or deserialization process fails.
@throws ClassNotFoundException Thrown if any of the classes referenced by the object cannot
be resolved during deserialization.
|
clone
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/InstantiationUtil.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/InstantiationUtil.java
|
Apache-2.0
|
/**
 * Clones the given serializable object via Java serialization, using the given class
 * loader to resolve classes during deserialization. Null input yields null.
 *
 * @param obj the object to clone, may be null
 * @param classLoader class loader for resolving classes during deserialization
 * @param <T> type of the object
 * @return the cloned object
 * @throws IOException if serialization or deserialization fails
 * @throws ClassNotFoundException if a referenced class cannot be resolved
 */
public static <T extends Serializable> T clone(T obj, ClassLoader classLoader)
        throws IOException, ClassNotFoundException {
    if (obj == null) {
        return null;
    }
    // round-trip: serialize, then deserialize with the requested class loader
    return deserializeObject(serializeObject(obj), classLoader);
}
|
Clones the given serializable object using Java serialization, using the given classloader to
resolve the cloned classes.
@param obj Object to clone
@param classLoader The classloader to resolve the classes during deserialization.
@param <T> Type of the object to clone
@return Cloned object
@throws IOException Thrown if the serialization or deserialization process fails.
@throws ClassNotFoundException Thrown if any of the classes referenced by the object cannot
be resolved during deserialization.
|
clone
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/InstantiationUtil.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/InstantiationUtil.java
|
Apache-2.0
|
/**
 * Unchecked variant of {@link #clone(Serializable)}: wraps serialization failures in a
 * {@link RuntimeException}.
 *
 * @param obj the object to clone
 * @param <T> type of the object
 * @return the cloned object
 */
public static <T extends Serializable> T cloneUnchecked(T obj) {
    try {
        return clone(obj, obj.getClass().getClassLoader());
    } catch (IOException | ClassNotFoundException e) {
        final String message =
                String.format("Unable to clone instance of %s.", obj.getClass().getName());
        throw new RuntimeException(message, e);
    }
}
|
Unchecked equivalent of {@link #clone(Serializable)}.
@param obj Object to clone
@param <T> Type of the object to clone
@return The cloned object
|
cloneUnchecked
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/InstantiationUtil.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/InstantiationUtil.java
|
Apache-2.0
|
/**
 * Clones the given {@code IOReadableWritable} by writing it to a byte array and reading
 * it back into a freshly instantiated object of the same runtime class. Null input
 * yields null.
 *
 * <p>NOTE(review): the copy's class must be instantiable by {@code instantiate}, i.e.
 * presumably requires an accessible nullary constructor — confirm against that helper.
 *
 * @param original the object to copy, may be null
 * @param <T> type of the object
 * @return the copied object, or null if the input was null
 * @throws IOException if the serialization round-trip fails
 */
public static <T extends IOReadableWritable> T createCopyWritable(T original)
        throws IOException {
    if (original == null) {
        return null;
    }
    final ByteArrayOutputStream baos = new ByteArrayOutputStream();
    try (DataOutputViewStreamWrapper out = new DataOutputViewStreamWrapper(baos)) {
        original.write(out);
    }
    final ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray());
    try (DataInputViewStreamWrapper in = new DataInputViewStreamWrapper(bais)) {
        @SuppressWarnings("unchecked")
        T copy = (T) instantiate(original.getClass());
        copy.read(in);
        return copy;
    }
}
|
Clones the given writable using the {@link IOReadableWritable serialization}.
@param original Object to clone
@param <T> Type of the object to clone
@return Cloned object
@throws IOException Thrown is the serialization fails.
|
createCopyWritable
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/InstantiationUtil.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/InstantiationUtil.java
|
Apache-2.0
|
/**
 * Reads a class name from the input view (via {@code readUTF}) and resolves the class
 * with the given class loader, without any supertype restriction.
 *
 * <p>Delegates to {@link #resolveClassByName(DataInputView, ClassLoader, Class)} with
 * {@code Object.class} as the supertype, which every class satisfies.
 *
 * @param in the stream to read the class name from
 * @param cl the class loader to resolve the class
 * @throws IOException if the class name cannot be read or the class cannot be found
 */
public static <T> Class<T> resolveClassByName(DataInputView in, ClassLoader cl)
        throws IOException {
    return resolveClassByName(in, cl, Object.class);
}
|
Resolves a class by the name read from the given input stream (the class is loaded but not instantiated).
<p>This method will use {@link DataInputView#readUTF()} to read the class name, and then
attempt to load the class from the given ClassLoader.
@param in The stream to read the class name from.
@param cl The class loader to resolve the class.
@throws IOException Thrown, if the class name could not be read, the class could not be
found.
|
resolveClassByName
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/InstantiationUtil.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/InstantiationUtil.java
|
Apache-2.0
|
/**
 * Reads a class name from the input view (via {@code readUTF}), resolves the class with
 * the given class loader (without initializing it), and checks that it is equal to or a
 * subtype of {@code supertype}.
 *
 * @param in the stream to read the class name from
 * @param cl the class loader to resolve the class
 * @param supertype a class the resolved class must extend or implement
 * @throws IOException if the class name cannot be read, the class cannot be found, or
 *     the class is not a subtype of {@code supertype}
 */
public static <T> Class<T> resolveClassByName(
        DataInputView in, ClassLoader cl, Class<? super T> supertype) throws IOException {
    final String className = in.readUTF();
    final Class<?> rawClazz;
    try {
        // load without initialization — side-effect-free lookup
        rawClazz = Class.forName(className, false, cl);
    } catch (ClassNotFoundException e) {
        String error = "Could not find class '" + className + "' in classpath.";
        // give a targeted migration hint for snapshots of removed serializer-config classes
        if (className.contains("SerializerConfig")) {
            error +=
                    " TypeSerializerConfigSnapshot and it's subclasses are not supported since Flink 1.17."
                            + " If you are using built-in serializers, please first migrate to Flink 1.16."
                            + " If you are using custom serializers, please migrate them to"
                            + " TypeSerializerSnapshot using Flink 1.16.";
        }
        throw new IOException(error, e);
    }
    if (!supertype.isAssignableFrom(rawClazz)) {
        throw new IOException(
                "The class " + className + " is not a subclass of " + supertype.getName());
    }
    // safe by the isAssignableFrom check above (up to the caller's choice of T)
    @SuppressWarnings("unchecked")
    Class<T> clazz = (Class<T>) rawClazz;
    return clazz;
}
|
Resolves a class by the name read from the given input stream (the class is loaded but not instantiated).
<p>This method will use {@link DataInputView#readUTF()} to read the class name, and then
attempt to load the class from the given ClassLoader.
<p>The resolved class is checked to be equal to or a subtype of the given supertype class.
@param in The stream to read the class name from.
@param cl The class loader to resolve the class.
@param supertype A class that the resolved class must extend.
@throws IOException Thrown, if the class name could not be read, the class could not be
found, or the class is not a subtype of the given supertype class.
|
resolveClassByName
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/InstantiationUtil.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/InstantiationUtil.java
|
Apache-2.0
|
/**
 * Copies all bytes from the input stream to the output stream.
 *
 * <p>If the output stream is a {@link PrintStream} (which swallows write errors), its
 * error flag is polled after every write and surfaced as an {@link IOException}.
 *
 * @param in the InputStream to read from
 * @param out the OutputStream to write to
 * @param buffSize the size of the copy buffer
 * @param close whether to close both streams at the end (in a finally clause)
 * @throws IOException if an error occurs while reading or writing
 */
public static void copyBytes(
        final InputStream in, final OutputStream out, final int buffSize, final boolean close)
        throws IOException {
    @SuppressWarnings("resource")
    final PrintStream ps = out instanceof PrintStream ? (PrintStream) out : null;
    final byte[] buf = new byte[buffSize];
    try {
        int bytesRead = in.read(buf);
        while (bytesRead >= 0) {
            out.write(buf, 0, bytesRead);
            if ((ps != null) && ps.checkError()) {
                throw new IOException("Unable to write to output stream.");
            }
            bytesRead = in.read(buf);
        }
    } finally {
        if (close) {
            // close both streams even if closing the output stream throws
            // (previously a throwing out.close() leaked the input stream)
            try {
                out.close();
            } finally {
                in.close();
            }
        }
    }
}
|
Copies from one stream to another.
@param in InputStream to read from
@param out OutputStream to write to
@param buffSize the size of the buffer
@param close whether or not to close the InputStream and OutputStream at the end. If true, the
streams are closed in the finally clause.
@throws IOException thrown if an error occurred while writing to the output stream
|
copyBytes
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/IOUtils.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/IOUtils.java
|
Apache-2.0
|
/**
 * Copies all bytes from the input stream to the output stream using the default
 * {@code BLOCKSIZE} buffer, and <strong>closes both streams at the end</strong>.
 *
 * @param in the InputStream to read from
 * @param out the OutputStream to write to
 * @throws IOException if an I/O error occurs while copying
 */
public static void copyBytes(final InputStream in, final OutputStream out) throws IOException {
    copyBytes(in, out, BLOCKSIZE, true);
}
|
Copies from one stream to another. <strong>closes the input and output streams at the
end</strong>.
@param in InputStream to read from
@param out OutputStream to write to
@throws IOException thrown if an I/O error occurs while copying
|
copyBytes
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/IOUtils.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/IOUtils.java
|
Apache-2.0
|
/**
 * Copies all bytes from the input stream to the output stream using the default
 * {@code BLOCKSIZE} buffer.
 *
 * @param in the InputStream to read from
 * @param out the OutputStream to write to
 * @param close whether to close both streams at the end (in a finally clause)
 * @throws IOException if an I/O error occurs while copying
 */
public static void copyBytes(final InputStream in, final OutputStream out, final boolean close)
        throws IOException {
    copyBytes(in, out, BLOCKSIZE, close);
}
|
Copies from one stream to another.
@param in InputStream to read from
@param out OutputStream to write to
@param close whether or not close the InputStream and OutputStream at the end. The streams
are closed in the finally clause.
@throws IOException thrown if an I/O error occurs while copying
|
copyBytes
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/IOUtils.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/IOUtils.java
|
Apache-2.0
|
/**
 * Reads exactly {@code len} bytes from the stream into {@code buf}, looping until the
 * requested amount has been read.
 *
 * @param in the InputStream to read from
 * @param buf the buffer to fill
 * @param off offset into the buffer at which to start writing
 * @param len number of bytes to read
 * @throws IOException if the stream ends before {@code len} bytes were read, or on any
 *     other read failure
 */
public static void readFully(final InputStream in, final byte[] buf, int off, final int len)
        throws IOException {
    int remaining = len;
    int position = off;
    while (remaining > 0) {
        final int count = in.read(buf, position, remaining);
        if (count < 0) {
            throw new IOException("Premature EOF from inputStream");
        }
        position += count;
        remaining -= count;
    }
}
|
Reads len bytes in a loop.
@param in The InputStream to read from
@param buf The buffer to fill
@param off offset from the buffer
@param len the length of bytes to read
@throws IOException if it could not read requested number of bytes for any reason (including
EOF)
|
readFully
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/IOUtils.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/IOUtils.java
|
Apache-2.0
|
/**
 * Reads as many bytes as possible into {@code buf}, stopping at end-of-stream or when the
 * buffer is full.
 *
 * @param in the InputStream to read from
 * @param buf the buffer to fill
 * @return the total number of bytes actually read into the buffer
 * @throws IOException if a read fails for any reason other than end of stream
 */
public static int tryReadFully(final InputStream in, final byte[] buf) throws IOException {
    int filled = 0;
    int read;
    while (filled < buf.length && (read = in.read(buf, filled, buf.length - filled)) != -1) {
        filled += read;
    }
    return filled;
}
|
Similar to {@link #readFully(InputStream, byte[], int, int)}. Returns the total number of
bytes read into the buffer.
@param in The InputStream to read from
@param buf The buffer to fill
@return The total number of bytes read into the buffer
@throws IOException If the first byte cannot be read for any reason other than end of file,
or if the input stream has been closed, or if some other I/O error occurs.
|
tryReadFully
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/IOUtils.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/IOUtils.java
|
Apache-2.0
|
/**
 * Skips exactly {@code len} bytes, looping until the requested amount has been skipped.
 *
 * <p>Per the {@link InputStream#skip(long)} contract, {@code skip} never returns a negative
 * value — it may however return 0 both before and at end-of-stream. The original check for
 * {@code ret < 0} could therefore never fire, and the loop could spin forever at EOF. A
 * zero-skip is now disambiguated with a single-byte probe read.
 *
 * @param in the InputStream to skip bytes from
 * @param len number of bytes to skip
 * @throws IOException if the stream ends before {@code len} bytes were skipped, or on any
 *     other I/O failure
 */
public static void skipFully(final InputStream in, long len) throws IOException {
    while (len > 0) {
        final long skipped = in.skip(len);
        if (skipped > 0) {
            len -= skipped;
        } else {
            // skip() returned 0: either nothing was skipped "for a variety of reasons"
            // or we are at EOF. A probe read tells the two cases apart.
            if (in.read() < 0) {
                throw new IOException("Premature EOF from inputStream");
            }
            len--;
        }
    }
}
|
Similar to readFully(). Skips bytes in a loop.
@param in The InputStream to skip bytes from
@param len number of bytes to skip
@throws IOException if it could not skip requested number of bytes for any reason (including
EOF)
|
skipFully
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/IOUtils.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/IOUtils.java
|
Apache-2.0
|
/**
 * Closes each of the given resources, logging close failures at debug level and otherwise
 * ignoring them. Must only be used for cleanup in exception handlers.
 *
 * @param log logger for debug-level reporting of close failures; may be {@code null}
 * @param closeables the resources to close; individual elements may be {@code null}
 */
public static void cleanup(final Logger log, final AutoCloseable... closeables) {
    for (AutoCloseable closeable : closeables) {
        if (closeable == null) {
            continue;
        }
        try {
            closeable.close();
        } catch (Exception e) {
            // Cleanup path: failures must not propagate, only get reported.
            if (log != null && log.isDebugEnabled()) {
                log.debug("Exception in closing " + closeable, e);
            }
        }
    }
}
|
Close the AutoCloseable objects and <b>ignore</b> any {@link Exception} or null pointers.
Must only be used for cleanup in exception handlers.
@param log the log to record problems to at debug level. Can be <code>null</code>.
@param closeables the objects to close
|
cleanup
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/IOUtils.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/IOUtils.java
|
Apache-2.0
|
/**
 * Closes the given stream, swallowing any exception. Must only be called when cleaning up
 * from exception handlers, where a close failure should not mask the original error.
 *
 * @param stream the stream to close; may be {@code null}
 */
public static void closeStream(final java.io.Closeable stream) {
    cleanup(null, stream);
}
|
Closes the stream ignoring {@link IOException}. Must only be called in cleaning up from
exception handlers.
@param stream the stream to close
|
closeStream
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/IOUtils.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/IOUtils.java
|
Apache-2.0
|
/**
 * Closes the given socket, ignoring {@link IOException}.
 *
 * @param sock the socket to close; may be {@code null}, in which case this is a no-op
 */
public static void closeSocket(final Socket sock) {
    if (sock == null) {
        return;
    }
    try {
        sock.close();
    } catch (IOException ignored) {
        // best effort: failures while closing are intentionally swallowed
    }
}
|
Closes the socket ignoring {@link IOException}.
@param sock the socket to close
|
closeSocket
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/IOUtils.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/IOUtils.java
|
Apache-2.0
|
/**
 * Closes every element of the iterable via {@code closeQuietly}, i.e. swallowing all
 * failures.
 *
 * @param closeables the resources to close; may be {@code null}, in which case this is a
 *     no-op
 */
public static void closeAllQuietly(Iterable<? extends AutoCloseable> closeables) {
    if (closeables == null) {
        return;
    }
    for (AutoCloseable resource : closeables) {
        closeQuietly(resource);
    }
}
|
Closes all elements in the iterable with closeQuietly().
|
closeAllQuietly
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/IOUtils.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/IOUtils.java
|
Apache-2.0
|
/**
 * Closes the given resource, swallowing everything — including {@link Error}s.
 *
 * <p><b>Important:</b> this method is expected to never throw.
 *
 * @param closeable the resource to close; may be {@code null}
 */
public static void closeQuietly(AutoCloseable closeable) {
    if (closeable == null) {
        return;
    }
    try {
        closeable.close();
    } catch (Throwable ignored) {
        // intentionally swallowed: this method must never throw
    }
}
|
Closes the given AutoCloseable.
<p><b>Important:</b> This method is expected to never throw an exception.
|
closeQuietly
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/IOUtils.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/IOUtils.java
|
Apache-2.0
|
/**
 * Recursively deletes all regular files underneath the given path. Directory entries
 * themselves are traversed but not removed, so the (now empty) directory tree remains.
 *
 * @param path directory (or file) whose contents should be deleted
 * @throws Exception if a deletion fails
 */
public static void deleteFilesRecursively(Path path) throws Exception {
    final File[] entries = path.toFile().listFiles();
    if (entries == null || entries.length == 0) {
        // non-existent path, not a directory, or already empty
        return;
    }
    for (File entry : entries) {
        if (entry.isDirectory()) {
            deleteFilesRecursively(entry.toPath());
        } else {
            Files.deleteIfExists(entry.toPath());
        }
    }
}
|
Delete the given directory or file recursively.
|
deleteFilesRecursively
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/IOUtils.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/IOUtils.java
|
Apache-2.0
|
/**
 * Deletes the given file if it exists, swallowing every failure.
 *
 * <p><b>Important:</b> this method is expected to never throw.
 *
 * @param path the file to delete
 */
public static void deleteFileQuietly(Path path) {
    try {
        Files.deleteIfExists(path);
    } catch (Throwable ignored) {
        // best effort only: deletion failures are deliberately not reported
    }
}
|
Deletes the given file.
<p><b>Important:</b> This method is expected to never throw an exception.
|
deleteFileQuietly
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/IOUtils.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/IOUtils.java
|
Apache-2.0
|
/**
 * Converts the given {@link Iterable} into a sequential {@link Stream}, reusing
 * {@link Collection#stream()} when the iterable is already a collection.
 *
 * @param iterable the iterable to convert; must not be {@code null}
 * @param <E> element type
 * @return a sequential stream over the iterable's elements
 */
public static <E> Stream<E> toStream(Iterable<E> iterable) {
    checkNotNull(iterable);
    if (iterable instanceof Collection) {
        return ((Collection<E>) iterable).stream();
    }
    return StreamSupport.stream(iterable.spliterator(), false);
}
|
Convert the given {@link Iterable} to a {@link Stream}.
@param iterable to convert to a stream
@param <E> type of the elements of the iterable
@return stream converted from the given {@link Iterable}
|
toStream
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/IterableUtils.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/IterableUtils.java
|
Apache-2.0
|
/**
 * Feeds every non-null element of {@code inputs} to the consumer. Exceptions thrown for
 * individual elements do not stop the iteration; they are collected (first one primary, the
 * rest suppressed) and rethrown as a single exception at the end.
 *
 * @param inputs elements to hand to the consumer; {@code null} iterable is a no-op
 * @param throwingConsumer invoked once per non-null element; {@code null} is a no-op
 * @param <T> element type
 * @throws Exception the combined exception if any invocation failed
 */
public static <T> void applyToAllWhileSuppressingExceptions(
        Iterable<T> inputs, ThrowingConsumer<T, ? extends Exception> throwingConsumer)
        throws Exception {
    if (inputs == null || throwingConsumer == null) {
        return;
    }
    Exception collected = null;
    for (T element : inputs) {
        if (element == null) {
            continue;
        }
        try {
            throwingConsumer.accept(element);
        } catch (Exception e) {
            collected = ExceptionUtils.firstOrSuppressed(e, collected);
        }
    }
    if (collected != null) {
        throw collected;
    }
}
|
This method supplies all elements from the input to the consumer. Exceptions that happen on
elements are suppressed until all elements are processed. If exceptions happened for one or
more of the inputs, they are reported in a combining suppressed exception.
@param inputs iterator for all inputs to the throwingConsumer.
@param throwingConsumer this consumer will be called for all elements delivered by the input
iterator.
@param <T> the type of input.
@throws Exception collected exceptions that happened during the invocation of the consumer on
the input elements.
|
applyToAllWhileSuppressingExceptions
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/LambdaUtil.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/LambdaUtil.java
|
Apache-2.0
|
/**
 * Wraps the given map as a {@code LinkedOptionalMap}, assigning each key the unique string
 * name produced by {@code keyNameGetter} (the name is what allows absent keys later).
 *
 * @param sourceMap the map to wrap
 * @param keyNameGetter assigns a unique string name to each key
 * @param <K> key type
 * @param <V> value type
 * @return an optional map mirroring the source map's entries and order
 */
public static <K, V> LinkedOptionalMap<K, V> optionalMapOf(
        Map<K, V> sourceMap, Function<K, String> keyNameGetter) {
    final LinkedHashMap<String, KeyValue<K, V>> named =
            CollectionUtil.newLinkedHashMapWithExpectedSize(sourceMap.size());
    for (Map.Entry<K, V> entry : sourceMap.entrySet()) {
        final String keyName = keyNameGetter.apply(entry.getKey());
        named.put(keyName, new KeyValue<>(entry.getKey(), entry.getValue()));
    }
    return new LinkedOptionalMap<>(named);
}
|
Creates an {@code LinkedOptionalMap} from the provided map.
<p>This method is the equivalent of {@link Optional#of(Object)} but for maps. To support more
than one {@code NULL} key, an optional map requires a unique string name to be associated
with each key (provided by keyNameGetter)
@param sourceMap a source map to wrap as an optional map.
@param keyNameGetter function that assigns a unique name to the keys of the source map.
@param <K> key type
@param <V> value type
@return an {@code LinkedOptionalMap} with optional named keys, and optional values.
|
optionalMapOf
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/LinkedOptionalMap.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/LinkedOptionalMap.java
|
Apache-2.0
|
/**
 * Returns, in encounter order, the names of all entries whose key or value is absent.
 *
 * @return an insertion-ordered set of the affected key names
 */
public Set<String> absentKeysOrValues() {
    final LinkedHashSet<String> absent = new LinkedHashSet<>();
    for (Entry<String, KeyValue<K, V>> entry : underlyingMap.entrySet()) {
        if (keyOrValueIsAbsent(entry)) {
            absent.add(entry.getKey());
        }
    }
    return absent;
}
|
Returns the key names of any keys or values that are absent.
|
absentKeysOrValues
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/LinkedOptionalMap.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/LinkedOptionalMap.java
|
Apache-2.0
|
/**
 * Checks whether any entry has an absent key or an absent value.
 *
 * @return {@code true} if at least one entry is incomplete
 */
public boolean hasAbsentKeysOrValues() {
    return underlyingMap.entrySet().stream().anyMatch(LinkedOptionalMap::keyOrValueIsAbsent);
}
|
Checks whether there are entries with absent keys or values.
|
hasAbsentKeysOrValues
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/LinkedOptionalMap.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/LinkedOptionalMap.java
|
Apache-2.0
|
/**
 * Strips the optional wrappers and returns a plain map of all keys and values, preserving
 * entry order.
 *
 * @return a LinkedHashMap with the same entries, in the same order
 * @throws IllegalStateException if any key or value is absent
 */
public LinkedHashMap<K, V> unwrapOptionals() {
    final LinkedHashMap<K, V> result =
            CollectionUtil.newLinkedHashMapWithExpectedSize(underlyingMap.size());
    for (Entry<String, KeyValue<K, V>> entry : underlyingMap.entrySet()) {
        final String name = entry.getKey();
        final KeyValue<K, V> pair = entry.getValue();
        if (pair.key == null) {
            throw new IllegalStateException("Missing key '" + name + "'");
        }
        if (pair.value == null) {
            throw new IllegalStateException("Missing value for the key '" + name + "'");
        }
        result.put(pair.key, pair.value);
    }
    return result;
}
|
Assuming all the entries of this map are present (keys and values) this method would return a
map with these key and values, stripped from their Optional wrappers. NOTE: please note that
if any of the key or values are absent this method would throw an {@link
IllegalStateException}.
|
unwrapOptionals
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/LinkedOptionalMap.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/LinkedOptionalMap.java
|
Apache-2.0
|
/**
 * Returns the precomputed flag indicating whether the left side's key names appear in
 * prefix order on the right side (computed when this result object was built).
 */
public boolean isOrderedSubset() {
    return isOrderedSubset;
}
|
Returns {@code true} if the key names present in the left map appear, in the same prefix order, in the right map.
|
isOrderedSubset
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/LinkedOptionalMap.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/LinkedOptionalMap.java
|
Apache-2.0
|
/**
 * Computes floor(log2(value)), i.e. the bit position of the highest set bit, counted from
 * the least significant bit. E.g. {@code log2floor(16) == 4}, {@code log2floor(10) == 3}.
 *
 * @param value the value to take the logarithm of
 * @return the base-2 logarithm, rounded down
 * @throws ArithmeticException if {@code value} is zero
 */
public static int log2floor(int value) throws ArithmeticException {
    if (value == 0) {
        throw new ArithmeticException("Logarithm of zero is undefined.");
    }
    // Position of the highest one-bit == number of trailing zeros of that isolated bit.
    return Integer.numberOfTrailingZeros(Integer.highestOneBit(value));
}
|
Computes the logarithm of the given value to the base of 2, rounded down. It corresponds to
the position of the highest non-zero bit. The position is counted, starting with 0 from the
least significant bit to the most significant bit. For example, <code>log2floor(16) = 4
</code>, and <code>log2floor(10) = 3</code>.
@param value The value to compute the logarithm for.
@return The logarithm (rounded down) to the base of 2.
@throws ArithmeticException Thrown, if the given value is zero.
|
log2floor
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/MathUtils.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/MathUtils.java
|
Apache-2.0
|
/**
 * Computes log2(value) for values that are exact powers of two.
 *
 * @param value the value to take the logarithm of; must be a power of two
 * @return the base-2 logarithm
 * @throws ArithmeticException if {@code value} is zero
 * @throws IllegalArgumentException if {@code value} is not a power of two
 */
public static int log2strict(int value) throws ArithmeticException, IllegalArgumentException {
    if (value == 0) {
        throw new ArithmeticException("Logarithm of zero is undefined.");
    }
    // A power of two has exactly one bit set.
    if (Integer.bitCount(value) != 1) {
        throw new IllegalArgumentException(
                "The given value " + value + " is not a power of two.");
    }
    return Integer.numberOfTrailingZeros(value);
}
|
Computes the logarithm of the given value to the base of 2. This method throws an error, if
the given argument is not a power of 2.
@param value The value to compute the logarithm for.
@return The logarithm to the base of 2.
@throws ArithmeticException Thrown, if the given value is zero.
@throws IllegalArgumentException Thrown, if the given value is not a power of two.
|
log2strict
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/MathUtils.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/MathUtils.java
|
Apache-2.0
|
/**
 * Rounds the given value down to the nearest power of two. Powers of two are returned
 * unchanged; zero maps to zero.
 *
 * @param value the value to round down
 * @return the largest power of two that is less than or equal to the value
 */
public static int roundDownToPowerOf2(int value) {
    if (value == 0) {
        return 0;
    }
    // Isolate the highest set bit by shifting 1 up to its position.
    return 1 << (31 - Integer.numberOfLeadingZeros(value));
}
|
Decrements the given number down to the closest power of two. If the argument is a power of
two, it remains unchanged.
@param value The value to round down.
@return The closest value that is a power of two and less or equal than the given value.
|
roundDownToPowerOf2
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/MathUtils.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/MathUtils.java
|
Apache-2.0
|
/**
 * Narrows a long to an int, failing loudly if the numeric value would change.
 *
 * @param value the value to narrow
 * @return the value as an int
 * @throws IllegalArgumentException if the value does not fit into 32 bits
 * @see Math#toIntExact(long)
 */
public static int checkedDownCast(long value) {
    final int narrowed = (int) value;
    if ((long) narrowed != value) {
        throw new IllegalArgumentException(
                "Cannot downcast long value " + value + " to integer.");
    }
    return narrowed;
}
|
Casts the given value to a 32 bit integer, if it can be safely done. If the cast would change
the numeric value, this method raises an exception.
<p>This method is a protection in places where one expects to be able to safely case, but
where unexpected situations could make the cast unsafe and would cause hidden problems that
are hard to track down.
@param value The value to be cast to an integer.
@return The given value as an integer.
@see Math#toIntExact(long)
|
checkedDownCast
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/MathUtils.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/MathUtils.java
|
Apache-2.0
|
/**
 * Checks whether the given value is a power of two, i.e. has at most one bit set.
 * Note that, like the classic {@code v & (v - 1)} trick, this reports {@code true} for 0.
 *
 * @param value the value to check
 * @return {@code true} if the value is a power of two
 */
public static boolean isPowerOf2(long value) {
    return Long.bitCount(value) <= 1;
}
|
Checks whether the given value is a power of two.
@param value The value to check.
@return True, if the value is a power of two, false otherwise.
|
isPowerOf2
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/MathUtils.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/MathUtils.java
|
Apache-2.0
|
/**
 * Hashes an integer with Bob Jenkins' full-avalanche integer hash
 * (http://www.burtleburtle.net/bob/hash/integer.html) and maps the result into the
 * non-negative range. Intended for partitioning internally in data structures.
 *
 * @param code the integer to hash
 * @return the non-negative hash value
 */
public static int jenkinsHash(int code) {
    int h = code;
    h = (h + 0x7ed55d16) + (h << 12);
    h = (h ^ 0xc761c23c) ^ (h >>> 19);
    h = (h + 0x165667b1) + (h << 5);
    h = (h + 0xd3a2646c) ^ (h << 9);
    h = (h + 0xfd7046c5) + (h << 3);
    h = (h ^ 0xb55a4f09) ^ (h >>> 16);
    // Fold negatives into the non-negative range: for h < 0 this yields ~h == -(h + 1).
    return h ^ (h >> 31);
}
|
This function hashes an integer value. It is adapted from Bob Jenkins' website <a
href="http://www.burtleburtle.net/bob/hash/integer.html">http://www.burtleburtle.net/bob/hash/integer.html</a>.
The hash function has the <i>full avalanche</i> property, meaning that every bit of the value
to be hashed affects every bit of the hash value.
<p>It is crucial to use different hash functions to partition data across machines and the
internal partitioning of data structures. This hash function is intended for partitioning
internally in data structures.
@param code The integer to be hashed.
@return The non-negative hash code for the integer.
|
jenkinsHash
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/MathUtils.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/MathUtils.java
|
Apache-2.0
|
/**
 * Hashes an integer using the MurmurHash3 x86_32 mixing steps and maps the result into the
 * non-negative range. Intended for partitioning data across machines.
 *
 * @param code the integer to hash
 * @return the non-negative hash value
 */
public static int murmurHash(int code) {
    int h = code;
    h *= 0xcc9e2d51;
    h = Integer.rotateLeft(h, 15);
    h *= 0x1b873593;
    h = Integer.rotateLeft(h, 13);
    h = h * 5 + 0xe6546b64;
    h ^= 4; // murmur3 length xor: a single int is 4 bytes
    h = bitMix(h);
    // Map into the non-negative range; MIN_VALUE has no positive counterpart, use 0.
    return h == Integer.MIN_VALUE ? 0 : Math.abs(h);
}
|
This function hashes an integer value.
<p>It is crucial to use different hash functions to partition data across machines and the
internal partitioning of data structures. This hash function is intended for partitioning
across machines.
@param code The integer to be hashed.
@return The non-negative hash code for the integer.
|
murmurHash
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/MathUtils.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/MathUtils.java
|
Apache-2.0
|
/**
 * Rounds the given number up to the next power of two; powers of two are returned
 * unchanged.
 *
 * @param x the number to round
 * @return {@code x} rounded up to the next power of two
 */
public static int roundUpToPowerOfTwo(int x) {
    // Classic bit-smearing: copy the highest set bit of (x - 1) into every lower
    // position, then add one to land on the next power of two.
    int v = x - 1;
    for (int shift = 1; shift <= 16; shift <<= 1) {
        v |= v >> shift;
    }
    return v + 1;
}
|
Round the given number to the next power of two.
@param x number to round
@return x rounded up to the next power of two
|
roundUpToPowerOfTwo
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/MathUtils.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/MathUtils.java
|
Apache-2.0
|
/**
 * Pseudo-randomly maps a 64-bit value to a 32-bit value; the mixing constants match the
 * splitmix64 finalizer, giving good bit distribution.
 *
 * @param in the 64-bit input
 * @return the bit-mixed 32-bit output
 */
public static int longToIntWithBitMixing(long in) {
    long z = in;
    z = (z ^ (z >>> 30)) * 0xbf58476d1ce4e5b9L;
    z = (z ^ (z >>> 27)) * 0x94d049bb133111ebL;
    z ^= z >>> 31;
    return (int) z;
}
|
Pseudo-randomly maps a long (64-bit) to an integer (32-bit) using some bit-mixing for better
distribution.
@param in the long (64-bit)input.
@return the bit-mixed int (32-bit) output
|
longToIntWithBitMixing
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/MathUtils.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/MathUtils.java
|
Apache-2.0
|
/**
 * Bit-mixes an integer for pseudo-randomization (e.g. to guard against weak hash
 * functions); this is MurmurHash3's 32-bit finalizer.
 *
 * @param in the input value
 * @return the bit-mixed output value
 */
public static int bitMix(int in) {
    int h = in;
    h ^= h >>> 16;
    h *= 0x85ebca6b;
    h ^= h >>> 13;
    h *= 0xc2b2ae35;
    h ^= h >>> 16;
    return h;
}
|
Bit-mixing for pseudo-randomization of integers (e.g., to guard against bad hash functions).
Implementation is from Murmur's 32 bit finalizer.
@param in the input value
@return the bit-mixed output value
|
bitMix
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/MathUtils.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/MathUtils.java
|
Apache-2.0
|
/**
 * Flips the sign bit (bit 63) of the input.
 *
 * @param in the input value
 * @return the input with its most significant bit inverted
 */
public static long flipSignBit(long in) {
    // Equivalent to in ^ Long.MIN_VALUE: clear the bit when set, set it when clear.
    return in < 0 ? in & Long.MAX_VALUE : in | Long.MIN_VALUE;
}
|
Flips the sign bit (most-significant-bit) of the input.
@param in the input value.
@return the input with a flipped sign bit (most-significant-bit).
|
flipSignBit
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/MathUtils.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/MathUtils.java
|
Apache-2.0
|
/**
 * Integer division that rounds up, e.g. {@code divideRoundUp(3, 2) == 2} and
 * {@code divideRoundUp(0, 3) == 0}. Negative values are not supported. The
 * {@code (dividend - 1) / divisor + 1} form avoids the overflow of
 * {@code (dividend + divisor - 1) / divisor}.
 *
 * @param dividend non-negative value to be divided
 * @param divisor positive value to divide by
 * @return the quotient, rounded up
 */
public static int divideRoundUp(int dividend, int divisor) {
    Preconditions.checkArgument(dividend >= 0, "Negative dividend is not supported.");
    Preconditions.checkArgument(divisor > 0, "Negative or zero divisor is not supported.");
    if (dividend == 0) {
        return 0;
    }
    return (dividend - 1) / divisor + 1;
}
|
Divide and rounding up to integer. E.g., divideRoundUp(3, 2) returns 2, divideRoundUp(0, 3)
returns 0. Note that this method does not support negative values.
@param dividend value to be divided by the divisor
@param divisor value by which the dividend is to be divided
@return the quotient rounding up to integer
|
divideRoundUp
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/MathUtils.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/MathUtils.java
|
Apache-2.0
|
/**
 * Replaces the current MDC contents with the given map and returns a closeable that
 * restores the previous contents (or clears the MDC if there were none).
 *
 * @param context the map to install into the MDC
 * @return a closeable that undoes the replacement
 */
public static MdcCloseable withContext(Map<String, String> context) {
    final Map<String, String> previous = MDC.getCopyOfContextMap();
    MDC.setContextMap(context);
    return () -> {
        if (previous == null) {
            MDC.clear();
        } else {
            MDC.setContextMap(previous);
        }
    };
}
|
Replace MDC contents with the provided one and return a closeable object that can be used to
restore the original MDC.
@param context to put into MDC
|
withContext
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/MdcUtils.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/MdcUtils.java
|
Apache-2.0
|
/**
 * Wraps a {@link Runnable} so that the given MDC data is installed before it runs and the
 * previous MDC state is restored afterwards.
 *
 * @param contextData MDC entries to install for the duration of the run
 * @param command the runnable to wrap
 * @return the MDC-aware runnable
 */
public static Runnable wrapRunnable(Map<String, String> contextData, Runnable command) {
    return () -> {
        try (MdcCloseable restore = withContext(contextData)) {
            command.run();
        }
    };
}
|
Wrap the given {@link Runnable} so that the given data is added to {@link MDC} before its
execution and removed afterward.
|
wrapRunnable
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/MdcUtils.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/MdcUtils.java
|
Apache-2.0
|
/**
 * Wraps a {@link Callable} so that the given MDC data is installed before it runs and the
 * previous MDC state is restored afterwards.
 *
 * @param contextData MDC entries to install for the duration of the call
 * @param command the callable to wrap
 * @param <T> result type
 * @return the MDC-aware callable
 */
public static <T> Callable<T> wrapCallable(
        Map<String, String> contextData, Callable<T> command) {
    return () -> {
        try (MdcCloseable restore = withContext(contextData)) {
            return command.call();
        }
    };
}
|
Wrap the given {@link Callable} so that the given data is added to {@link MDC} before its
execution and removed afterward.
|
wrapCallable
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/MdcUtils.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/MdcUtils.java
|
Apache-2.0
|
/**
 * Builds a {@link MultipleParameterTool} from command-line arguments. Arguments are keys
 * (starting with '-' or '--') optionally followed by values; a key given multiple times
 * collects all of its values.
 *
 * <p><strong>Example:</strong> {@code --key1 value1 -key3 value3 --multi m1 --multi m2}
 *
 * @param args the raw argument array
 * @return the populated {@link MultipleParameterTool}
 */
public static MultipleParameterTool fromArgs(String[] args) {
    final Map<String, Collection<String>> map =
            CollectionUtil.newHashMapWithExpectedSize(args.length / 2);
    int pos = 0;
    while (pos < args.length) {
        final String key = Utils.getKeyFromArgs(args, pos);
        pos += 1; // advance to the (potential) value
        final Collection<String> values = map.computeIfAbsent(key, k -> new ArrayList<>());
        if (pos >= args.length) {
            // trailing flag without a value
            values.add(NO_VALUE_KEY);
        } else if (NumberUtils.isNumber(args[pos])) {
            // numbers (incl. negative ones) are always values, never keys
            values.add(args[pos]);
            pos += 1;
        } else if (args[pos].startsWith("--") || args[pos].startsWith("-")) {
            // cannot be a negative number (checked above) -> it is the next key;
            // do not advance, the next iteration will parse it as a key
            values.add(NO_VALUE_KEY);
        } else {
            values.add(args[pos]);
            pos += 1;
        }
    }
    return fromMultiMap(map);
}
|
Returns {@link MultipleParameterTool} for the given arguments. The arguments are keys
followed by values. Keys have to start with '-' or '--'
<p><strong>Example arguments:</strong> --key1 value1 --key2 value2 -key3 value3 --multi
multiValue1 --multi multiValue2
@param args Input array arguments
@return A {@link MultipleParameterTool}
|
fromArgs
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/MultipleParameterTool.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/MultipleParameterTool.java
|
Apache-2.0
|
/**
 * Returns the single String value for the given key, or {@code null} if the key is not
 * present. Use {@code getMultiParameter(String)} for keys that carry multiple values.
 * As a side effect, the key is recorded as requested and registered in the defaults.
 *
 * @param key the parameter key
 * @return the key's single value, or {@code null} if absent
 * @throws IllegalStateException if the key has more than one value
 */
@Override
public String get(String key) {
    addToDefaults(key, null);
    unrequestedParameters.remove(key);
    if (!data.containsKey(key)) {
        return null;
    }
    // Fixed message grammar: "should has" -> "should have".
    Preconditions.checkState(
            data.get(key).size() == 1, "Key %s should have only one value.", key);
    return (String) data.get(key).toArray()[0];
}
|
Returns the String value for the given key. The value should only have one item. Use {@link
#getMultiParameter(String)} instead if want to get multiple values parameter. If the key does
not exist it will return null.
|
get
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/MultipleParameterTool.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/MultipleParameterTool.java
|
Apache-2.0
|
/**
 * Returns all values recorded for the given key, or {@code null} if the key is not
 * present. As a side effect, the key is recorded as requested and registered in the
 * defaults.
 *
 * @param key the parameter key
 * @return the key's values, or {@code null} if absent
 */
public Collection<String> getMultiParameter(String key) {
    addToDefaults(key, null);
    unrequestedParameters.remove(key);
    // Map#get already yields null for missing keys; no explicit default needed.
    return data.get(key);
}
|
Returns the Collection of String values for the given key. If the key does not exist it will
return null.
|
getMultiParameter
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/MultipleParameterTool.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/MultipleParameterTool.java
|
Apache-2.0
|
/**
 * Returns all values recorded for the given key, failing if the key is not present.
 *
 * @param key the parameter key
 * @return the key's values
 * @throws RuntimeException if the key is absent
 */
public Collection<String> getMultiParameterRequired(String key) {
    addToDefaults(key, null);
    final Collection<String> values = getMultiParameter(key);
    if (values == null) {
        throw new RuntimeException("No data for required key '" + key + "'");
    }
    return values;
}
|
Returns the Collection of String values for the given key. If the key does not exist it will
throw a {@link RuntimeException}.
|
getMultiParameterRequired
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/MultipleParameterTool.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/MultipleParameterTool.java
|
Apache-2.0
|
/**
 * Exposes {@link java.net.URLClassLoader#addURL(URL)} publicly, so that URLs can be added
 * to the classloader's search path after construction.
 *
 * @param url the URL to append to the search path
 */
@Override
public void addURL(URL url) {
    super.addURL(url);
}
|
URL class loader that exposes the `addURL` method in URLClassLoader.
|
addURL
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/MutableURLClassLoader.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/MutableURLClassLoader.java
|
Apache-2.0
|
/**
 * Extracts the hostname from a fully qualified domain name: everything before the first
 * '.', or the whole string if it contains no dot.
 *
 * @param fqdn the fully qualified domain name; must not be {@code null}
 * @return the hostname part
 * @throws IllegalArgumentException if {@code fqdn} is {@code null}
 */
public static String getHostnameFromFQDN(String fqdn) {
    if (fqdn == null) {
        throw new IllegalArgumentException("fqdn is null");
    }
    final int firstDot = fqdn.indexOf('.');
    return firstDot < 0 ? fqdn : fqdn.substring(0, firstDot);
}
|
Turn a fully qualified domain name (fqdn) into a hostname. If the fqdn has multiple subparts
(separated by a period '.'), it will take the first part. Otherwise it takes the entire fqdn.
@param fqdn The fully qualified domain name.
@return The hostname.
|
getHostnameFromFQDN
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/NetUtils.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/NetUtils.java
|
Apache-2.0
|
/**
 * Converts a "host:port" string into a validated {@link URL}. An explicit http/https
 * scheme prefix is also accepted; otherwise "http://" is assumed.
 *
 * @param hostPort the "host:port" string
 * @return the validated URL
 * @throws IllegalArgumentException if the host or port is missing or malformed
 */
public static URL getCorrectHostnamePort(String hostPort) {
    return validateHostPortString(hostPort);
}
|
Converts a string of the form "host:port" into an {@link URL}.
@param hostPort The "host:port" string.
@return The converted URL.
|
getCorrectHostnamePort
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/NetUtils.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/NetUtils.java
|
Apache-2.0
|
/**
 * Parses a "host:port" string into an {@link InetSocketAddress}, validating both parts
 * first.
 *
 * @param hostPort the "host:port" string
 * @return the resulting (possibly unresolved) socket address
 */
public static InetSocketAddress parseHostPortAddress(String hostPort) {
    final URL validated = validateHostPortString(hostPort);
    return new InetSocketAddress(validated.getHost(), validated.getPort());
}
|
Converts a string of the form "host:port" into an {@link InetSocketAddress}.
@param hostPort The "host:port" string.
@return The converted InetSocketAddress.
|
parseHostPortAddress
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/NetUtils.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/NetUtils.java
|
Apache-2.0
|
/**
 * Validates that the given string denotes a host plus port (IPv6 literals included) and
 * returns it as a {@link URL} for easy host/port access. A missing scheme defaults to
 * "http://".
 *
 * @param hostPort the "host:port" string, optionally prefixed with http:// or https://
 * @return a URL exposing the validated host and port
 * @throws IllegalArgumentException if the input is blank, malformed, or lacks host/port
 */
private static URL validateHostPortString(String hostPort) {
    if (StringUtils.isNullOrWhitespaceOnly(hostPort)) {
        throw new IllegalArgumentException("hostPort should not be null or empty");
    }
    // Lowercase once for the scheme check; the URL itself keeps the original casing.
    final String lower = hostPort.toLowerCase();
    final boolean hasScheme = lower.startsWith("http://") || lower.startsWith("https://");
    try {
        final URL url = hasScheme ? new URL(hostPort) : new URL("http://" + hostPort);
        if (url.getHost() == null) {
            throw new IllegalArgumentException(
                    "The given host:port ('" + hostPort + "') doesn't contain a valid host");
        }
        if (url.getPort() == -1) {
            throw new IllegalArgumentException(
                    "The given host:port ('" + hostPort + "') doesn't contain a valid port");
        }
        return url;
    } catch (MalformedURLException e) {
        throw new IllegalArgumentException(
                "The given host:port ('" + hostPort + "') is invalid", e);
    }
}
|
Validates if the given String represents a hostname:port.
<p>Works also for ipv6.
<p>See:
http://stackoverflow.com/questions/2345063/java-common-way-to-validate-and-convert-hostport-to-inetsocketaddress
@return URL object for accessing host and port
|
validateHostPortString
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/NetUtils.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/NetUtils.java
|
Apache-2.0
|
/**
 * Converts an {@link InetSocketAddress} into a URL (with the default "http://" scheme).
 * Raw IPv6 literals are wrapped in square brackets so the port separator stays
 * unambiguous.
 *
 * @param socketAddress the socket address to convert
 * @return the corresponding URL
 */
public static URL socketToUrl(InetSocketAddress socketAddress) {
    String host = socketAddress.getHostString();
    final InetAddress resolved = socketAddress.getAddress();
    // Bracket only genuine IPv6 address literals, not hostnames that resolved to IPv6.
    final boolean isIpV6Literal =
            resolved instanceof Inet6Address && host.equals(resolved.getHostAddress());
    if (isIpV6Literal) {
        host = "[" + host + "]";
    }
    return validateHostPortString(host + ":" + socketAddress.getPort());
}
|
Converts an InetSocketAddress to a URL. This method assigns the "http://" schema to the URL
by default.
@param socketAddress the InetSocketAddress to be converted
@return a URL object representing the provided socket address with "http://" schema
|
socketToUrl
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/NetUtils.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/NetUtils.java
|
Apache-2.0
|
/**
 * Accepts a connection on the given server socket, retrying on any
 * {@link SocketTimeoutException}. With SO_TIMEOUT == 0 (infinite timeout) such exceptions
 * should be impossible, but JDK-8237858 can produce them spuriously on JDK 11; retrying
 * restores the expected blocking behavior.
 *
 * @param serverSocket the server socket; its SO_TIMEOUT must be 0
 * @return the accepted socket
 * @throws IOException see {@link ServerSocket#accept()}
 * @see <a href="https://bugs.openjdk.java.net/browse/JDK-8237858">JDK-8237858</a>
 */
public static Socket acceptWithoutTimeout(ServerSocket serverSocket) throws IOException {
    Preconditions.checkArgument(
            serverSocket.getSoTimeout() == 0, "serverSocket SO_TIMEOUT option must be 0");
    for (; ; ) {
        try {
            return serverSocket.accept();
        } catch (SocketTimeoutException ignored) {
            // spurious timeout despite SO_TIMEOUT == 0 (JDK-8237858): just retry
        }
    }
}
|
Calls {@link ServerSocket#accept()} on the provided server socket, suppressing any thrown
{@link SocketTimeoutException}s. This is a workaround for the underlying JDK-8237858 bug in
JDK 11 that can cause errant SocketTimeoutExceptions to be thrown at unexpected times.
<p>This method expects the provided ServerSocket has no timeout set (SO_TIMEOUT of 0),
indicating an infinite timeout. It will suppress all SocketTimeoutExceptions, even if a
ServerSocket with a non-zero timeout is passed in.
@param serverSocket a ServerSocket with {@link SocketOptions#SO_TIMEOUT SO_TIMEOUT} set to 0;
if SO_TIMEOUT is greater than 0, then this method will suppress SocketTimeoutException;
must not be null; SO_TIMEOUT option must be set to 0
@return the new Socket
@throws IOException see {@link ServerSocket#accept()}
@see <a href="https://bugs.openjdk.java.net/browse/JDK-8237858">JDK-8237858</a>
|
acceptWithoutTimeout
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/NetUtils.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/NetUtils.java
|
Apache-2.0
|
/**
 * Finds a currently non-occupied port by asking the OS for an ephemeral port (up to 50
 * attempts) and guarding the chosen port with a cross-process {@link FileLock}.
 *
 * @return a {@link Port} holding the port number together with the file lock reserving it
 * @throws RuntimeException if no free, lockable port was found within 50 attempts
 */
public static Port getAvailablePort() {
    for (int i = 0; i < 50; i++) {
        try (ServerSocket serverSocket = new ServerSocket(0)) {
            int port = serverSocket.getLocalPort();
            if (port != 0) {
                // The lock-file name encodes the port so that concurrent callers
                // (possibly in other JVMs) do not hand out the same port twice.
                FileLock fileLock = new FileLock(NetUtils.class.getName() + port);
                if (fileLock.tryLock()) {
                    // NOTE(review): the ServerSocket closes when this try block exits, so
                    // the port is reserved only via the file lock, not actually bound —
                    // a process that ignores the lock could still grab it. TODO confirm
                    // callers tolerate this race.
                    return new Port(port, fileLock);
                } else {
                    fileLock.unlockAndDestroy();
                }
            }
        } catch (IOException ignored) {
            // binding or lock-file creation failed; try the next ephemeral port
        }
    }
    throw new RuntimeException("Could not find a free permitted port on the machine.");
}
|
Find a non-occupied port.
@return A non-occupied port.
|
getAvailablePort
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/NetUtils.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/NetUtils.java
|
Apache-2.0
|
/**
 * Returns an address in a normalized format for Pekko. IPv6 addresses are rewritten into
 * their canonical URL representation to avoid mismatches with Pekko's exact URL match
 * policy.
 *
 * @param host the hostname, IPv4 or IPv6 address; {@code null} maps to the loopback address
 *     (mirroring {@code InetAddress.getByName} and RFC 3330)
 * @return the host, normalized if it is an IPv6 address
 * @throws IllegalConfigurationException if the host is neither a valid IP address nor an
 *     acceptable hostname
 */
public static String unresolvedHostToNormalizedString(String host) {
    String candidate;
    if (host == null) {
        // null host represents the loopback interface, per InetAddress.getByName / RFC 3330
        candidate = InetAddress.getLoopbackAddress().getHostAddress();
    } else {
        candidate = host.trim().toLowerCase();
        // strip URL-style brackets around an IPv6 literal, e.g. "[::1]" -> "::1"
        if (candidate.startsWith("[") && candidate.endsWith("]")) {
            final String inner = candidate.substring(1, candidate.length() - 1);
            if (InetAddresses.isInetAddress(inner)) {
                candidate = inner;
            }
        }
    }
    // normalize literal IP addresses; everything else is validated as a hostname
    if (InetAddresses.isInetAddress(candidate)) {
        final InetAddress parsed = InetAddresses.forString(candidate);
        if (parsed instanceof Inet6Address) {
            candidate = getIPv6UrlRepresentation(parsed.getAddress());
        }
        return candidate;
    }
    try {
        // these characters are not allowed in hostnames
        Preconditions.checkArgument(!candidate.startsWith("."));
        Preconditions.checkArgument(!candidate.endsWith("."));
        Preconditions.checkArgument(!candidate.contains(":"));
    } catch (Exception e) {
        throw new IllegalConfigurationException("The configured hostname is not valid", e);
    }
    return candidate;
}
|
Returns an address in a normalized format for Pekko. When an IPv6 address is specified, it
normalizes the IPv6 address to avoid complications with the exact URL match policy of Pekko.
@param host The hostname, IPv4 or IPv6 address
@return host which will be normalized if it is an IPv6 address
|
unresolvedHostToNormalizedString
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/NetUtils.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/NetUtils.java
|
Apache-2.0
|
/**
 * Returns a valid 'host:port' address for Pekko. When an IPv6 address is specified, it is
 * normalized to avoid complications with Pekko's exact URL match policy.
 *
 * @param host the hostname, IPv4 or IPv6 address
 * @param port the port, must be in the range [0, 65535]
 * @return {@code host:port} where host is normalized if it is an IPv6 address
 * @throws IllegalArgumentException if the port is outside the valid range
 */
public static String unresolvedHostAndPortToNormalizedString(String host, int port) {
    // message previously ended with a stray comma ("range,"); fixed to proper punctuation
    Preconditions.checkArgument(isValidHostPort(port), "Port is not within the valid range.");
    return unresolvedHostToNormalizedString(host) + ":" + port;
}
|
Returns a valid address for Pekko. It returns a String of format 'host:port'. When an IPv6
address is specified, it normalizes the IPv6 address to avoid complications with the exact
URL match policy of Pekko.
@param host The hostname, IPv4 or IPv6 address
@param port The port
@return host:port where host will be normalized if it is an IPv6 address
|
unresolvedHostAndPortToNormalizedString
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/NetUtils.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/NetUtils.java
|
Apache-2.0
|
/**
 * Encodes an IP address properly as a URL string. In particular, IPv6 addresses receive the
 * bracketed/compressed formatting required for inclusion in URLs.
 *
 * @param address the IP address to encode
 * @return the URL-safe string form of the address
 * @throws NullPointerException if the address is {@code null}
 * @throws IllegalArgumentException if the address is neither IPv4 nor IPv6
 */
public static String ipAddressToUrlString(InetAddress address) {
    if (address == null) {
        throw new NullPointerException("address is null");
    }
    if (address instanceof Inet4Address) {
        // IPv4 dotted-quad form is already URL-safe
        return address.getHostAddress();
    }
    if (address instanceof Inet6Address) {
        return getIPv6UrlRepresentation((Inet6Address) address);
    }
    throw new IllegalArgumentException("Unrecognized type of InetAddress: " + address);
}
|
Encodes an IP address properly as a URL string. This method makes sure that IPv6 addresses
have the proper formatting to be included in URLs.
@param address The IP address to encode.
@return The proper URL string encoded IP address.
|
ipAddressToUrlString
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/NetUtils.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/NetUtils.java
|
Apache-2.0
|
/**
 * Encodes an IP address and port for inclusion in a URL. In particular, IPv6 addresses are
 * formatted properly for URLs.
 *
 * @param address the address to include in the URL
 * @param port the port for the URL address
 * @return the URL-safe {@code address:port} string
 */
public static String ipAddressAndPortToUrlString(InetAddress address, int port) {
    final String encodedAddress = ipAddressToUrlString(address);
    return encodedAddress + ":" + port;
}
|
Encodes an IP address and port to be included in URL. in particular, this method makes sure
that IPv6 addresses have the proper formatting to be included in URLs.
@param address The address to be included in the URL.
@param port The port for the URL address.
@return The proper URL string encoded IP address and port.
|
ipAddressAndPortToUrlString
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/NetUtils.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/NetUtils.java
|
Apache-2.0
|
/**
 * Encodes a socket address (IP address and port) for inclusion in a URL. In particular,
 * IPv6 addresses are formatted properly for URLs.
 *
 * @param address the socket address carrying the IP address and port; must be resolved
 * @return the URL-safe {@code address:port} string
 * @throws IllegalArgumentException if the address is unresolved
 */
public static String socketAddressToUrlString(InetSocketAddress address) {
    if (!address.isUnresolved()) {
        return ipAddressAndPortToUrlString(address.getAddress(), address.getPort());
    }
    throw new IllegalArgumentException("Address cannot be resolved: " + address.getHostString());
}
|
Encodes an IP address and port to be included in URL. in particular, this method makes sure
that IPv6 addresses have the proper formatting to be included in URLs.
@param address The socket address with the IP address and port.
@return The proper URL string encoded IP address and port.
|
socketAddressToUrlString
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/NetUtils.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/NetUtils.java
|
Apache-2.0
|
/**
 * Normalizes and encodes a hostname and port for inclusion in a URL. In particular, IPv6
 * address literals are formatted properly for URLs.
 *
 * @param host the host name or address to include in the URL
 * @param port the port for the URL address
 * @return the URL-safe {@code host:port} string
 * @throws UnknownHostException if the hostname cannot be resolved to an address
 */
public static String hostAndPortToUrlString(String host, int port) throws UnknownHostException {
    final InetAddress resolved = InetAddress.getByName(host);
    return ipAddressAndPortToUrlString(resolved, port);
}
|
Normalizes and encodes a hostname and port to be included in URL. In particular, this method
makes sure that IPv6 address literals have the proper formatting to be included in URLs.
@param host The address to be included in the URL.
@param port The port for the URL address.
@return The proper URL string encoded IP address and port.
@throws java.net.UnknownHostException Thrown, if the hostname cannot be translated into a
URL.
|
hostAndPortToUrlString
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/NetUtils.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/NetUtils.java
|
Apache-2.0
|
/**
 * Tries to allocate a server socket from the given set of candidate ports.
 *
 * @param portsIterator the candidate ports to try, in order
 * @param factory the factory used to create the server socket for a given port
 * @return the first successfully allocated socket, or {@code null} if every port failed
 */
public static ServerSocket createSocketFromPorts(
        Iterator<Integer> portsIterator, SocketFactory factory) {
    while (portsIterator.hasNext()) {
        final int candidate = portsIterator.next();
        LOG.debug("Trying to open socket on port {}", candidate);
        try {
            return factory.createSocket(candidate);
        } catch (IOException | IllegalArgumentException e) {
            // log the full stack trace only at debug level to keep info logs compact
            if (LOG.isDebugEnabled()) {
                LOG.debug("Unable to allocate socket on port", e);
            } else {
                LOG.info(
                        "Unable to allocate on port {}, due to error: {}",
                        candidate,
                        e.getMessage());
            }
        }
    }
    return null;
}
|
Tries to allocate a socket from the given sets of ports.
@param portsIterator A set of ports to choose from.
@param factory A factory for creating the SocketServer
@return null if no port was available or an allocated socket.
|
createSocketFromPorts
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/NetUtils.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/NetUtils.java
|
Apache-2.0
|
/**
 * Returns the wildcard address used to listen on all local interfaces.
 *
 * @return either {@code 0.0.0.0} (IPv4) or {@code ::} (IPv6), depending on the IP setup;
 *     the value is precomputed in {@code WILDCARD_ADDRESS} — how it is chosen is decided
 *     elsewhere in this class
 */
public static String getWildcardIPAddress() {
    return WILDCARD_ADDRESS;
}
|
Returns the wildcard address to listen on all interfaces.
@return Either 0.0.0.0 or :: depending on the IP setup.
|
getWildcardIPAddress
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/NetUtils.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/NetUtils.java
|
Apache-2.0
|
/**
 * Checks whether the given port is in the right range for connecting to a remote endpoint.
 *
 * @param port the port to check
 * @return {@code true} iff the port is in the range [1, 65535]
 */
public static boolean isValidClientPort(int port) {
    return port >= 1 && port <= 65535;
}
|
Check whether the given port is in right range when connecting to somewhere.
@param port the port to check
@return true if the number in the range 1 to 65535
|
isValidClientPort
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/NetUtils.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/NetUtils.java
|
Apache-2.0
|
/**
 * Checks whether the given port is in the right range for a locally bound port (0 requests
 * an ephemeral port from the OS).
 *
 * @param port the port to check
 * @return {@code true} iff the port is in the range [0, 65535]
 */
public static boolean isValidHostPort(int port) {
    return port >= 0 && port <= 65535;
}
|
check whether the given port is in right range when getting port from local system.
@param port the port to check
@return true if the number in the range 0 to 65535
|
isValidHostPort
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/NetUtils.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/NetUtils.java
|
Apache-2.0
|
/**
 * Gets the operating system that the JVM runs on, as cached in the static {@code os} field.
 * Returns {@code UNKNOWN} if the operating system could not be determined.
 *
 * <p>NOTE(review): {@code os} is presumably initialized once from the Java system
 * properties — confirm against the field's initializer.
 *
 * @return the enum constant for the operating system, or {@code UNKNOWN} if it was not
 *     possible to determine
 */
public static OperatingSystem getCurrentOperatingSystem() {
    return os;
}
|
Gets the operating system that the JVM runs on from the java system properties. this method
returns <tt>UNKNOWN</tt>, if the operating system was not successfully determined.
@return The enum constant for the operating system, or <tt>UNKNOWN</tt>, if it was not
possible to determine.
|
getCurrentOperatingSystem
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/OperatingSystem.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/OperatingSystem.java
|
Apache-2.0
|
/**
 * Checks whether the operating system this JVM runs on is Linux.
 *
 * @return {@code true} iff this JVM runs on Linux
 */
public static boolean isLinux() {
    return LINUX == getCurrentOperatingSystem();
}
|
Checks whether the operating system this JVM runs on is Linux.
@return <code>true</code> if the operating system this JVM runs on is Linux, <code>false
</code> otherwise
|
isLinux
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/OperatingSystem.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/OperatingSystem.java
|
Apache-2.0
|
/**
 * Checks whether the operating system this JVM runs on is Solaris.
 *
 * @return {@code true} iff this JVM runs on Solaris
 */
public static boolean isSolaris() {
    return SOLARIS == getCurrentOperatingSystem();
}
|
Checks whether the operating system this JVM runs on is Solaris.
@return <code>true</code> if the operating system this JVM runs on is Solaris, <code>false
</code> otherwise
|
isSolaris
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/OperatingSystem.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/OperatingSystem.java
|
Apache-2.0
|
/**
 * Parses the operating system that the JVM runs on from the Java system properties. If the
 * operating system was not successfully determined, this method returns {@code UNKNOWN}.
 *
 * @return the enum constant for the operating system, or {@code UNKNOWN} if it was not
 *     possible to determine
 */
private static OperatingSystem readOSFromSystemProperties() {
    String osName = System.getProperty(OS_KEY);
    if (osName == null) {
        // The property can be absent in restricted/embedded JVMs; previously this threw an
        // NPE, contradicting the documented "returns UNKNOWN" contract.
        return UNKNOWN;
    }
    if (osName.startsWith(LINUX_OS_PREFIX)) {
        return LINUX;
    }
    if (osName.startsWith(WINDOWS_OS_PREFIX)) {
        return WINDOWS;
    }
    if (osName.startsWith(MAC_OS_PREFIX)) {
        return MAC_OS;
    }
    if (osName.startsWith(FREEBSD_OS_PREFIX)) {
        return FREE_BSD;
    }
    // Solaris reports varying names ("SunOS", "Solaris"), so match case-insensitively.
    // NOTE(review): default-locale toLowerCase() is locale-sensitive (Turkish dotless-i);
    // consider toLowerCase(Locale.ROOT) — kept as-is to avoid a new import here.
    String osNameLowerCase = osName.toLowerCase();
    if (osNameLowerCase.contains(SOLARIS_OS_INFIX_1)
            || osNameLowerCase.contains(SOLARIS_OS_INFIX_2)) {
        return SOLARIS;
    }
    return UNKNOWN;
}
|
Parses the operating system that the JVM runs on from the java system properties. If the
operating system was not successfully determined, this method returns {@code UNKNOWN}.
@return The enum constant for the operating system, or {@code UNKNOWN}, if it was not
possible to determine.
|
readOSFromSystemProperties
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/OperatingSystem.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/OperatingSystem.java
|
Apache-2.0
|
/**
 * Evaluates the given supplier and wraps its outcome.
 *
 * @param valueSupplier the computation to run
 * @return an {@link OptionalFailure} wrapping the supplier's value, or wrapping the
 *     {@link Exception} it threw
 */
public static <T> OptionalFailure<T> createFrom(CheckedSupplier<T> valueSupplier) {
    try {
        final T value = valueSupplier.get();
        return of(value);
    } catch (Exception failure) {
        return ofFailure(failure);
    }
}
|
@return wrapped {@link OptionalFailure} returned by {@code valueSupplier} or wrapped failure
if {@code valueSupplier} has thrown an {@link Exception}.
|
createFrom
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/util/OptionalFailure.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/util/OptionalFailure.java
|
Apache-2.0
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.